diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000000..46d619f78f --- /dev/null +++ b/.editorconfig @@ -0,0 +1,30 @@ +root = true + +[*.java] +indent_style = tab +indent_size = 4 + +[*.adoc] +indent_style = tab +indent_size = 4 + +[*.groovy] +indent_style = tab +indent_size = 4 + +[*.xml] +indent_style = tab +indent_size = 4 + +[*.yml] +indent_style = space +indent_size = 2 + +[*.yaml] +indent_style = space +indent_size = 2 + +[*.sh] +indent_style = space +indent_size = 4 +end_of_line = lf diff --git a/.github/actions/build-images/action.yml b/.github/actions/build-images/action.yml new file mode 100644 index 0000000000..378a9a9c48 --- /dev/null +++ b/.github/actions/build-images/action.yml @@ -0,0 +1,60 @@ +name: 'Build And Publish Images' +description: 'Builds and publishes images with pack' +inputs: + version: + description: 'dataflow version' + required: true + dockerhub-username: + description: 'dockerhub username' + required: true + dockerhub-password: + description: 'dockerhub password' + required: true + GCR_JSON_KEY: + description: 'GCR_JSON_KEY' + required: true +runs: + using: "composite" + steps: + - name: Install pack + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + pack-version: 0.30.0 + + # docker hub login + - name: Login to docker.io + uses: docker/login-action@v3 + with: + username: ${{ inputs.dockerhub-username }} + password: ${{ inputs.dockerhub-password }} + - name: Login to GCR + uses: docker/login-action@v3 + with: + registry: gcr.io + username: _json_key + password: ${{ inputs.GCR_JSON_KEY }} + - name: Build Images + env: + TAG: ${{ inputs.version }} + shell: bash + run: ${{ github.action_path }}/build-images.sh + + # build/publish support images + - name: Publish Grafana Prometheus + uses: docker/build-push-action@v2 + with: + context: src/grafana/prometheus/docker/grafana + push: true + tags: springcloud/spring-cloud-dataflow-grafana-prometheus:${{ inputs.version }} + - name: Publish Grafana InfluxDB + uses: docker/build-push-action@v2 + with: + context: src/grafana/influxdb/docker/grafana + push: true + tags: springcloud/spring-cloud-dataflow-grafana-influxdb:${{ inputs.version }} + - name: Publish Prometheus Local + uses: docker/build-push-action@v2 + with: + context: src/grafana/prometheus/docker/prometheus-local + push: true + tags: springcloud/spring-cloud-dataflow-prometheus-local:${{ inputs.version }} diff --git a/.github/actions/build-images/build-images.sh b/.github/actions/build-images/build-images.sh new file mode 100755 index 0000000000..cc702f8b9d --- /dev/null +++ b/.github/actions/build-images/build-images.sh @@ -0,0 +1,55 @@ +#!/usr/bin/env bash +if [ "$TAG" == "" ]; then + echo "TAG not found" + exit 1 +fi +if [ "$DEFAULT_JDK" = "" ]; then + echo "DEFAULT_JDK not found using 17" + DEFAULT_JDK=17 +else + echo "DEFAULT_JDK=$DEFAULT_JDK" +fi + +function pack_image { + JAR="$1-$TAG.jar" + REPO="$2" + v="$3" + if [ ! -f "$JAR" ]; then + echo "File not found $JAR" + exit 2 + fi + echo "Creating: $REPO:$TAG-jdk$v" + # --buildpack "paketo-buildpacks/java@10.0.0" --buildpack "paketo-buildpacks/bellsoft-liberica@10.3.2" + pack build --builder paketobuildpacks/builder-jammy-base:latest \ + --path "$JAR" \ + --trust-builder --verbose \ + --env BP_JVM_VERSION=$v "$REPO:$TAG-jdk$v" + RC=$? 
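+    # pack exits non-zero when an image build fails; the exit code is checked below
+    # so a broken jar never gets tagged or pushed.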
+    if ((RC!=0)); then
+        echo "Error $RC packaging $JAR"
+        exit $RC
+    fi
+    echo "Created: $REPO:$TAG-jdk$v"
+}
+LEN=$(jq '.include | length' .github/workflows/images.json)
+for ((i = 0; i < LEN; i++)); do
+    TARGET="$(jq -r --argjson index $i '.include[$index] | .path' .github/workflows/images.json)"
+    IMAGE="$(jq -r --argjson index $i '.include[$index] | .image' .github/workflows/images.json)"
+    ARTIFACT_ID="$(jq -r --argjson index $i '.include[$index] | .name' .github/workflows/images.json)"
+    # 8 11 17 21
+    for v in 17 21; do
+        pack_image "$TARGET/$ARTIFACT_ID" $IMAGE $v $ARTIFACT_ID
+        RC=$?
+        if [ $RC -ne 0 ]; then
+            exit $RC
+        fi
+        docker push "$IMAGE:$TAG-jdk$v"
+        echo "Pushed $IMAGE:$TAG-jdk$v"
+        if [ "$DEFAULT_JDK" == "$v" ]; then
+            docker tag "$IMAGE:$TAG-jdk$DEFAULT_JDK" "$IMAGE:$TAG"
+            docker push "$IMAGE:$TAG"
+            echo "Pushed $IMAGE:$TAG"
+        fi
+    done
+done
+
diff --git a/.github/actions/build-package-bundle/action.yml b/.github/actions/build-package-bundle/action.yml
new file mode 100644
index 0000000000..33dcac2e01
--- /dev/null
+++ b/.github/actions/build-package-bundle/action.yml
@@ -0,0 +1,84 @@
+name: Build Package Bundle
+description: Build Package Bundle
+inputs:
+  dataflow-version:
+    description: dataflow version
+    required: true
+  skipper-version:
+    description: skipper version
+    required: true
+  server-version:
+    description: server version
+    required: true
+  server-repository:
+    description: server repository
+    required: true
+  skipper-repository:
+    description: skipper repository
+    required: true
+  ctr-version:
+    description: ctr version
+    required: true
+  package-name:
+    description: package name
+    required: true
+  package-bundle-template:
+    description: path to package bundle template
+    required: true
+  imgpkg-lock-template:
+    description: path to imgpkg lock template
+    required: true
+  config:
+    description: path to ytt config files dir
+    required: true
+  project-directory:
+    description: The working directory
+    required: true
+  SRP_CLIENT_ID:
+    description: secrets.SRP_CLIENT_ID
+    required: false
+  SRP_CLIENT_SECRET:
+    description: secrets.SRP_CLIENT_SECRET
+    required: false
+outputs:
+  bundle-path:
+    description: Location path where bundle was built
+    value: ${{ steps.tanzu-dance.outputs.bundle-path }}
+
+runs:
+  using: "composite"
+  steps:
+    - name: Tanzu Dance
+      id: tanzu-dance
+      env:
+        DATAFLOW_VERSION: ${{ inputs.dataflow-version }}
+        SKIPPER_VERSION: ${{ inputs.skipper-version }}
+        SKIPPER_REPOSITORY: ${{ inputs.skipper-repository }}
+        SERVER_VERSION: ${{ inputs.server-version }}
+        SERVER_REPOSITORY: ${{ inputs.server-repository }}
+        CTR_VERSION: ${{ inputs.ctr-version }}
+        PACKAGE_NAME: ${{ inputs.package-name }}
+        PACKAGE_BUNDLE_TEMPLATE: ${{ inputs.package-bundle-template }}
+        PACKAGE_BUNDLE_GENERATED: ${{ runner.temp }}/generated/packagebundle
+        VENDIR_SRC_IN: ${{ inputs.config }}
+        IMGPKG_LOCK_TEMPLATE: ${{ inputs.imgpkg-lock-template }}
+        IMGPKG_LOCK_GENERATED_IN: ${{ runner.temp }}/generated/imgpkgin
+        IMGPKG_LOCK_GENERATED_OUT: ${{ runner.temp }}/generated/imgpkgout
+        SRP_CLIENT_ID: ${{ inputs.SRP_CLIENT_ID }}
+        SRP_CLIENT_SECRET: ${{ inputs.SRP_CLIENT_SECRET }}
+      shell: bash
+      working-directory: ${{ inputs.project-directory }}
+      run: |
+        echo "bundle-path=$PACKAGE_BUNDLE_GENERATED" >> $GITHUB_OUTPUT
+        SCDF_DIR="${{ inputs.project-directory || '.'
}}" + if [ "$USE_SRP" == "true" ]; then + if [ "$SRP_SCRIPTS" == "" ]; then + echo "SRP_SCRIPTS not defined" + exit 2 + fi + export OBSERVATION=package-bundle + export OUTPUT_VERSION=$SERVER_VERSION + $SRP_SCRIPTS/run-under-observer.sh "${{ github.action_path }}/build-package-bundle.sh" + else + "${{ github.action_path }}/build-package-bundle.sh" + fi diff --git a/.github/actions/build-package-bundle/build-package-bundle.sh b/.github/actions/build-package-bundle/build-package-bundle.sh new file mode 100755 index 0000000000..be92cb92ab --- /dev/null +++ b/.github/actions/build-package-bundle/build-package-bundle.sh @@ -0,0 +1,91 @@ +#!/usr/bin/env bash + +function check_env() { + eval ev='$'$1 + if [ "$ev" == "" ]; then + echo "env var $1 not defined" + if ((sourced != 0)); then + return 1 + else + exit 1 + fi + fi +} + +TMP=$(mktemp -d) +if [ "$PACKAGE_BUNDLE_GENERATED" = "" ]; then + export PACKAGE_BUNDLE_GENERATED="$TMP/generated/packagebundle" +fi +mkdir -p "$PACKAGE_BUNDLE_GENERATED" +if [ "$IMGPKG_LOCK_GENERATED_IN" = "" ]; then + export IMGPKG_LOCK_GENERATED_IN="$TMP/generated/imgpkgin" +fi +mkdir -p "$IMGPKG_LOCK_GENERATED_IN" +if [ "$IMGPKG_LOCK_GENERATED_OUT" = "" ]; then + export IMGPKG_LOCK_GENERATED_OUT="$TMP/generated/imgpkgout" +fi +mkdir -p "$IMGPKG_LOCK_GENERATED_OUT" + +check_env PACKAGE_BUNDLE_TEMPLATE +check_env SERVER_VERSION +check_env SERVER_REPOSITORY +check_env DATAFLOW_VERSION +check_env SKIPPER_VERSION +check_env SKIPPER_REPOSITORY +check_env PACKAGE_NAME +check_env IMGPKG_LOCK_TEMPLATE +check_env VENDIR_SRC_IN + +echo "Build Package Bundle: $PACKAGE_BUNDLE_TEMPLATE package.name=$PACKAGE_NAME, server.repository=$SERVER_REPOSITORY, server.version=$SERVER_VERSION,skipper.repository=$SKIPPER_REPOSITORY, skipper.version=$SKIPPER_VERSION, output=$PACKAGE_BUNDLE_GENERATED" +set +e +time ls > /dev/null 2>&1 +RC=$? 
+if ((RC == 0)); then
+    MEASURE="time -v -o times.txt -a"
+else
+    MEASURE=""
+fi
+set -e
+echo "ytt -f $PACKAGE_BUNDLE_TEMPLATE" > times.txt
+
+$MEASURE ytt -f "$PACKAGE_BUNDLE_TEMPLATE" \
+    --output-files "$PACKAGE_BUNDLE_GENERATED" \
+    --data-value-yaml server.version="$SERVER_VERSION" \
+    --data-value-yaml server.repository="$SERVER_REPOSITORY" \
+    --data-value-yaml ctr.version="$DATAFLOW_VERSION" \
+    --data-value-yaml dataflow.version="$DATAFLOW_VERSION" \
+    --data-value-yaml skipper.version="$SKIPPER_VERSION" \
+    --data-value-yaml skipper.repository="$SKIPPER_REPOSITORY" \
+    --data-value-yaml grafana.version="$DATAFLOW_VERSION" \
+    --data-value-yaml package.name="$PACKAGE_NAME" \
+    --file-mark 'config/values.yml:type=text-template' \
+    --file-mark '.imgpkg/bundle.yaml:type=text-template'
+echo "ytt -f $IMGPKG_LOCK_TEMPLATE" >> times.txt
+$MEASURE ytt -f "$IMGPKG_LOCK_TEMPLATE" \
+    --output-files "$IMGPKG_LOCK_GENERATED_IN" \
+    --data-value-yaml server.version="$SERVER_VERSION" \
+    --data-value-yaml server.repository="$SERVER_REPOSITORY" \
+    --data-value-yaml ctr.version="$DATAFLOW_VERSION" \
+    --data-value-yaml dataflow.version="$DATAFLOW_VERSION" \
+    --data-value-yaml skipper.version="$SKIPPER_VERSION" \
+    --data-value-yaml skipper.repository="$SKIPPER_REPOSITORY" \
+    --data-value-yaml grafana.version="$DATAFLOW_VERSION" \
+    --file-mark '**/*.yml:type=text-template'
+
+mkdir -p "$PACKAGE_BUNDLE_GENERATED/config/upstream"
+cp -R "$VENDIR_SRC_IN" "$PACKAGE_BUNDLE_GENERATED/config/upstream"
+echo "vendir sync --chdir $PACKAGE_BUNDLE_GENERATED" >> times.txt
+$MEASURE vendir sync --chdir "$PACKAGE_BUNDLE_GENERATED"
+mkdir -p "$IMGPKG_LOCK_GENERATED_OUT"
+
+for DIR in $(ls $IMGPKG_LOCK_GENERATED_IN); do
+    echo "ytt for $DIR" >> times.txt
+    $MEASURE ytt -f "$PACKAGE_BUNDLE_GENERATED" -f "$IMGPKG_LOCK_GENERATED_IN/$DIR" > "$IMGPKG_LOCK_GENERATED_OUT/$DIR.yml"
+done
+
+mkdir -p "$PACKAGE_BUNDLE_GENERATED/.imgpkg"
+echo "kbld -f $IMGPKG_LOCK_GENERATED_OUT" >> times.txt
+$MEASURE kbld -f "$IMGPKG_LOCK_GENERATED_OUT" \
+    --imgpkg-lock-output "$PACKAGE_BUNDLE_GENERATED/.imgpkg/images.yml"
+
+cat times.txt
\ No newline at end of file
diff --git a/.github/actions/build-repository-bundle/action.yml b/.github/actions/build-repository-bundle/action.yml
new file mode 100644
index 0000000000..7b48b64689
--- /dev/null
+++ b/.github/actions/build-repository-bundle/action.yml
@@ -0,0 +1,59 @@
+name: Build Repository Bundle
+description: Build Repository Bundle
+inputs:
+  version:
+    description: dataflow version
+    required: true
+  repo-bundle-template:
+    description: path to repo bundle template
+    required: true
+  package-bundle-repository:
+    description: repository for package bundles
+    required: true
+  package-name:
+    description: package name
+    required: true
+  project-directory:
+    description: The working directory
+    required: true
+  SRP_CLIENT_ID:
+    description: secrets.SRP_CLIENT_ID
+    required: false
+  SRP_CLIENT_SECRET:
+    description: secrets.SRP_CLIENT_SECRET
+    required: false
+outputs:
+  bundle-path:
+    description: Location path where bundle was built
+    value: ${{ steps.tanzu-dance.outputs.bundle-path }}
+
+runs:
+  using: "composite"
+  steps:
+    - name: Tanzu Dance
+      id: tanzu-dance
+      env:
+        PACKAGE_VERSION: ${{ inputs.version }}
+        PACKAGE_BUNDLE_REPOSITORY: ${{ inputs.package-bundle-repository }}
+        PACKAGE_NAME: ${{ inputs.package-name }}
+        REPO_BUNDLE_TEMPLATE: ${{ inputs.repo-bundle-template }}
+        REPO_BUNDLE_RENDERED: ${{ runner.temp }}/generated/reporendered
+        REPO_BUNDLE_GENERATED: ${{ runner.temp
}}/generated/repobundle + SRP_CLIENT_ID: ${{ inputs.SRP_CLIENT_ID }} + SRP_CLIENT_SECRET: ${{ inputs.SRP_CLIENT_SECRET }} + shell: bash + working-directory: ${{ inputs.project-directory }} + run: | + echo "bundle-path=$REPO_BUNDLE_GENERATED" >> $GITHUB_OUTPUT + SCDF_DIR="${{ inputs.project-directory || '.' }}" + if [ "$USE_SRP" == "true" ]; then + if [ "$SRP_SCRIPTS" == "" ]; then + echo "SRP_SCRIPTS not defined" + exit 2 + fi + export OBSERVATION=repository-bundle + export OUTPUT_VERSION=$PACKAGE_VERSION + $SRP_SCRIPTS/run-under-observer.sh "${{ github.action_path }}/build-repository-bundle.sh" + else + "${{ github.action_path }}/build-repository-bundle.sh" + fi diff --git a/.github/actions/build-repository-bundle/build-repository-bundle.sh b/.github/actions/build-repository-bundle/build-repository-bundle.sh new file mode 100644 index 0000000000..b090ba5479 --- /dev/null +++ b/.github/actions/build-repository-bundle/build-repository-bundle.sh @@ -0,0 +1,57 @@ +#!/usr/bin/env bash +function check_env() { + eval ev='$'$1 + if [ "$ev" == "" ]; then + echo "env var $1 not defined" + if ((sourced != 0)); then + return 1 + else + exit 1 + fi + fi +} + +TMP=$(mktemp -d) +if [ "$REPO_BUNDLE_GENERATED" = "" ]; then + export REPO_BUNDLE_GENERATED="$TMP/generated/repobundle" +fi +mkdir -p $REPO_BUNDLE_GENERATED/packages +mkdir -p $REPO_BUNDLE_GENERATED/.imgpkg + +if [ "$REPO_BUNDLE_RENDERED" = "" ]; then + export REPO_BUNDLE_RENDERED="$TMP/generated/reporendered" +fi +mkdir -p "$REPO_BUNDLE_RENDERED" + +check_env REPO_BUNDLE_TEMPLATE +check_env REPO_BUNDLE_RENDERED +check_env PACKAGE_VERSION +check_env PACKAGE_BUNDLE_REPOSITORY +check_env PACKAGE_NAME + +echo "Build Repository Bundle: $REPO_BUNDLE_TEMPLATE, project.version=$PACKAGE_VERSION, package.name=$PACKAGE_NAME, repository=$PACKAGE_BUNDLE_REPOSITORY, output=$REPO_BUNDLE_RENDERED" + +set -e + +ytt \ + -f $REPO_BUNDLE_TEMPLATE \ + --output-files $REPO_BUNDLE_RENDERED \ + --data-value-yaml project.version=$PACKAGE_VERSION \ + --data-value-yaml repository=$PACKAGE_BUNDLE_REPOSITORY \ + --data-value-yaml package.name=$PACKAGE_NAME \ + --data-value-yaml package.timestamp=$(date -u +"%Y-%m-%dT%H:%M:%SZ") \ + --file-mark 'package.yml:type=text-plain' \ + --file-mark 'metadata.yml:type=text-plain' \ + --file-mark 'values-schema.yml:type=text-plain' \ + --file-mark 'values-schema.star:type=text-plain' \ + --file-mark 'values-schema.star:for-output=true' \ + --file-mark 'versions.yml:type=text-template' + +ytt \ + -f $REPO_BUNDLE_RENDERED \ + --file-mark 'values-schema.yml:type=data' \ + > $REPO_BUNDLE_GENERATED/packages/packages.yml + +kbld \ + --file $REPO_BUNDLE_GENERATED/packages \ + --imgpkg-lock-output $REPO_BUNDLE_GENERATED/.imgpkg/images.yml diff --git a/.github/actions/install-xmlutils/action.yml b/.github/actions/install-xmlutils/action.yml new file mode 100644 index 0000000000..14fc435087 --- /dev/null +++ b/.github/actions/install-xmlutils/action.yml @@ -0,0 +1,11 @@ +name: 'Install xsltproc' +description: 'Install xsltproc' + +runs: + using: composite + steps: + - name: 'Install xmlutils' + shell: bash + run: | + sudo apt-get update -q -y + sudo apt-get install -q -y xsltproc libxml2-utils diff --git a/.github/actions/publish-bundle/action.yml b/.github/actions/publish-bundle/action.yml new file mode 100644 index 0000000000..b150fafa0d --- /dev/null +++ b/.github/actions/publish-bundle/action.yml @@ -0,0 +1,54 @@ +name: Publish Bundle +description: Publish Bundle +inputs: + path: + description: path to bundle files + required: true + 
repository: + description: repository to publish + required: true + project-directory: + description: The working directory + required: true + version: + description: semver version + required: true + SRP_CLIENT_ID: + description: secrets.SRP_CLIENT_ID + required: false + SRP_CLIENT_SECRET: + description: secrets.SRP_CLIENT_SECRET + required: false +runs: + using: "composite" + steps: + - name: Setup Envs + shell: bash + run: | + echo RTAG=R$(date +%y%m%d%s%N) >> $GITHUB_ENV + - name: Tanzu Dance + id: tanzu-dance + env: + BUNDLE_PATH: ${{ inputs.path }} + REPOSITORY: ${{ inputs.repository }} + VERSION: ${{ inputs.version }} + SRP_CLIENT_ID: ${{ inputs.SRP_CLIENT_ID }} + SRP_CLIENT_SECRET: ${{ inputs.SRP_CLIENT_SECRET }} + shell: bash + working-directory: ${{ inputs.project-directory }} + run: | + if [ "$USE_SRP" == "true" ]; then + if [ "$SRP_SCRIPTS" == "" ]; then + echo "SRP_SCRIPTS not defined" + exit 2 + fi + if [[ "$REPOSITORY" == *"-package"* ]]; then + export OBSERVATION=package-publish + else + export OBSERVATION=repository-publish + fi + export OUTPUT_VERSION=$VERSION + $SRP_SCRIPTS/run-under-observer.sh "${{ github.action_path }}/publish-bundle.sh" + else + "${{ github.action_path }}/publish-bundle.sh" + fi diff --git a/.github/actions/publish-bundle/publish-bundle.sh b/.github/actions/publish-bundle/publish-bundle.sh new file mode 100644 index 0000000000..a6613d697e --- /dev/null +++ b/.github/actions/publish-bundle/publish-bundle.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash +set -e +IMG_PKG_OPT= +if [ "$USE_SRP" == "true" ]; then + IMG_PKG_OPT="--debug" + if [ "$SSL_CERT_FILE" != "" ] && [ -f "$SSL_CERT_FILE" ]; then + IMG_PKG_OPT="$IMG_PKG_OPT --registry-ca-cert-path $SSL_CERT_FILE" + else + IMG_PKG_OPT="$IMG_PKG_OPT --registry-verify-certs=false" + fi +fi +if [ "$IMG_PKG_OPT" != "" ]; then + echo "IMG_PKG_OPT=$IMG_PKG_OPT" +fi +set +e +IMAGE_ID=$(docker images --digests --format json | jq -r --arg IMAGE_URL "$REPOSITORY" --arg TAG "$VERSION" 'select(.Repository == $IMAGE_URL and .Tag == $TAG)' | jq -r --slurp 'map({ID: .ID}) | unique | .[] | .ID') +if [ "$IMAGE_ID" != "" ]; then + echo "Removing all images with ID=$IMAGE_ID for $REPOSITORY" + docker images --digests | grep -F "$IMAGE_ID" + docker rmi --force $IMAGE_ID +fi +set -e +imgpkg push $IMG_PKG_OPT --bundle "$REPOSITORY:$VERSION" --file "$BUNDLE_PATH" diff --git a/.github/dco.yml b/.github/dco.yml new file mode 100644 index 0000000000..0c4b142e9a --- /dev/null +++ b/.github/dco.yml @@ -0,0 +1,2 @@ +require: + members: false diff --git a/.github/labels-manage.yml b/.github/labels-manage.yml deleted file mode 100644 index 03b3f81be2..0000000000 --- a/.github/labels-manage.yml +++ /dev/null @@ -1,196 +0,0 @@ -- name: area/batch-task - color: F9D0C4 - description: Belongs to batch and task -- name: area/composed-tasks - color: F9D0C4 - description: Belongs to ctr -- name: area/dependencies - color: F9D0C4 - description: Belongs project dependencies -- name: area/docker - color: F9D0C4 - description: Belongs to docker -- name: area/dsl - color: F9D0C4 - description: Belongs to dsl -- name: area/documentation - color: F9D0C4 - description: Belongs to documentation -- name: area/fan-in-fan-out - color: F9D0C4 - description: Belongs Fan -- name: area/flo-scdf-integration - color: F9D0C4 - description: Belongs to Flo -- name: area/helm-charts - color: F9D0C4 - description: Belongs to helm -- name: area/micrometer - color: F9D0C4 - description: Belongs to micrometer -- name: area/performance-optimization - color: F9D0C4 - 
description: Belongs to performance -- name: area/security - color: F9D0C4 - description: Belongs to security -- name: area/skipper - color: F9D0C4 - description: Belongs to skipper -- name: area/task-orchestration - color: F9D0C4 - description: Belongs to task orchestration -- name: area/task-scheduler - color: F9D0C4 - description: Belongs to task scheduling -- name: area/tests - color: F9D0C4 - description: Belongs to tests - -- name: for/angular4-upgrade - color: E99695 - description: For Angular 4 update -- name: for/backport - color: E99695 - description: For backporting -- name: for/blocker - color: E99695 - description: For blocking -- name: for/composed-tasks - color: E99695 - description: For Composed Tasks -- name: for/fan-in-fan-out - color: E99695 - description: For Fan -- name: for/flo-scdf-integration - color: E99695 - description: For Flow integration -- name: for/marketing - color: E99695 - description: For marketing -- name: for/spike - color: E99695 - description: For spike -- name: for/team-attention - color: E99695 - description: For team attention -- name: for/ux-improvement - color: E99695 - description: For UX improvement - -- name: status/complete - color: FEF2C0 - description: Issue is now complete -- name: status/declined - color: FEF2C0 - description: Issue has been declined -- name: status/duplicate - color: FEF2C0 - description: There were an existing issue -- name: status/in-progress - color: FEF2C0 - description: Something is happening -- name: status/invalid - color: FEF2C0 - description: Mistake, bogus, old, bye bye -- name: status/need-design - color: FEF2C0 - description: Vague so need some proper design -- name: status/need-feedback - color: FEF2C0 - description: Calling participant to provide feedback -- name: status/need-investigation - color: FEF2C0 - description: Oh need to look under a hood -- name: status/need-triage - color: FEF2C0 - description: Team needs to triage and take a first look -- name: status/on-hold - color: FEF2C0 - description: For various reasons is on hold -- name: status/stale - color: FEF2C0 - description: Marked as stale -- name: status/closed-as-stale - color: FEF2C0 - description: Closed as has been stale - -- name: type/automated-pr - color: D4C5F9 - description: Is an automated pr -- name: type/backport - color: D4C5F9 - description: Is a issue to track backport, use with branch/xxx -- name: type/bug - color: D4C5F9 - description: Is a bug report -- name: type/enhancement - color: D4C5F9 - description: Is an enhancement request -- name: type/epic - color: D4C5F9 - description: Collection of issues -- name: type/feature - color: D4C5F9 - description: Is a feature request -- name: type/help-needed - color: D4C5F9 - description: Calling help -- name: type/idea - color: D4C5F9 - description: Is just an idea -- name: type/task - color: D4C5F9 - description: Something needs to get done -- name: type/technical-debt - color: D4C5F9 - description: Techical Dept -- name: type/question - color: D4C5F9 - description: Is a question - -- name: branch/1.2.x - color: BFDADC - description: Issue for a branch -- name: branch/1.3.x - color: BFDADC - description: Issue for a branch -- name: branch/1.4.x - color: BFDADC - description: Issue for a branch -- name: branch/1.5.x - color: BFDADC - description: Issue for a branch -- name: branch/1.6.x - color: BFDADC - description: Issue for a branch -- name: branch/1.7.x - color: BFDADC - description: Issue for a branch -- name: branch/2.0.x - color: BFDADC - description: Issue for a branch -- 
name: branch/2.1.x
-  color: BFDADC
-  description: Issue for a branch
-- name: branch/2.2.x
-  color: BFDADC
-  description: Issue for a branch
-- name: branch/2.3.x
-  color: BFDADC
-  description: Issue for a branch
-- name: branch/2.4.x
-  color: BFDADC
-  description: Issue for a branch
-- name: branch/2.5.x
-  color: BFDADC
-  description: Issue for a branch
-- name: branch/2.6.x
-  color: BFDADC
-  description: Issue for a branch
-- name: branch/2.7.x
-  color: BFDADC
-  description: Issue for a branch
-- name: branch/2.8.x
-  color: BFDADC
-  description: Issue for a branch
diff --git a/.github/release-files-spec.json b/.github/release-files-spec.json
new file mode 100644
index 0000000000..4ceba9f6d9
--- /dev/null
+++ b/.github/release-files-spec.json
@@ -0,0 +1,28 @@
+{
+  "files": [
+    {
+      "aql": {
+        "items.find": {
+          "$and": [
+            {
+              "@build.name": "${buildname}",
+              "@build.number": "${buildnumber}",
+              "path": {"$match": "org*"}
+            },
+            {
+              "$or": [
+                {
+                  "name": {"$match": "*.pom"}
+                },
+                {
+                  "name": {"$match": "*.jar"}
+                }
+              ]
+            }
+          ]
+        }
+      },
+      "target": "nexus/"
+    }
+  ]
+}
diff --git a/.github/rlnotes.mustache b/.github/rlnotes.mustache
new file mode 100644
index 0000000000..4c59c73f18
--- /dev/null
+++ b/.github/rlnotes.mustache
@@ -0,0 +1,33 @@
+{{#headerslength}}
+# Generic Notes
+{{/headerslength}}
+
+{{#headers}}
+**{{title}}**
+{{body}}
+
+{{/headers}}
+
+# Dependent Projects and Compatibility
+Component | Version
+--- | ---
+{{projects.spring_cloud_deployer.name}}|{{projects.spring_cloud_deployer.version}}
+{{projects.spring_cloud_skipper.name}}|{{projects.spring_cloud_skipper.version}}
+{{projects.spring_cloud_dataflow_ui.name}}|{{projects.spring_cloud_dataflow_ui.version}}
+{{projects.spring_cloud_dataflow.name}}|{{projects.spring_cloud_dataflow.version}}
+
+# Issues
+
+{{#issues}}
+* {{repo}}#{{number}} {{title}}
+{{/issues}}
+
+{{#footerslength}}
+# Additional Notes
+{{/footerslength}}
+
+{{#footers}}
+**{{title}}**
+{{body}}
+
+{{/footers}}
diff --git a/.github/settings.xml b/.github/settings.xml
new file mode 100644
index 0000000000..2f8628196c
--- /dev/null
+++ b/.github/settings.xml
@@ -0,0 +1,180 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<settings>
+  <profiles>
+    <profile>
+      <id>pr</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <repositories>
+        <repository>
+          <id>maven-central</id>
+          <name>Maven Central</name>
+          <url>https://repo.maven.apache.org/maven2</url>
+          <snapshots>
+            <enabled>false</enabled>
+          </snapshots>
+        </repository>
+        <repository>
+          <id>spring-snapshots</id>
+          <name>Spring Snapshots</name>
+          <url>https://repo.spring.io/snapshot</url>
+          <snapshots>
+            <enabled>true</enabled>
+          </snapshots>
+        </repository>
+        <repository>
+          <id>spring-milestones</id>
+          <name>Spring Milestones</name>
+          <url>https://repo.spring.io/milestone</url>
+          <snapshots>
+            <enabled>false</enabled>
+          </snapshots>
+        </repository>
+        <repository>
+          <id>groovy-plugins-release</id>
+          <url>https://groovy.jfrog.io/artifactory/plugins-release</url>
+        </repository>
+      </repositories>
+      <pluginRepositories>
+        <pluginRepository>
+          <id>maven-central</id>
+          <name>Maven Central</name>
+          <url>https://repo.maven.apache.org/maven2</url>
+          <snapshots>
+            <enabled>false</enabled>
+          </snapshots>
+        </pluginRepository>
+        <pluginRepository>
+          <id>spring-snapshots</id>
+          <name>Spring Snapshots</name>
+          <url>https://repo.spring.io/snapshot</url>
+          <snapshots>
+            <enabled>true</enabled>
+          </snapshots>
+        </pluginRepository>
+        <pluginRepository>
+          <id>spring-milestones</id>
+          <name>Spring Milestones</name>
+          <url>https://repo.spring.io/milestone</url>
+          <snapshots>
+            <enabled>false</enabled>
+          </snapshots>
+        </pluginRepository>
+        <pluginRepository>
+          <id>groovy-plugins-release</id>
+          <url>https://groovy.jfrog.io/artifactory/plugins-release</url>
+        </pluginRepository>
+      </pluginRepositories>
+    </profile>
+    <profile>
+      <id>stagingmilestone</id>
+      <repositories>
+        <repository>
+          <id>maven-central</id>
+          <name>Maven Central</name>
+          <url>https://repo.maven.apache.org/maven2</url>
+          <snapshots>
+            <enabled>false</enabled>
+          </snapshots>
+        </repository>
+        <repository>
+          <id>spring-staging</id>
+          <name>Spring Staging</name>
+          <url>https://repo.spring.io/staging</url>
+          <snapshots>
+            <enabled>false</enabled>
+          </snapshots>
+        </repository>
+        <repository>
+          <id>spring-milestones</id>
+          <name>Spring Milestones</name>
+          <url>https://repo.spring.io/milestone</url>
+          <snapshots>
+            <enabled>false</enabled>
+          </snapshots>
+        </repository>
+        <repository>
+          <id>groovy-plugins-release</id>
+          <url>https://groovy.jfrog.io/artifactory/plugins-release</url>
+        </repository>
+      </repositories>
+      <pluginRepositories>
+        <pluginRepository>
+          <id>maven-central</id>
+          <name>Maven Central</name>
+          <url>https://repo.maven.apache.org/maven2</url>
+          <snapshots>
+            <enabled>false</enabled>
+          </snapshots>
+        </pluginRepository>
+        <pluginRepository>
+          <id>spring-staging</id>
+          <name>Spring Staging</name>
+          <url>https://repo.spring.io/libs-staging-local</url>
+          <snapshots>
+            <enabled>false</enabled>
+          </snapshots>
+        </pluginRepository>
+        <pluginRepository>
+          <id>spring-milestones</id>
+          <name>Spring Milestones</name>
+          <url>https://repo.spring.io/milestone</url>
+          <snapshots>
+            <enabled>false</enabled>
+          </snapshots>
+        </pluginRepository>
+        <pluginRepository>
+          <id>groovy-plugins-release</id>
+          <url>https://groovy.jfrog.io/artifactory/plugins-release</url>
+        </pluginRepository>
+      </pluginRepositories>
+    </profile>
+    <profile>
+      <id>stagingrelease</id>
+      <repositories>
+        <repository>
+          <id>maven-central</id>
+          <name>Maven Central</name>
+          <url>https://repo.maven.apache.org/maven2</url>
+          <snapshots>
+            <enabled>false</enabled>
+          </snapshots>
+        </repository>
+        <repository>
+          <id>spring-staging</id>
+          <name>Spring Staging</name>
+          <url>https://repo.spring.io/staging</url>
+          <snapshots>
+            <enabled>false</enabled>
+          </snapshots>
+        </repository>
+        <repository>
+          <id>groovy-plugins-release</id>
+          <url>https://groovy.jfrog.io/artifactory/plugins-release</url>
+        </repository>
+      </repositories>
+      <pluginRepositories>
+        <pluginRepository>
+          <id>maven-central</id>
+          <name>Maven Central</name>
+          <url>https://repo.maven.apache.org/maven2</url>
+          <snapshots>
+            <enabled>false</enabled>
+          </snapshots>
+        </pluginRepository>
+        <pluginRepository>
+          <id>spring-staging</id>
+          <name>Spring Staging</name>
+          <url>https://repo.spring.io/staging</url>
+          <snapshots>
+            <enabled>false</enabled>
+          </snapshots>
+        </pluginRepository>
+        <pluginRepository>
+          <id>groovy-plugins-release</id>
+          <url>https://groovy.jfrog.io/artifactory/plugins-release</url>
+        </pluginRepository>
+      </pluginRepositories>
+    </profile>
+  </profiles>
+</settings>
diff --git a/.github/workflows/build-image.sh b/.github/workflows/build-image.sh
new file mode 100755
index 0000000000..c6ee19bdb6
--- /dev/null
+++ b/.github/workflows/build-image.sh
@@ -0,0 +1,98 @@
+#!/usr/bin/env bash
+if [ -z "$BASH_VERSION" ]; then
+    echo "This script requires Bash. Use: bash $0 $*"
+    exit 1
+fi
+SCDIR=$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")
+set +e
+if [ "$PUSH" == "" ]; then
+    PUSH=true
+fi
+if [ "$TAG" == "" ]; then
+    echo "TAG not found"
+    exit 1
+fi
+if [ "$DEFAULT_JDK" = "" ]; then
+    echo "DEFAULT_JDK not set, using 17"
+    DEFAULT_JDK=17
+else
+    echo "DEFAULT_JDK=$DEFAULT_JDK"
+fi
+
+function download_image() {
+    TARGET=$1
+    ARTIFACT_ID=$2
+    VERSION=$3
+    TARGET_FILE=$TARGET/$ARTIFACT_ID-$VERSION.jar
+    pushd $SCDIR/download-jar > /dev/null || exit
+    ./gradlew downloadJar -PartifactId=$ARTIFACT_ID -PartifactVersion=$VERSION -PartifactPath=$TARGET
+    RC=$?
+    if((RC != 0)); then
+        exit $RC
+    fi
+    popd > /dev/null || exit
+    if [ ! -f $TARGET_FILE ]; then
+        echo "Cannot find $TARGET_FILE"
+        ls -al $TARGET
+        exit 2
+    fi
+    echo "Downloaded $TARGET_FILE"
+}
+
+TARGET=$(realpath $1)
+REPO="$2"
+ARTIFACT_ID=$3
+
+if [ "$ARTIFACT_ID" = "" ]; then
+    echo "Usage: $0 <target-dir> <repository> <artifact-id>"
+fi
+JAR="$TARGET/$ARTIFACT_ID-$TAG.jar"
+if [ ! -f "$JAR" ]; then
+    echo "$JAR not found, downloading"
+    download_image "$TARGET" "$ARTIFACT_ID" "$TAG"
+    RC=$?
+    if((RC != 0)); then
+        exit $RC
+    fi
+fi
+# TODO add Java 21 when paketo supports it
+for v in 17; do
+    echo "Creating: $REPO:$TAG-jdk$v"
+    pack build --builder gcr.io/paketo-buildpacks/builder:base \
+        --path "$JAR" \
+        --trust-builder --verbose \
+        --env BP_JVM_VERSION=$v "$REPO:$TAG-jdk$v"
+    RC=$?
+    if((RC != 0)); then
+        exit $RC
+    fi
+    echo "Created: $REPO:$TAG-jdk$v"
+    if [ "$PUSH" == "true" ]; then
+        if [ "$DELETE_TAGS" == "true" ]; then
+            $SCDIR/docker-rm-tag.sh $REPO $TAG-jdk$v
+        fi
+        docker push "$REPO:$TAG-jdk$v"
+        RC=$?
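+        # docker push can fail even after a successful build (auth, network); abort
+        # below so the default-JDK alias tag is not created from a partial publish.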
+ if ((RC!=0)); then + exit $RC + fi + echo "Pushed $REPO:$TAG-jdk$v" + else + echo "Skipped push $REPO:$TAG-jdk$v" + fi + + if [ "$DEFAULT_JDK" == "$v" ]; then + docker tag "$REPO:$TAG-jdk$DEFAULT_JDK" "$REPO:$TAG" + if [ "$PUSH" == "true" ]; then + if [ "$DELETE_TAGS" == "true" ]; then + $SCDIR/docker-rm-tag.sh $REPO $TAG-jdk$v + fi + docker push "$REPO:$TAG" + echo "Pushed $REPO:$TAG" + else + echo "Skipped push $REPO:$TAG" + fi + fi +done + + diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml new file mode 100644 index 0000000000..4daaf51ccf --- /dev/null +++ b/.github/workflows/build-images.yml @@ -0,0 +1,117 @@ +name: build-images + +on: + workflow_call: + inputs: + version: + type: string + description: 'Version' + required: false + delete-tags: + type: boolean + default: false + description: 'Delete the image tags' + +env: + ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }} + ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} + +jobs: + # test templating before publishing a package + prepare: + name: Prepare Job + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - run: find . -type f -name "*.sh" -exec chmod a+x '{}' \; + - uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'liberica' + - name: Load matrix + id: matrix + shell: bash + run: | + if [ "${{ inputs.version }}" == "" ]; then + ./mvnw help:evaluate -Dexpression=project.version -s .settings.xml -B --no-transfer-progress > /dev/null + VERSION=$(./mvnw help:evaluate -Dexpression=project.version -q -DforceStdout) + echo "::notice::VERSION=$VERSION" + echo "VERSION=$VERSION" >> $GITHUB_ENV + else + echo "::notice::VERSION=${{ inputs.version }}" + echo "VERSION=${{ inputs.version }}" >> $GITHUB_ENV + fi + MATRIX=$(cat .github/workflows/images.json | jq -c) + echo "MATRIX=$MATRIX" + echo "MATRIX=$MATRIX" >> $GITHUB_ENV + outputs: + matrix: ${{ env.MATRIX }} + version: ${{ env.VERSION }} + publish: + name: Publish + runs-on: ubuntu-latest + needs: + - prepare + strategy: + matrix: ${{ fromJson(needs.prepare.outputs.matrix) }} + concurrency: + group: ${{ matrix.name }} + steps: + - uses: actions/checkout@v4 + - run: find . -type f -name "*.sh" -exec chmod a+x '{}' \; + - uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'liberica' + - name: Install pack + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + pack-version: 0.30.0 + - name: Login to docker.io + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Login to GCR + uses: docker/login-action@v3 + with: + registry: gcr.io + username: _json_key + password: ${{ secrets.GCR_JSON_KEY }} + - name: Build and Publish ${{ matrix.name }} + shell: bash + env: + TAG: ${{ needs.prepare.outputs.version }} + DEFAULT_JDK: '17' + DELETE_TAGS: ${{ inputs.delete-tags }} + run: | + .github/workflows/build-image.sh ${{ matrix.path }} ${{ matrix.image }} ${{ matrix.name }} + build-extra: + name: Build extra images + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - run: find . 
-type f -name "*.sh" -exec chmod a+x '{}' \; + - name: Login to docker.io + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Publish Grafana Prometheus + uses: docker/build-push-action@v2 + with: + context: src/grafana/prometheus/docker/grafana + push: true + tags: springcloud/spring-cloud-dataflow-grafana-prometheus:${{ inputs.version }} + - name: Publish Grafana InfluxDB + uses: docker/build-push-action@v2 + with: + context: src/grafana/influxdb/docker/grafana + push: true + tags: springcloud/spring-cloud-dataflow-grafana-influxdb:${{ inputs.version }} + - name: Publish Prometheus Local + uses: docker/build-push-action@v2 + with: + context: src/grafana/prometheus/docker/prometheus-local + push: true + tags: springcloud/spring-cloud-dataflow-prometheus-local:${{ inputs.version }} diff --git a/.github/workflows/build-snapshot-controller.yml b/.github/workflows/build-snapshot-controller.yml index 4810d1e3dd..cc5d89c058 100644 --- a/.github/workflows/build-snapshot-controller.yml +++ b/.github/workflows/build-snapshot-controller.yml @@ -26,30 +26,10 @@ jobs: { "if": "initial == true", "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-dataflow-build", - "ref": "master", - "workflow": "build-snapshot-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow-build' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-dataflow-common", - "ref": "master", - "workflow": "build-snapshot-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow-common' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", "workflow_dispatch": { "owner": "spring-cloud", "repo": "spring-cloud-deployer", - "ref": "master", + "ref": "main", "workflow": "build-snapshot-worker.yml" } }, @@ -58,69 +38,29 @@ jobs: "action": "workflow_dispatch", "workflow_dispatch": { "owner": "spring-cloud", - "repo": "spring-cloud-deployer-local", - "ref": "master", - "workflow": "build-snapshot-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-deployer-local' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-deployer-cloudfoundry", - "ref": "master", - "workflow": "build-snapshot-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-deployer-cloudfoundry' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-deployer-kubernetes", - "ref": "master", - "workflow": "build-snapshot-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-deployer-kubernetes' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-common-security-config", - "ref": "master", - "workflow": "build-snapshot-worker.yml" - } - }, - { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-common-security-config' && data.owner == 'spring-cloud'", - "action": "workflow_dispatch", - "workflow_dispatch": { - "owner": "spring-cloud", - "repo": "spring-cloud-skipper", - "ref": "master", + "repo": "spring-cloud-dataflow-ui", + "ref": "main", 
"workflow": "build-snapshot-worker.yml" } }, { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-skipper' && data.owner == 'spring-cloud'", + "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow-ui' && data.owner == 'spring-cloud'", "action": "workflow_dispatch", "workflow_dispatch": { "owner": "spring-cloud", - "repo": "spring-cloud-dataflow-ui", - "ref": "master", + "repo": "spring-cloud-dataflow", + "ref": "main", "workflow": "build-snapshot-worker.yml" } }, { - "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow-ui' && data.owner == 'spring-cloud'", + "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow' && data.owner == 'spring-cloud'", "action": "workflow_dispatch", "workflow_dispatch": { "owner": "spring-cloud", "repo": "spring-cloud-dataflow", - "ref": "master", - "workflow": "build-snapshot-worker.yml" + "ref": "main", + "workflow": "carvel-worker.yml" } }, { @@ -129,5 +69,12 @@ jobs: "fail": { "message": "hi, something went wrong" } + }, + { + "if": "data.event == 'carvel-failed'", + "action": "fail", + "fail": { + "message": "hi, something went wrong with carvel" + } } ] diff --git a/.github/workflows/build-snapshot-worker.yml b/.github/workflows/build-snapshot-worker.yml index b0f8b98cbc..78fd3d996c 100644 --- a/.github/workflows/build-snapshot-worker.yml +++ b/.github/workflows/build-snapshot-worker.yml @@ -8,101 +8,192 @@ on: description: 'Build Zoo Handler Payload' required: true +env: + ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }} + ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} + jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - uses: actions/setup-java@v1 - with: - java-version: 1.8 - - uses: jvalkeal/setup-maven@v1 - with: - maven-version: 3.6.3 - - uses: jfrog/setup-jfrog-cli@v1 + - uses: actions/checkout@v4 + - run: find . 
-type f -name "*.sh" -exec chmod a+x '{}' \; + - name: 'Install: xmllint' + uses: ./.github/actions/install-xmlutils + - uses: actions/setup-java@v3 with: - version: 1.43.2 - env: - JF_ARTIFACTORY_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} - - name: Install pack - uses: jvalkeal/build-zoo-handler@v0.0.4 - with: - pack-version: 0.18.0 - - # cache maven .m2 - - uses: actions/cache@v1 + java-version: '17' + distribution: 'liberica' + - uses: actions/cache@v3 with: - path: .m2 + path: ~/.m2/repository key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} restore-keys: | ${{ runner.os }}-m2- - - # target deploy repos + - name: Login dockerhub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - uses: jfrog/setup-jfrog-cli@v3 + env: + JF_URL: 'https://repo.spring.io/' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} - name: Configure JFrog Cli run: | jfrog rt mvnc \ - --server-id-deploy=repo.spring.io \ - --repo-deploy-releases=release \ - --repo-deploy-snapshots=snapshot + --server-id-resolve=${{ vars.JF_SERVER_ID }} \ + --server-id-deploy=${{ vars.JF_SERVER_ID }} \ + --repo-resolve-releases=libs-milestone \ + --repo-resolve-snapshots=libs-snapshot \ + --repo-deploy-releases=libs-release-local \ + --repo-deploy-snapshots=libs-snapshot-local echo JFROG_CLI_BUILD_NAME=spring-cloud-dataflow-main >> $GITHUB_ENV echo JFROG_CLI_BUILD_NUMBER=$GITHUB_RUN_NUMBER >> $GITHUB_ENV - - # zoo extract and ensure - name: Extract Zoo Context Properties uses: jvalkeal/build-zoo-handler@v0.0.4 with: dispatch-handler-extract-context-properties: true - - # build and publish to configured target - name: Build and Publish + shell: bash + timeout-minutes: 75 run: | - jfrog rt mvn "-Dmaven.repo.local=.m2" -U -B clean install - jfrog rt build-publish - echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) >> $GITHUB_ENV + ./mvnw --help + ./spring-cloud-dataflow-package/set-package-version.sh + ./mvnw -s .settings.xml package -DskipTests -T 1C --no-transfer-progress + jfrog rt mvn -s .settings.xml install -Pdocs -B --no-transfer-progress + jfrog rt mvn -s .settings.xml install -pl spring-cloud-dataflow-package -B --no-transfer-progress + jfrog rt build-publish + PROJECT_VERSION=$(./mvnw help:evaluate -Dexpression=project.version -q -DforceStdout) + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$PROJECT_VERSION >> $GITHUB_ENV echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname=spring-cloud-dataflow-main >> $GITHUB_ENV echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber=$GITHUB_RUN_NUMBER >> $GITHUB_ENV - - # build images to local repo to get pushed - - name: Build Image - env: - TAG: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} - run: | - pack build \ - --path spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-$TAG.jar \ - --builder gcr.io/paketo-buildpacks/builder:base \ - --env BP_JVM_VERSION=8 springsource-docker-private-local.jfrog.io/spring-cloud-dataflow-server:$TAG - pack build \ - --path spring-cloud-dataflow-composed-task-runner/target/spring-cloud-dataflow-composed-task-runner-$TAG.jar \ - --builder gcr.io/paketo-buildpacks/builder:base \ - --env BP_JVM_VERSION=8 springsource-docker-private-local.jfrog.io/spring-cloud-dataflow-composed-task-runner:$TAG - - # push images to private 
repo - - name: Push image - env: - TAG: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} - run: | - jfrog rt docker-push springsource-docker-private-local.jfrog.io/spring-cloud-dataflow-server:$TAG docker-private-local - jfrog rt docker-push springsource-docker-private-local.jfrog.io/spring-cloud-dataflow-composed-task-runner:$TAG docker-private-local - - # zoo success - - name: Notify Build Success Zoo Handler Controller - uses: jvalkeal/build-zoo-handler@v0.0.4 + echo BUILD_ZOO_HANDLER_spring_cloud_skipper_version=$(./mvnw help:evaluate -Dexpression=spring-cloud-skipper.version -pl spring-cloud-dataflow-parent -q -DforceStdout) >> $GITHUB_ENV +# echo "Determine project version" +# set +e +# echo "::info ::Project version=$PROJECT_VERSION" +# SKIPPER_DOCS_PATTERN=$(.github/workflows/skipper-docs-name.sh $PROJECT_VERSION libs-snapshot-local) +# if [[ "$SKIPPER_DOCS_PATTERN" == *"does not exist"* ]]; then +# echo "::error ::Skipper Docs URL=$SKIPPER_DOCS_PATTERN" +# else +# echo "::info ::Skipper Docs URL=$SKIPPER_DOCS_PATTERN" +# jfrog rt sp --build "$SKIPPER_DOCS_PATTERN" "buildName=$JFROG_CLI_BUILD_NAME;buildNumber=$JFROG_CLI_BUILD_NUMBER" +# echo "::info ::Skipper Docs Set Properties buildName=$JFROG_CLI_BUILD_NAME;buildNumber=$JFROG_CLI_BUILD_NUMBER" +# fi + - name: Test Report + uses: dorny/test-reporter@v1 + if: ${{ success() || failure() }} with: - dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} - dispatch-handler-client-payload-data: > - { - "event": "build-succeed" - } + name: Unit Tests + path: '**/surefire-reports/*.xml' + reporter: java-junit + list-tests: failed + - name: Clean cache + run: | + find ~/.m2/repository -type d -name '*SNAPSHOT' | xargs rm -fr + outputs: + version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + BUILD_ZOO_HANDLER_spring_cloud_dataflow_version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname }} + BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber }} + BUILD_ZOO_HANDLER_spring_cloud_skipper_version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_skipper_version }} + database-tests: + if: github.repository_owner == 'spring-cloud' + runs-on: ubuntu-latest + strategy: + matrix: + db: [ 'ORACLE', 'DB2' ] + steps: + - uses: actions/checkout@v4 + - uses: actions/cache@v3 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-m2-${{ matrix.db }} + - uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'liberica' + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - name: Login dockerhub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - uses: ./.github/actions/install-xmlutils + - name: Test + shell: bash + timeout-minutes: 75 + run: | + ./mvnw clean install -s .settings.xml -DskipTests -am -pl :spring-cloud-dataflow-server,:spring-cloud-skipper-server -B --no-transfer-progress + export ENABLE_${{ matrix.db }}=true + ./mvnw test -s .settings.xml -pl :spring-cloud-dataflow-server,:spring-cloud-skipper-server -Dgroups=${{ matrix.db }} -B --no-transfer-progress + - name: Test Report + uses: dorny/test-reporter@v1 + if: ${{ success() || failure() }} + with: + name: Unit Tests + path: 
'**/surefire-reports/*.xml' + reporter: java-junit + list-tests: failed + - name: Capture Test Results + if: ${{ always() }} + uses: actions/upload-artifact@v4 + with: + name: ${{ matrix.db }}-test-results + path: '**/target/surefire-reports/**/*.*' + retention-days: 7 + if-no-files-found: ignore + # clean m2 cache + - name: Clean cache + run: | + find ~/.m2/repository -type d -name '*SNAPSHOT' | xargs rm -fr + images: + name: Build and Publish Images + needs: [ build ] + uses: ./.github/workflows/build-images.yml + with: + version: ${{ needs.build.outputs.version }} + secrets: inherit - # zoo failure - - name: Notify Build Failure Zoo Handler Controller - if: ${{ failure() }} - uses: jvalkeal/build-zoo-handler@v0.0.4 - with: - dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} - dispatch-handler-client-payload-data: > - { - "event": "build-failed", - "message": "spring-cloud-dataflow failed" - } + wrap: + needs: [ build, images, database-tests ] + runs-on: ubuntu-latest + steps: + - name: Save env + shell: bash + if: ${{ success() }} + run: | + echo "BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=${{ needs.build.outputs.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }}" >> $GITHUB_ENV + echo "BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname=${{ needs.build.outputs.BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname }}" >> $GITHUB_ENV + echo "BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber=${{ needs.build.outputs.BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber }}" >> $GITHUB_ENV + echo "BUILD_ZOO_HANDLER_spring_cloud_skipper_version=${{ needs.build.outputs.BUILD_ZOO_HANDLER_spring_cloud_skipper_version }}" >> $GITHUB_ENV +# zoo success + - name: Notify Build Success Zoo Handler Controller + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "build-succeed" + } +# zoo failure + - name: Notify Build Failure Zoo Handler Controller + if: ${{ failure() }} + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "build-failed", + "message": "spring-cloud-dataflow failed" + } diff --git a/.github/workflows/build-uaa-test.yml b/.github/workflows/build-uaa-test.yml new file mode 100644 index 0000000000..e4f16c1d34 --- /dev/null +++ b/.github/workflows/build-uaa-test.yml @@ -0,0 +1,41 @@ +name: build-uaa-test + +on: + workflow_dispatch: + +jobs: + # test templating before publishing a package + prepare: + name: Prepare Job + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/checkout@v4 + with: + repository: cloudfoundry/uaa + ref: '4.32.0' + path: src/docker/uaa/uaa + - uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'liberica' + - name: Build UAA Test Image + shell: bash + env: + ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }} + ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} + working-directory: ./src/docker/uaa + run: ./build-uaa + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Login dockerhub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Publish Test Image + uses: docker/build-push-action@v6 + with: + context: ./src/docker/uaa + push: true + tags: springcloud/scdf-uaa-test:4.32 diff --git a/.github/workflows/carvel-worker.yml b/.github/workflows/carvel-worker.yml new 
file mode 100644 index 0000000000..86d7defe70 --- /dev/null +++ b/.github/workflows/carvel-worker.yml @@ -0,0 +1,71 @@ +name: Carvel Worker + +on: + workflow_dispatch: + inputs: + build-zoo-handler: + description: 'Build Zoo Handler Payload' + required: true + +jobs: + prepare: + name: Prepare + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + # zoo extract and ensure + - name: Extract Zoo Context Properties + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-extract-context-properties: true + ensure-env: BUILD_ZOO_HANDLER_spring_cloud_dataflow_version + outputs: + dataflow-version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + skipper-version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + server-version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + ctr-version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + publish: + name: Publish + needs: + - prepare + uses: ./.github/workflows/common-carvel.yml + with: + package-name: 'scdf' + package-version: ${{ needs.prepare.outputs.server-version }} + package-bundle: 'springcloud/scdf-oss-package' + repository-bundle: 'springcloud/scdf-oss-repo' + dataflow-version: ${{ needs.prepare.outputs.dataflow-version }} + server-version: ${{ needs.prepare.outputs.server-version }} + ctr-version: ${{ needs.prepare.outputs.dataflow-version }} + skipper-version: ${{ needs.prepare.outputs.skipper-version }} + server-repository: 'springcloud/spring-cloud-dataflow-server' + skipper-repository: 'springcloud/spring-cloud-skipper-server' + secrets: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + finalize: + name: Finalize + runs-on: ubuntu-latest + needs: + - publish + steps: + # zoo success + - name: Notify Build Success Zoo Handler Controller + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "carvel-succeed" + } + + # zoo failure + - name: Notify Build Failure Zoo Handler Controller + if: ${{ failure() }} + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "carvel-failed" + } diff --git a/.github/workflows/cental-sync.yml b/.github/workflows/cental-sync.yml new file mode 100644 index 0000000000..5d794369ab --- /dev/null +++ b/.github/workflows/cental-sync.yml @@ -0,0 +1,55 @@ +name: Central Sync + +on: + workflow_dispatch: + inputs: + buildName: + description: "Artifactory build name" + required: true + buildNumber: + description: "Artifactory build number" + required: true + +jobs: + build: + runs-on: ubuntu-latest + steps: + + # to get spec file in .github + - uses: actions/checkout@v4 + + # Setup jfrog cli + - uses: jfrog/setup-jfrog-cli@v3 + env: + JF_URL: 'https://repo.spring.io/' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} + + + # Extract build id from input + - name: Extract Build Id + run: | + echo JFROG_CLI_BUILD_NAME=${{ github.event.inputs.buildName }} >> $GITHUB_ENV + echo JFROG_CLI_BUILD_NUMBER=${{ github.event.inputs.buildNumber }} >> $GITHUB_ENV + + # Download released files + - name: Download Release Files + run: | + jfrog rt download \ + --spec .github/release-files-spec.json \ + --spec-vars "buildname=$JFROG_CLI_BUILD_NAME;buildnumber=$JFROG_CLI_BUILD_NUMBER" + + # Create checksums, signatures and create staging repo on central and upload + - uses: 
jvalkeal/nexus-sync@v0 + with: + url: ${{ secrets.OSSRH_URL }} + username: ${{ secrets.OSSRH_S01_TOKEN_USERNAME }} + password: ${{ secrets.OSSRH_S01_TOKEN_PASSWORD }} + staging-profile-name: ${{ secrets.OSSRH_STAGING_PROFILE_NAME }} + create: true + upload: true + close: true + release: true + generate-checksums: true + pgp-sign: true + pgp-sign-passphrase: ${{ secrets.GPG_PASSPHRASE }} + pgp-sign-private-key: ${{ secrets.GPG_PRIVATE_KEY }} diff --git a/.github/workflows/central-release.yml b/.github/workflows/central-release.yml new file mode 100644 index 0000000000..080596238b --- /dev/null +++ b/.github/workflows/central-release.yml @@ -0,0 +1,85 @@ +name: Central Release + +on: + workflow_dispatch: + inputs: + build-zoo-handler: + description: 'Build Zoo Handler Payload' + required: true + +jobs: + central: + runs-on: ubuntu22-8-32-OSS + environment: central + steps: + + # to get spec file in .github + - uses: actions/checkout@v4 + + - uses: jfrog/setup-jfrog-cli@v3 + env: + JF_URL: 'https://repo.spring.io/' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} + + # zoo extract and ensure + - name: Extract Zoo Context Properties + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-extract-context-properties: true + ensure-env: | + BUILD_ZOO_HANDLER_spring_cloud_deployer_buildname + BUILD_ZOO_HANDLER_spring_cloud_deployer_buildnumber + BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildname + BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildnumber + BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname + BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber + + # Download released files + - name: Download Release Files + run: | + jfrog rt download \ + --spec .github/release-files-spec.json \ + --spec-vars "buildname=$BUILD_ZOO_HANDLER_spring_cloud_deployer_buildname;buildnumber=$BUILD_ZOO_HANDLER_spring_cloud_deployer_buildnumber" + jfrog rt download \ + --spec .github/release-files-spec.json \ + --spec-vars "buildname=$BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildname;buildnumber=$BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildnumber" + jfrog rt download \ + --spec .github/release-files-spec.json \ + --spec-vars "buildname=$BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname;buildnumber=$BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber" + + # Create checksums, signatures and create staging repo on central and upload + - uses: jvalkeal/nexus-sync@v0 + with: + url: ${{ secrets.OSSRH_URL }} + username: ${{ secrets.OSSRH_S01_TOKEN_USERNAME }} + password: ${{ secrets.OSSRH_S01_TOKEN_PASSWORD }} + staging-profile-name: ${{ secrets.OSSRH_STAGING_PROFILE_NAME }} + create: true + upload: true + close: true + release: true + generate-checksums: true + pgp-sign: true + pgp-sign-passphrase: ${{ secrets.GPG_PASSPHRASE }} + pgp-sign-private-key: ${{ secrets.GPG_PRIVATE_KEY }} + + # zoo success + - name: Notify Build Success Zoo Handler Controller + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "central-succeed" + } + + # zoo failure + - name: Notify Build Failure Zoo Handler Controller + if: ${{ failure() }} + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.REPO_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "central-failed" + } diff --git a/.github/workflows/ci-carvel.yml b/.github/workflows/ci-carvel.yml new file mode 100644 index 0000000000..b114c8d9e9 --- /dev/null +++ 
b/.github/workflows/ci-carvel.yml @@ -0,0 +1,90 @@ +name: CI Carvel + +on: + push: + branches: + - main + paths: + - 'src/carvel/**' + workflow_dispatch: + inputs: + branch: + description: 'Branch or tag to use to determine version numbers' + package_version: + description: 'Package version to publish. If blank will match dataflow version' + required: false + +jobs: + # test templating before publishing a package + prepare: + name: Prepare + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/checkout@v4 + with: + ref: ${{ inputs.branch && inputs.branch || github.ref }} + path: 'target' + - run: find . -type f -name "*.sh" -exec chmod a+x '{}' \; + - uses: actions/setup-node@v2 + with: + node-version: 16 + - uses: carvel-dev/setup-action@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + - name: Setup npm + shell: bash + working-directory: src/carvel + run: npm install + - name: npm ci + shell: bash + working-directory: src/carvel + run: npm ci + - name: Lint + shell: bash + working-directory: src/carvel + run: npm run format-check + - name: Test + shell: bash + working-directory: src/carvel + run: npm test + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - name: Configure Env + run: | + ROOT=$(realpath $PWD) + pushd target + source $ROOT/.github/workflows/export-app-versions.sh + echo DATAFLOW_VERSION=$DATAFLOW_VERSION >> $GITHUB_ENV + echo SKIPPER_VERSION=$SKIPPER_VERSION >> $GITHUB_ENV + if [ "${{ inputs.package_version }}" != "" ]; then + echo PACKAGE_VERSION=${{ inputs.package_version }} >> $GITHUB_ENV + else + echo PACKAGE_VERSION=$DATAFLOW_VERSION >> $GITHUB_ENV + fi + popd + outputs: + dataflow-version: ${{ env.DATAFLOW_VERSION }} + skipper-version: ${{ env.SKIPPER_VERSION }} + package-version: ${{ env.PACKAGE_VERSION }} + publish: + name: Publish + needs: + - prepare + uses: ./.github/workflows/common-carvel.yml + with: + package-name: 'scdf' + package-version: ${{ needs.prepare.outputs.package-version }} + server-repository: 'springcloud/spring-cloud-dataflow-server' + skipper-repository: 'springcloud/spring-cloud-skipper-server' + package-bundle: 'springcloud/scdf-oss-package' + repository-bundle: 'springcloud/scdf-oss-repo' + dataflow-version: ${{ needs.prepare.outputs.dataflow-version }} + server-version: ${{ needs.prepare.outputs.dataflow-version }} + ctr-version: ${{ needs.prepare.outputs.dataflow-version }} + skipper-version: ${{ needs.prepare.outputs.skipper-version }} + secrets: + DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/ci-images.yml b/.github/workflows/ci-images.yml new file mode 100644 index 0000000000..de66450110 --- /dev/null +++ b/.github/workflows/ci-images.yml @@ -0,0 +1,52 @@ +name: CI - Images + +on: + workflow_dispatch: + inputs: + version: + type: string + description: 'Version to build' + required: false + delete-tags: + required: false + type: boolean + default: false + description: 'Delete the image tags' + +env: + ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }} + ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} + +jobs: + prepare: + name: Prepare Build + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + if: ${{ inputs.version == null || inputs.version == '' }} + - uses: actions/setup-java@v3 + if: ${{ inputs.version == null || inputs.version == '' }} + with: + java-version: '17' + distribution: 'liberica' + - name: 
Version from POM + if: ${{ inputs.version == null || inputs.version == '' }} + shell: bash + run: | + ./mvnw --version + ./mvnw help:evaluate -s .settings.xml -Dexpression=project.version --no-transfer-progress > /dev/null + echo "VERSION=$(./mvnw help:evaluate -Dexpression=project.version -q -DforceStdout)" >> $GITHUB_ENV + - name: Version from Input + if: ${{ inputs.version != null && inputs.version != '' }} + shell: bash + run: echo "VERSION=${{ inputs.version }}" >> $GITHUB_ENV + outputs: + version: ${{ env.VERSION }} + images: + name: Build and Publish Images + needs: [ prepare ] + uses: ./.github/workflows/build-images.yml + with: + version: ${{ needs.prepare.outputs.version }} + delete-tags: ${{ inputs.delete-tags }} + secrets: inherit \ No newline at end of file diff --git a/.github/workflows/ci-it-db.yml b/.github/workflows/ci-it-db.yml new file mode 100644 index 0000000000..af2ab18068 --- /dev/null +++ b/.github/workflows/ci-it-db.yml @@ -0,0 +1,131 @@ +name: CI IT Database + +on: + workflow_dispatch: + +env: + ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }} + ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} + +jobs: + integration-test: + name: DB IT on ${{ matrix.group }} + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + group: [mariadb, postgres] + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'liberica' + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - name: Login dockerhub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: 'Action: Run Db IT' + run: | + ./mvnw clean install -DskipTests -T 1C -s .settings.xml -pl spring-cloud-dataflow-server -am -B --no-transfer-progress + ./mvnw \ + -s .settings.xml \ + -pl spring-cloud-dataflow-server \ + -Dgroups=${{ matrix.group }} \ + -Pfailsafe \ + --batch-mode --no-transfer-progress \ + verify + - name: Integration Test Report for ${{ matrix.group }} + id: test_report + uses: dorny/test-reporter@v1 + if: ${{ success() || failure() }} + with: + name: Integration Tests - ${{ matrix.group }} + path: '**/surefire-reports/*.xml' + reporter: java-junit + list-tests: failed + - name: Publish Test Url for ${{ matrix.group }} + shell: bash + run: | + echo "::notice ::Test report for ${{ matrix.group }} published at ${{ steps.test_report.outputs.url_html }}" + - name: 'Action: Upload Unit Test Results' + if: ${{ success() || failure() }} + uses: actions/upload-artifact@v4 + with: + name: ${{ matrix.group }}-test-results-surefire + path: './**/target/surefire-reports/**/*.*' + retention-days: 7 + if-no-files-found: ignore + - name: 'Action: Upload Integration Test Results' + if: ${{ success() || failure() }} + uses: actions/upload-artifact@v4 + with: + name: ${{ matrix.group }}-test-results-failsafe + path: './**/target/failsafe-reports/**/*.*' + retention-days: 7 + if-no-files-found: ignore + large-container-tests: + strategy: + matrix: + database: ['DB2', 'ORACLE'] + name: ${{ matrix.database }} Tests + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - run: find . 
-type f -name "*.sh" -exec chmod a+x '{}' \; + - uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'liberica' + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - name: Login dockerhub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: 'Action: Run ${{ matrix.database }} Tests' + run: | + export ENABLE_${{ matrix.database }}=true + ./mvnw clean install -DskipTests -T 1C -s .settings.xml -pl :spring-cloud-dataflow-server,:spring-cloud-skipper-server -am -B --no-transfer-progress + ./mvnw \ + -s .settings.xml \ + -pl :spring-cloud-dataflow-server,:spring-cloud-skipper-server \ + -Dgroups=${{ matrix.database }} \ + --batch-mode --no-transfer-progress \ + test + - name: Test Report for ${{ matrix.database }} + id: test_report + uses: dorny/test-reporter@v1 + if: ${{ success() || failure() }} + with: + name: Tests - ${{ matrix.database }} + path: '**/surefire-reports/*.xml' + reporter: java-junit + list-tests: failed + - name: Publish Test Url for ${{ matrix.database }} + shell: bash + run: | + echo "::notice ::Test report for ${{ matrix.database }} published at ${{ steps.test_report.outputs.url_html }}" + - name: 'Action: Upload Unit Test Results' + if: ${{ success() || failure() }} + uses: actions/upload-artifact@v4 + with: + name: ${{ matrix.database }}-test-results-surefire + path: './**/target/surefire-reports/**/*.*' + retention-days: 7 + if-no-files-found: ignore + completed: + runs-on: ubuntu-latest + needs: [ large-container-tests, integration-test ] + steps: + - name: 'Done' + shell: bash + run: echo "::notice ::Done" diff --git a/.github/workflows/ci-it-performance.yml b/.github/workflows/ci-it-performance.yml new file mode 100644 index 0000000000..c7ff6727d4 --- /dev/null +++ b/.github/workflows/ci-it-performance.yml @@ -0,0 +1,63 @@ +name: CI IT Performance + +on: + workflow_dispatch: + +env: + ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }} + ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} + +jobs: + test: + name: Performance IT + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - run: find . 
-type f -name "*.sh" -exec chmod a+x '{}' \; + - uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'liberica' + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - name: Login dockerhub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Run Performance IT + run: | + ./mvnw clean install -DskipTests -T 1C -s .settings.xml -pl spring-cloud-dataflow-server -am -B --no-transfer-progress + ./mvnw -s .settings.xml \ + -pl spring-cloud-dataflow-server \ + -Dgroups=performance \ + -Pfailsafe \ + --batch-mode --no-transfer-progress \ + verify + - name: Test Report + uses: dorny/test-reporter@v1 + if: ${{ success() || failure() }} + with: + name: Integration Tests + path: '**/surefire-reports/*.xml' + reporter: java-junit + list-tests: failed + - name: 'Action: Upload Unit Test Results' + if: ${{ always() }} + uses: actions/upload-artifact@v4 + with: + name: performance-test-results-surefire + path: './**/target/surefire-reports/**/*.*' + retention-days: 7 + if-no-files-found: ignore + - name: 'Action: Upload Integration Test Results' + if: ${{ always() }} + uses: actions/upload-artifact@v4 + with: + name: performance-test-results-failsafe + path: './**/target/failsafe-reports/**/*.*' + retention-days: 7 + if-no-files-found: ignore diff --git a/.github/workflows/ci-it-security.yml b/.github/workflows/ci-it-security.yml new file mode 100644 index 0000000000..2638739ce2 --- /dev/null +++ b/.github/workflows/ci-it-security.yml @@ -0,0 +1,55 @@ +name: CI IT Security + +on: + workflow_dispatch: + +env: + ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }} + ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} + +jobs: + test: + name: Security IT + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - run: find . -type f -name "*.sh" -exec chmod a+x '{}' \; + - uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'liberica' + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - name: Login dockerhub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Run Security IT + shell: bash + run: | + ./mvnw -s .settings.xml \ + -pl spring-cloud-dataflow-server \ + -Dgroups=oauth \ + -Pfailsafe \ + --batch-mode --no-transfer-progress \ + test + - name: Test Report + uses: dorny/test-reporter@v1 + if: ${{ success() || failure() }} + with: + name: Integration Tests + path: '**/surefire-reports/*IT.xml' + reporter: java-junit + list-tests: failed + - name: 'Action: Upload Integration Test Results' + if: ${{ always() }} + uses: actions/upload-artifact@v4 + with: + name: security-test-results-failsafe + path: './**/target/failsafe-reports/**/*.*' + retention-days: 7 + if-no-files-found: ignore diff --git a/.github/workflows/ci-pr.yml b/.github/workflows/ci-pr.yml new file mode 100644 index 0000000000..d211270c47 --- /dev/null +++ b/.github/workflows/ci-pr.yml @@ -0,0 +1,49 @@ +name: CI PRs + +on: + pull_request: + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - run: find . 
-type f -name "*.sh" -exec chmod a+x '{}' \; + - uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'liberica' + - name: Build + shell: bash + timeout-minutes: 75 + run: | + ./mvnw -s .github/settings.xml -B -Pdocs clean install --no-transfer-progress -T 1C + - name: Install XML Utils + if: ${{ failure() }} + uses: ./.github/actions/install-xmlutils + - name: Test Errors Report + if: ${{ failure() }} + shell: bash + run: ./src/scripts/print-test-errors.sh + + scan: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Run Trivy vulnerability scanner in repo mode + uses: aquasecurity/trivy-action@master + with: + scan-type: 'fs' + ignore-unfixed: true + format: 'table' + severity: 'CRITICAL,HIGH' + - name: 'Scanned' + shell: bash + run: echo "::notice ::Scanned" + done: + runs-on: ubuntu-latest + needs: [ scan, build ] + steps: + - name: 'Done' + shell: bash + run: echo "::notice ::Done" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index cae489979a..a2b07393fa 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,35 +1,205 @@ -name: Java CI - master +name: CI on: + workflow_dispatch: + inputs: + enableSecurityScan: + type: boolean + default: true + description: 'Enable security scan with Trivy' push: branches: - - master + - 'main' paths-ignore: - '.github/**' - pull_request: - branches: - - master + +env: + ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }} + ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} + TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db,aquasec/trivy-db,ghcr.io/aquasecurity/trivy-db + TRIVY_JAVA_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-java-db,aquasec/trivy-java-db,ghcr.io/aquasecurity/trivy-java-db jobs: build: - - runs-on: ${{ matrix.os }} + if: github.repository_owner == 'spring-cloud' + runs-on: ubuntu-latest + concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + steps: + - uses: actions/checkout@v4 + - uses: ./.github/actions/install-xmlutils + - run: find . 
-type f -name "*.sh" -exec chmod a+x '{}' \; +# cache maven repo + - uses: actions/cache@v3 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-m2- +# jdk17 + - uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'liberica' + - name: Login dockerhub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - uses: jfrog/setup-jfrog-cli@v3 + env: + JF_URL: 'https://repo.spring.io/' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} + - name: Configure JFrog Cli + run: | + jfrog rt mvnc \ + --server-id-resolve=${{ vars.JF_SERVER_ID }} \ + --server-id-deploy=${{ vars.JF_SERVER_ID }} \ + --repo-resolve-releases=libs-milestone \ + --repo-resolve-snapshots=libs-snapshot \ + --repo-deploy-releases=libs-release-local \ + --repo-deploy-snapshots=libs-snapshot-local + echo JFROG_CLI_BUILD_NAME=spring-cloud-dataflow-main >> $GITHUB_ENV + echo JFROG_CLI_BUILD_NUMBER=$GITHUB_RUN_NUMBER >> $GITHUB_ENV + echo "::notice::$(./mvnw --version)" +# build and publish + - name: Build and Publish + shell: bash + timeout-minutes: 75 + run: | + ./mvnw help:evaluate -s .settings.xml -Dexpression=project.version -B --no-transfer-progress + spring_cloud_dataflow_version=$(./mvnw help:evaluate -Dexpression=project.version -q -DforceStdout) + echo spring_cloud_dataflow_version="$spring_cloud_dataflow_version" >> $GITHUB_ENV + export spring_cloud_dataflow_version + ./spring-cloud-dataflow-package/set-package-version.sh + ./mvnw -s .settings.xml package -DskipTests -T 1C --no-transfer-progress + jfrog rt mvn -s .settings.xml install -Pdocs -B --no-transfer-progress + jfrog rt mvn -s .settings.xml install -pl spring-cloud-dataflow-package -B --no-transfer-progress + jfrog rt build-publish + export JFROG_CLI_BUILD_NAME="${JFROG_CLI_BUILD_NAME/spring-cloud-dataflow/spring-cloud-skipper}" + PROJECT_VERSION=$(./mvnw help:evaluate -Dexpression=project.version -q -DforceStdout) +# set +e +# echo "::info ::Project version=$PROJECT_VERSION" +# SKIPPER_DOCS_PATTERN=$(.github/workflows/skipper-docs-name.sh $PROJECT_VERSION libs-snapshot-local) +# if [[ "$SKIPPER_DOCS_PATTERN" == *"does not exist"* ]]; then +# echo "::error ::Skipper Docs URL=$SKIPPER_DOCS_PATTERN" +# else +# echo "::info ::Skipper Docs URL=$SKIPPER_DOCS_PATTERN" +# jfrog sp --build "$SKIPPER_DOCS_PATTERN" "buildName=$JFROG_CLI_BUILD_NAME;buildNumber=$JFROG_CLI_BUILD_NUMBER" +# echo "::info ::Skipper Docs Set Properties buildName=$JFROG_CLI_BUILD_NAME;buildNumber=$JFROG_CLI_BUILD_NUMBER" +# fi + - name: Test Report + uses: dorny/test-reporter@v1 + if: ${{ success() || failure() }} + with: + name: Unit Tests + path: '**/surefire-reports/*.xml' + reporter: java-junit + list-tests: failed + - name: Capture Test Results + if: ${{ always() }} + uses: actions/upload-artifact@v4 + with: + name: test-results + path: '**/target/surefire-reports/**/*.*' + retention-days: 7 + if-no-files-found: ignore +# clean m2 cache + - name: Clean cache + run: | + find ~/.m2/repository -type d -name '*SNAPSHOT' | xargs rm -fr + outputs: + version: ${{ env.spring_cloud_dataflow_version }} + database-tests: + if: github.repository_owner == 'spring-cloud' + runs-on: ubuntu-latest strategy: matrix: - os: [ubuntu-latest] - + db: [ 'ORACLE', 'DB2' ] steps: - - uses: actions/checkout@v2 - - 
name: Cache maven repository - uses: actions/cache@v1 + - uses: actions/checkout@v4 + - run: find . -type f -name "*.sh" -exec chmod a+x '{}' \; + - uses: ./.github/actions/install-xmlutils + - uses: actions/cache@v3 with: - path: .m2 + path: ~/.m2/repository key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} restore-keys: | - ${{ runner.os }}-m2- - - name: Set up JDK 1.8 - uses: actions/setup-java@v1 + ${{ runner.os }}-m2- + - uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'liberica' + - name: Login dockerhub + uses: docker/login-action@v3 with: - java-version: 1.8 - - name: Build with Maven - run: mvn "-Dmaven.repo.local=.m2" -U -B install + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Test + shell: bash + timeout-minutes: 75 + run: | + ./mvnw clean install -s .settings.xml -DskipTests -am -pl :spring-cloud-dataflow-server,:spring-cloud-skipper-server -B --no-transfer-progress + export ENABLE_${{ matrix.db }}=true + ./mvnw test -s .settings.xml -pl :spring-cloud-dataflow-server,:spring-cloud-skipper-server -Dgroups=${{ matrix.db }} -B --no-transfer-progress + - name: Test Report + uses: dorny/test-reporter@v1 + if: ${{ success() || failure() }} + with: + name: Unit Tests + path: '**/surefire-reports/*.xml' + reporter: java-junit + list-tests: failed + - name: Capture Test Results + if: ${{ always() }} + uses: actions/upload-artifact@v4 + with: + name: test-results + path: '**/target/surefire-reports/**/*.*' + retention-days: 7 + if-no-files-found: ignore + # clean m2 cache + - name: Clean cache + run: | + find ~/.m2/repository -type d -name '*SNAPSHOT' | xargs rm -fr + + images: + name: Build and Publish Images + needs: + - build + uses: ./.github/workflows/build-images.yml + with: + version: ${{ needs.build.outputs.version }} + secrets: inherit + scan: + runs-on: ubuntu-latest + if: ${{ inputs.enableSecurityScan == null || inputs.enableSecurityScan }} + steps: + - uses: actions/checkout@v4 + - name: Run Trivy vulnerability scanner in repo mode + uses: aquasecurity/trivy-action@master + with: + scan-type: 'fs' + ignore-unfixed: true + format: 'sarif' + output: 'trivy-results.sarif' + severity: 'CRITICAL,HIGH' + - name: Upload Trivy scan results to GitHub Security tab + uses: github/codeql-action/upload-sarif@v2 + with: + sarif_file: 'trivy-results.sarif' + - name: 'Scanned' + shell: bash + run: echo "::notice ::Scanned" + done: + runs-on: ubuntu-latest + needs: [ scan, build, images, database-tests ] + steps: + - name: 'Done' + shell: bash + run: echo "::notice ::Done" diff --git a/.github/workflows/common-carvel.yml b/.github/workflows/common-carvel.yml new file mode 100644 index 0000000000..4a9ef2925c --- /dev/null +++ b/.github/workflows/common-carvel.yml @@ -0,0 +1,113 @@ +name: common-carvel + +on: + workflow_call: + inputs: + package-name: + type: string + description: 'Package Name' + required: true + package-version: + type: string + description: 'Package Version' + required: true + package-bundle: + type: string + description: 'Package Bundle name' + required: true + repository-bundle: + type: string + description: 'Repository Bundle name' + required: true + dataflow-version: + type: string + description: 'Spring Cloud Data Flow Container Version' + required: true + server-version: + type: string + description: 'Server Version' + required: true + skipper-version: + type: string + description: 'Spring Cloud Skipper Container Version' + required: true + ctr-version: + type: string + 
description: 'Composed Task Runner Container Version' + required: true + server-repository: + type: string + description: 'Docker repo for Data Flow Server' + required: true + skipper-repository: + type: string + description: 'Docker repo for Skipper Server' + required: true + secrets: + DOCKERHUB_USERNAME: + DOCKERHUB_TOKEN: + +jobs: + # test templating before publishing a package + publish: + name: Publish + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - run: find . -type f -name "*.sh" -exec chmod a+x '{}' \; + - uses: carvel-dev/setup-action@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + - name: Login dockerhub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + # Builds a package bundle + - name: Build Package Bundle + id: build-package-bundle + uses: ./.github/actions/build-package-bundle + with: + dataflow-version: ${{ inputs.dataflow-version }} + skipper-version: ${{ inputs.skipper-version }} + skipper-repository: ${{ inputs.skipper-repository }} + server-version: ${{ inputs.server-version }} + server-repository: ${{ inputs.server-repository }} + ctr-version: ${{ inputs.ctr-version }} + package-name: ${{ inputs.package-name }} + package-bundle-template: 'src/carvel/templates/bundle/package' + imgpkg-lock-template: 'src/carvel/templates/imgpkg' + config: 'src/carvel/config' + project-directory: '.' + + # Publishes scdf package bundle as it needs to be in place + # before repository bundle can be created. + - name: Publish Package Bundle + id: publish-package-bundle + uses: ./.github/actions/publish-bundle + with: + path: '${{ steps.build-package-bundle.outputs.bundle-path }}' + repository: ${{ inputs.package-bundle }} + version: ${{ inputs.package-version }} + project-directory: '.' + + # Builds a repository bundle + - name: Build Repository Bundle + id: build-repository-bundle + uses: ./.github/actions/build-repository-bundle + with: + version: ${{ inputs.package-version }} + repo-bundle-template: 'src/carvel/templates/bundle/repo' + package-bundle-repository: ${{ inputs.package-bundle }} + package-name: ${{ inputs.package-name }} + project-directory: '.' + + # Publishes scdf repo bundle + - name: Publish Repository Bundle + id: publish-repository-bundle + uses: ./.github/actions/publish-bundle + with: + path: '${{ steps.build-repository-bundle.outputs.bundle-path }}' + repository: ${{ inputs.repository-bundle }} + version: ${{ inputs.package-version }} + project-directory: '.' diff --git a/.github/workflows/docker-rm-tag.sh b/.github/workflows/docker-rm-tag.sh new file mode 100755 index 0000000000..0e3635c156 --- /dev/null +++ b/.github/workflows/docker-rm-tag.sh @@ -0,0 +1,21 @@ +#!/usr/bin/env bash +# springcloud/spring-cloud-skipper-server +IMAGE=$1 +# 2.11.0 +# 2.11.0-jdk8 +# 2.11.0-jdk11 +# 2.11.0-jdk17 +TAG=$2 + +login_data() { +cat < '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac +done + +# This is normally unused +# shellcheck disable=SC2034 +APP_BASE_NAME=${0##*/} +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD=maximum + +warn () { + echo "$*" +} >&2 + +die () { + echo + echo "$*" + echo + exit 1 +} >&2 + +# OS specific support (must be 'true' or 'false'). 
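+# These flags drive the Cygwin/MSYS path conversion and the file-descriptor limit handling further down in this script.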
+cygwin=false +msys=false +darwin=false +nonstop=false +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD=$JAVA_HOME/jre/sh/java + else + JAVACMD=$JAVA_HOME/bin/java + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD=java + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then + case $MAX_FD in #( + max*) + # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC3045 + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC3045 + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac +fi + +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. + +# For Cygwin or MSYS, switch paths to Windows format before running java +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) + + # Now convert the arguments - kludge to limit ourselves to /bin/sh + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) + fi + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. + shift # remove old arg + set -- "$@" "$arg" # push replacement arg + done +fi + +# Collect all arguments for the java command; +# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of +# shell script including quotes and variable substitutions, so put them in +# double quotes to make sure that they get re-expanded; and +# * put everything else in single quotes, so that it's not re-expanded. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Stop when "xargs" is not available. +if ! command -v xargs >/dev/null 2>&1 +then + die "xargs is not available" +fi + +# Use "xargs" to parse quoted args. 
+# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. +# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. +# + +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' + +exec "$JAVACMD" "$@" diff --git a/.github/workflows/download-jar/gradlew.bat b/.github/workflows/download-jar/gradlew.bat new file mode 100644 index 0000000000..93e3f59f13 --- /dev/null +++ b/.github/workflows/download-jar/gradlew.bat @@ -0,0 +1,92 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. +@rem + +@if "%DEBUG%"=="" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%"=="" set DIRNAME=. +@rem This is normally unused +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if %ERRORLEVEL% equ 0 goto execute + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto execute + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. 
+ +goto fail + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* + +:end +@rem End local scope for the variables with windows NT shell +if %ERRORLEVEL% equ 0 goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +set EXIT_CODE=%ERRORLEVEL% +if %EXIT_CODE% equ 0 set EXIT_CODE=1 +if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% +exit /b %EXIT_CODE% + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/.github/workflows/download-jar/settings.gradle b/.github/workflows/download-jar/settings.gradle new file mode 100644 index 0000000000..b1256fe4e6 --- /dev/null +++ b/.github/workflows/download-jar/settings.gradle @@ -0,0 +1,10 @@ +/* + * This file was generated by the Gradle 'init' task. + * + * The settings file is used to specify which projects to include in your build. + * + * Detailed information about configuring a multi-project build in Gradle can be found + * in the user manual at https://docs.gradle.org/7.6.2/userguide/multi_project_builds.html + */ + +rootProject.name = 'download-maven' diff --git a/.github/workflows/export-app-versions.sh b/.github/workflows/export-app-versions.sh new file mode 100755 index 0000000000..0c47122ca5 --- /dev/null +++ b/.github/workflows/export-app-versions.sh @@ -0,0 +1,24 @@ +#!/usr/bin/env bash +set +e + +./mvnw --version +./mvnw help:evaluate -Dexpression=project.version > /dev/null +DATAFLOW_VERSION=$(./mvnw help:evaluate -Dexpression=project.version -q -DforceStdout) +RC=$? +if ((RC!=0)); then + echo "DATAFLOW_VERSION=$DATAFLOW_VERSION" + exit $RC +fi +echo "DATAFLOW_VERSION=$DATAFLOW_VERSION" +SKIPPER_VERSION=$(./mvnw help:evaluate -Dexpression=spring-cloud-skipper.version -pl spring-cloud-dataflow-parent -q -DforceStdout) +if [[ "$SKIPPER_VERSION" = *"ERROR"* ]]; then + SKIPPER_VERSION=$(./mvnw help:evaluate -Dexpression=spring-cloud-skipper.version -q -DforceStdout) +fi +RC=$? 
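+# RC is the exit status of the if-block above: non-zero only when the fallback lookup itself failed (0 when no fallback was needed).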
+if ((RC!=0)); then + echo "SKIPPER_VERSION=$SKIPPER_VERSION" + exit $RC +fi +echo "SKIPPER_VERSION=$SKIPPER_VERSION" +export DATAFLOW_VERSION +export SKIPPER_VERSION diff --git a/.github/workflows/fix-deployment-files.yml b/.github/workflows/fix-deployment-files.yml index e377233d68..f5ef91bbe5 100644 --- a/.github/workflows/fix-deployment-files.yml +++ b/.github/workflows/fix-deployment-files.yml @@ -1,9 +1,13 @@ -name: Fix Deployment Files - master +name: Fix Deployment Files on: push: branches: - - master + - main + +env: + ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }} + ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} jobs: build: @@ -14,23 +18,23 @@ jobs: os: [ubuntu-latest] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - - name: Set up JDK 1.8 - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: - java-version: 1.8 + java-version: '17' + distribution: 'liberica' - name: Build with Maven - run: mvn process-resources -P deploymentfiles + run: ./mvnw process-resources -P deploymentfiles -B --no-transfer-progress - name: Check Deployment Files uses: peter-evans/create-pull-request@v3 with: token: ${{ secrets.GITHUB_TOKEN }} commit-message: Update k8s deployment files - committer: Spring Buildmaster - author: Spring Buildmaster + committer: SCDF Build Bot + author: SCDF Build Bot body: | Detected a change with `mvn process-resources -P deploymentfiles`. @@ -40,4 +44,3 @@ jobs: assignees: ilayaperumalg reviewers: jvalkeal,ilayaperumalg branch: github-actions/update-deployment-files - diff --git a/.github/workflows/generate-release-notes.sh b/.github/workflows/generate-release-notes.sh new file mode 100755 index 0000000000..a50aea60f0 --- /dev/null +++ b/.github/workflows/generate-release-notes.sh @@ -0,0 +1,91 @@ +#!/usr/bin/env bash +RUNNER_TMP="$1" +DEPLOYER_VERSION="$2" +DATAFLOW_UI_VERSION="$3" +DATAFLOW_VERSION="$4" +if [ "$4" == "" ]; then + echo "Usage: <runner-tmp> <deployer-version> <dataflow-ui-version> <dataflow-version>" + exit 1 +fi +RELEASE_NOTES_FILE="$RUNNER_TMP/release_notes.md" +RELEASE_NOTES_DATA="$RUNNER_TMP/release_notes_data.json" +RELEASE_NOTES_HEADERS1="$RUNNER_TMP/release_notes_headers1.json" +RELEASE_NOTES_HEADERS2="$RUNNER_TMP/release_notes_headers2.json" +RELEASE_NOTES_HEADERS3="$RUNNER_TMP/release_notes_headers3.json" +RELEASE_NOTES_FOOTERS1="$RUNNER_TMP/release_notes_footers1.json" +RELEASE_NOTES_FOOTERS2="$RUNNER_TMP/release_notes_footers2.json" +RELEASE_NOTES_FOOTERS3="$RUNNER_TMP/release_notes_footers3.json" +RELEASE_NOTES_ISSUES1="$RUNNER_TMP/release_notes_issues1.json" +RELEASE_NOTES_ISSUES2="$RUNNER_TMP/release_notes_issues2.json" +RELEASE_NOTES_ISSUES3="$RUNNER_TMP/release_notes_issues3.json" +RELEASE_NOTES_PROJECT1="$RUNNER_TMP/release_notes_project1.json" +RELEASE_NOTES_PROJECT2="$RUNNER_TMP/release_notes_project2.json" +RELEASE_NOTES_PROJECT3="$RUNNER_TMP/release_notes_project3.json" +echo "Retrieving headers" +gh issue list --repo spring-cloud/spring-cloud-deployer \ + --search milestone:$DEPLOYER_VERSION \ + --label automation/rlnotes-header \ + --state all --json title,body \ + --jq '{headers:map(.),headerslength:(length)}' \ + > $RELEASE_NOTES_HEADERS1 +gh issue list --repo spring-cloud/spring-cloud-dataflow-ui \ + --search milestone:$DATAFLOW_UI_VERSION \ + --label automation/rlnotes-header \ + --state all --json title,body \ + --jq '{headers:map(.),headerslength:(length)}' \ + > $RELEASE_NOTES_HEADERS2 +gh issue list --repo spring-cloud/spring-cloud-dataflow \ + --search milestone:$DATAFLOW_VERSION \ + --label automation/rlnotes-header \ + --state all --json 
title,body \ + --jq '{headers:map(.),headerslength:(length)}' \ + > $RELEASE_NOTES_HEADERS3 +echo "Retrieving footers" +gh issue list --repo spring-cloud/spring-cloud-deployer \ + --search milestone:$DEPLOYER_VERSION \ + --label automation/rlnotes-footer \ + --state all --json title,body \ + --jq '{footers:map(.),footerslength:(length)}' \ + > $RELEASE_NOTES_FOOTERS1 +gh issue list --repo spring-cloud/spring-cloud-dataflow-ui \ + --search milestone:$DATAFLOW_UI_VERSION \ + --label automation/rlnotes-footer \ + --state all --json title,body \ + --jq '{footers:map(.),footerslength:(length)}' \ + > $RELEASE_NOTES_FOOTERS2 +gh issue list --repo spring-cloud/spring-cloud-dataflow \ + --search milestone:$DATAFLOW_VERSION \ + --label automation/rlnotes-footer \ + --state all --json title,body \ + --jq '{footers:map(.),footerslength:(length)}' \ + > $RELEASE_NOTES_FOOTERS3 +echo "Creating project data" +echo "{\"name\":\"Spring Cloud Dataflow Deployer\",\"version\":\"$DEPLOYER_VERSION\"}" > $RELEASE_NOTES_PROJECT1 +echo "{\"name\":\"Spring Cloud Dataflow UI\",\"version\":\"$DATAFLOW_UI_VERSION\"}" > $RELEASE_NOTES_PROJECT2 +echo "{\"name\":\"Spring Cloud Dataflow\",\"version\":\"$DATAFLOW_VERSION\"}" > $RELEASE_NOTES_PROJECT3 + +echo "Retrieving issues" +gh issue list --repo spring-cloud/spring-cloud-deployer \ + --search milestone:$DEPLOYER_VERSION \ + --state all --json number,title,labels \ + --jq '{issues:map(select((.labels | length == 0) or (any(.labels[].name; startswith("automation/rlnotes")|not))) + {repo:"spring-cloud/spring-cloud-deployer"})}' \ + > $RELEASE_NOTES_ISSUES1 +gh issue list --repo spring-cloud/spring-cloud-dataflow-ui \ + --search milestone:$DATAFLOW_UI_VERSION \ + --state all --json number,title,labels \ + --jq '{issues:map(select((.labels | length == 0) or (any(.labels[].name; startswith("automation/rlnotes")|not))) + {repo:"spring-cloud/spring-cloud-dataflow-ui"})}' \ + > $RELEASE_NOTES_ISSUES2 +gh issue list --repo spring-cloud/spring-cloud-dataflow \ + --search milestone:$DATAFLOW_VERSION \ + --state all --limit 100 --json number,title,labels \ + --jq '{issues:map(select((.labels | length == 0) or (any(.labels[].name; startswith("automation/rlnotes")|not))) + {repo:"spring-cloud/spring-cloud-dataflow"})}' \ + > $RELEASE_NOTES_ISSUES3 +echo "Creating release notes data" +jq -s '{issues:(.[0].issues + .[1].issues + .[2].issues),headers:(.[3].headers + .[4].headers + .[5].headers),headerslength:(.[3].headerslength + .[4].headerslength + .[5].headerslength),footers:(.[6].footers + .[7].footers + .[8].footers), footerslength:(.[6].footerslength + .[7].footerslength + .[8].footerslength),projects:{spring_cloud_deployer:{name:"Spring Cloud Deployer",version:(.[9].version)},spring_cloud_skipper:{name:"Spring Cloud Skipper",version:(.[11].version)},spring_cloud_dataflow_ui:{name:"Spring Cloud Dataflow UI",version:(.[10].version)},spring_cloud_dataflow:{name:"Spring Cloud Dataflow",version:(.[11].version)}}}' \ + $RELEASE_NOTES_ISSUES1 $RELEASE_NOTES_ISSUES2 $RELEASE_NOTES_ISSUES3 \ + $RELEASE_NOTES_HEADERS1 $RELEASE_NOTES_HEADERS2 $RELEASE_NOTES_HEADERS3 \ + $RELEASE_NOTES_FOOTERS1 $RELEASE_NOTES_FOOTERS2 $RELEASE_NOTES_FOOTERS3 \ + $RELEASE_NOTES_PROJECT1 $RELEASE_NOTES_PROJECT2 $RELEASE_NOTES_PROJECT3 \ +> $RELEASE_NOTES_DATA +echo "Applying mustache templates" +mustache $RELEASE_NOTES_DATA .github/rlnotes.mustache > $RELEASE_NOTES_FILE diff --git a/.github/workflows/github-release.yml b/.github/workflows/github-release.yml new file mode 100644 index 0000000000..d89beb23be 
--- /dev/null +++ b/.github/workflows/github-release.yml @@ -0,0 +1,77 @@ +name: Github Release + +on: + workflow_dispatch: + inputs: + build-zoo-handler: + description: 'Build Zoo Handler Payload' + required: true + +env: + ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }} + ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} + +jobs: + ghrelease: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - run: find . -type f -name "*.sh" -exec chmod a+x '{}' \; + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + # zoo extract and ensure + - name: Extract Zoo Context Properties + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-extract-context-properties: true + ensure-env: | + BUILD_ZOO_HANDLER_spring_cloud_deployer_version + BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version + BUILD_ZOO_HANDLER_spring_cloud_dataflow_version + # tooling + - name: Install Tooling + run: | + curl -sSL https://github.com/cbroglie/mustache/releases/download/v1.4.0/mustache_1.4.0_linux_amd64.tar.gz | sudo tar -C /usr/local/bin/ --no-same-owner -xzv mustache + # release notes and gh release + - name: Release + shell: bash + env: + GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} + run: | + # generate release notes + .github/workflows/generate-release-notes.sh "${{ runner.temp }}" "$BUILD_ZOO_HANDLER_spring_cloud_deployer_version" "$BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version" "$BUILD_ZOO_HANDLER_spring_cloud_dataflow_version" + export PACKAGE_VERSION=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_version} + echo "## What's Changed in Spring Cloud Dataflow and Skipper from \`v\` to \`v${PACKAGE_VERSION}\`" >> "${{runner.temp}}/release_notes.md" + gh pr list --search "milestone:$PACKAGE_VERSION" --state closed --json=number,title,author --jq '.[] | "* " + .title + " by @" + .author.login + " in https://github.com/spring-cloud/spring-cloud-dataflow/pull/" + (.number|tostring)' >> "${{runner.temp}}/release_notes.md" + gh release create v$BUILD_ZOO_HANDLER_spring_cloud_dataflow_version \ + --draft \ + --title "Spring Cloud Data Flow $BUILD_ZOO_HANDLER_spring_cloud_dataflow_version" \ + --notes-file "${{runner.temp}}/release_notes.md" + # retrieve spring-cloud-dataflow-package and upload to github release + ./mvnw -s .settings.xml build-helper:parse-version versions:set -DskipResolution=true -DprocessAllModules=true -DgenerateBackupPoms=false -Dartifactory.publish.artifacts=false -DnewVersion=$PACKAGE_VERSION -B --no-transfer-progress + ./spring-cloud-dataflow-package/set-package-version.sh + ./mvnw -s .settings.xml package -pl spring-cloud-dataflow-package -B --no-transfer-progress + PACKAGE_FILE="./spring-cloud-dataflow-package/target/spring-cloud-dataflow-oss-install-${PACKAGE_VERSION}.zip" + gh release upload v$PACKAGE_VERSION $PACKAGE_FILE --clobber + # zoo success + - name: Notify Build Success Zoo Handler Controller + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "ghrelease-succeed" + } + + # zoo failure + - name: Notify Build Failure Zoo Handler Controller + if: ${{ failure() }} + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.REPO_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "ghrelease-failed" + } diff --git a/.github/workflows/images.json b/.github/workflows/images.json new file mode 100644 index 
0000000000..83652cd335 --- /dev/null +++ b/.github/workflows/images.json @@ -0,0 +1,34 @@ +{ + "include": [ + { + "name": "spring-cloud-dataflow-server", + "path": "spring-cloud-dataflow-server/target", + "image": "springcloud/spring-cloud-dataflow-server" + }, + { + "name": "spring-cloud-skipper-server", + "path": "spring-cloud-skipper/spring-cloud-skipper-server/target", + "image": "springcloud/spring-cloud-skipper-server" + }, + { + "name": "spring-cloud-dataflow-composed-task-runner", + "path": "spring-cloud-dataflow-composed-task-runner/target", + "image": "springcloud/spring-cloud-dataflow-composed-task-runner" + }, + { + "name": "spring-cloud-dataflow-single-step-batch-job", + "path": "spring-cloud-dataflow-single-step-batch-job/target", + "image": "springcloud/spring-cloud-dataflow-single-step-batch-job" + }, + { + "name": "spring-cloud-dataflow-tasklauncher-sink-kafka", + "path": "spring-cloud-dataflow-tasklauncher/spring-cloud-dataflow-tasklauncher-sink-kafka/target", + "image": "springcloud/spring-cloud-dataflow-tasklauncher-sink-kafka" + }, + { + "name": "spring-cloud-dataflow-tasklauncher-sink-rabbit", + "path": "spring-cloud-dataflow-tasklauncher/spring-cloud-dataflow-tasklauncher-sink-rabbit/target", + "image": "springcloud/spring-cloud-dataflow-tasklauncher-sink-rabbit" + } + ] +} diff --git a/.github/workflows/issue-handler.yml b/.github/workflows/issue-handler.yml index 4957d63db3..b704d3de1e 100644 --- a/.github/workflows/issue-handler.yml +++ b/.github/workflows/issue-handler.yml @@ -29,7 +29,10 @@ jobs: "cppwfs", "mminella", "dturanski", - "sobychacko" + "onobc", + "claudiahub", + "sobychacko", + "corneil" ] }, "recipes": [ diff --git a/.github/workflows/label-manage.yml b/.github/workflows/label-manage.yml deleted file mode 100644 index 415f015425..0000000000 --- a/.github/workflows/label-manage.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: Labels Manage - -on: - push: - branches: - - 'master' - paths: - - '.github/labels-manage.yml' - - '.github/workflows/label-manage.yml' - workflow_dispatch: - -jobs: - labeler: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: Mangle Labels - uses: crazy-max/ghaction-github-labeler@v3 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - yaml-file: .github/labels-manage.yml - dry-run: false - skip-delete: true diff --git a/.github/workflows/milestone-controller.yml b/.github/workflows/milestone-controller.yml new file mode 100644 index 0000000000..84cd46f033 --- /dev/null +++ b/.github/workflows/milestone-controller.yml @@ -0,0 +1,117 @@ +# control workflow which orchestrates manual milestone builds in +# all dataflow repos, aka one click train build. 
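+# each worker reports back by dispatching a build-zoo-handler event; the handler rules below map those events to the next workflow in the train.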
+name: Milestone Controller + +on: + workflow_dispatch: + inputs: + build-zoo-handler: + description: 'Build Zoo Handler Payload' + required: false + build-zoo-handler-properties: + description: 'Define milestone version as milestone_version=M1' + required: false + +jobs: + build: + runs-on: ubuntu-latest + steps: + + # initial is when the user starts the workflow from the UI (context is empty) + # then the train build goes through the repos using the defined handler rules + - name: Handle Zoo Control + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-max: 20 + dispatch-handler-config: > + [ + { + "if": "initial == true", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-deployer", + "ref": "main", + "workflow": "milestone-worker.yml" + } + }, + { + "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-deployer' && data.owner == 'spring-cloud'", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-dataflow-ui", + "ref": "main", + "workflow": "milestone-worker.yml" + } + }, + { + "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow-ui' && data.owner == 'spring-cloud'", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-dataflow", + "ref": "main", + "workflow": "milestone-worker.yml" + } + }, + { + "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow' && data.owner == 'spring-cloud'", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-dataflow", + "ref": "main", + "workflow": "carvel-worker.yml" + } + }, + { + "if": "data.event == 'carvel-succeed' && data.repo == 'spring-cloud-dataflow' && data.owner == 'spring-cloud'", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-dataflow", + "ref": "main", + "workflow": "promote-milestone.yml" + } + }, + { + "if": "data.event == 'promotion-succeed' && data.repo == 'spring-cloud-dataflow' && data.owner == 'spring-cloud'", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-dataflow", + "ref": "main", + "workflow": "github-release.yml" + } + }, + { + "if": "data.event == 'promotion-failed'", + "action": "fail", + "fail": { + "message": "artifactory promotion failed" + } + }, + { + "if": "data.event == 'ghrelease-failed'", + "action": "fail", + "fail": { + "message": "github release failed" + } + }, + { + "if": "data.event == 'build-failed'", + "action": "fail", + "fail": { + "message": "something went wrong in build train" + } + }, + { + "if": "data.event == 'carvel-failed'", + "action": "fail", + "fail": { + "message": "hi, something went wrong with carvel" + } + } + ] diff --git a/.github/workflows/milestone-worker.yml b/.github/workflows/milestone-worker.yml new file mode 100644 index 0000000000..377e7deafb --- /dev/null +++ b/.github/workflows/milestone-worker.yml @@ -0,0 +1,156 @@ +name: Milestone Worker + +on: + workflow_dispatch: + inputs: + build-zoo-handler: + description: 'Build Zoo Handler Payload' + required: true + +env: + ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }} + ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} +jobs: + build: + runs-on: ubuntu-latest + steps: + + - uses: actions/checkout@v4 + - run: find . 
-type f -name "*.sh" -exec chmod a+x '{}' \; + - uses: ./.github/actions/install-xmlutils + - uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'liberica' + - uses: actions/cache@v3 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-m2- + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - uses: jfrog/setup-jfrog-cli@v3 + env: + JF_URL: 'https://repo.spring.io/' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} + - name: Configure JFrog Cli + run: | + jfrog rt mvnc \ + --server-id-resolve=${{ vars.JF_SERVER_ID }} \ + --server-id-deploy=${{ vars.JF_SERVER_ID }} \ + --repo-resolve-releases=libs-milestone \ + --repo-resolve-snapshots=libs-snapshot \ + --repo-deploy-releases=libs-milestone-local \ + --repo-deploy-snapshots=libs-snapshot-local + echo JFROG_CLI_BUILD_NAME=spring-cloud-dataflow-main-milestone >> $GITHUB_ENV + echo JFROG_CLI_BUILD_NUMBER=$GITHUB_RUN_NUMBER >> $GITHUB_ENV + +# zoo extract and ensure + - name: Extract Zoo Context Properties + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-extract-context-properties: true + ensure-env: | + BUILD_ZOO_HANDLER_milestone_version + BUILD_ZOO_HANDLER_spring_cloud_deployer_version + BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version + - name: Build and Publish + run: | + ./mvnw build-helper:parse-version versions:set \ + -gs .github/settings.xml \ + -Pstagingmilestone \ + -DprocessAllModules=true \ + -DgenerateBackupPoms=false \ + -Dartifactory.publish.artifacts=false \ + -DnewVersion='${parsedVersion.majorVersion}.${parsedVersion.minorVersion}.${parsedVersion.incrementalVersion}-'${BUILD_ZOO_HANDLER_milestone_version} \ + -B --no-transfer-progress + BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$(./mvnw help:evaluate -Dexpression=project.version -q -DforceStdout) + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$BUILD_ZOO_HANDLER_spring_cloud_dataflow_version >> $GITHUB_ENV + echo "::notice ::set dataflow.version=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_version}" + ./mvnw versions:set-property \ + -gs .github/settings.xml \ + -Pstagingmilestone \ + -DgenerateBackupPoms=false \ + -Dproperty=dataflow.version \ + -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_version} \ + -B --no-transfer-progress + ./mvnw versions:set-property \ + -gs .github/settings.xml \ + -Pstagingmilestone \ + -DgenerateBackupPoms=false \ + -Dproperty=spring-cloud-deployer.version \ + -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_deployer_version} \ + -B --no-transfer-progress + ./mvnw versions:set-property \ + -gs .github/settings.xml \ + -Pstagingmilestone \ + -DgenerateBackupPoms=false \ + -Dproperty=spring-cloud-dataflow-ui.version \ + -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version} \ + -B --no-transfer-progress + jfrog rt build-clean + ./spring-cloud-dataflow-package/set-package-version.sh + ./mvnw -s .settings.xml package -DskipTests -T 1C --no-transfer-progress + jfrog rt mvn -gs .github/settings.xml -Pstagingmilestone,deploymentfiles,docs -B install -DskipTests --no-transfer-progress + jfrog rt build-publish + PROJECT_VERSION=$(./mvnw -gs .github/settings.xml -Pstagingmilestone help:evaluate -Dexpression=project.version -q -DforceStdout) + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$PROJECT_VERSION >> $GITHUB_ENV + echo 
BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname=spring-cloud-dataflow-main-milestone >> $GITHUB_ENV + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber=$GITHUB_RUN_NUMBER >> $GITHUB_ENV + +# set +e +# echo "::info ::Project version=$PROJECT_VERSION" +# SKIPPER_DOCS_PATTERN=$(.github/workflows/skipper-docs-name.sh $PROJECT_VERSION libs-milestone-local) +# if [[ "$SKIPPER_DOCS_PATTERN" == *"does not exist"* ]]; then +# echo "::error ::Skipper Docs URL=$SKIPPER_DOCS_PATTERN" +# else +# echo "::info ::Skipper Docs URL=$SKIPPER_DOCS_PATTERN" +# jfrog rt sp --build "$SKIPPER_DOCS_PATTERN" "buildName=$JFROG_CLI_BUILD_NAME;buildNumber=$JFROG_CLI_BUILD_NUMBER" +# echo "::info ::Skipper Docs Set Properties buildName=$JFROG_CLI_BUILD_NAME;buildNumber=$JFROG_CLI_BUILD_NUMBER" +# fi + +# zoo tag + - name: Tag Release + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + tag-release-branch: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + tag-release-tag: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + tag-release-tag-prefix: v + +# build and publish images via composite action + - name: Build and Publish Images + uses: ./.github/actions/build-images + with: + version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }} + dockerhub-password: ${{ secrets.DOCKERHUB_TOKEN }} + GCR_JSON_KEY: ${{ secrets.GCR_JSON_KEY }} + +# zoo success + - name: Notify Build Success Zoo Handler Controller + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "build-succeed" + } + +# zoo failure + - name: Notify Build Failure Zoo Handler Controller + if: ${{ failure() }} + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "build-failed", + "message": "spring-cloud-dataflow failed" + } +# clean m2 cache + - name: Clean cache + run: | + find ~/.m2/repository -type d -name '*SNAPSHOT' | xargs rm -fr diff --git a/.github/workflows/next-dev-version-controller.yml b/.github/workflows/next-dev-version-controller.yml new file mode 100644 index 0000000000..737d134378 --- /dev/null +++ b/.github/workflows/next-dev-version-controller.yml @@ -0,0 +1,61 @@ +name: Next Dev Version Controller + +on: + workflow_dispatch: + inputs: + build-zoo-handler: + description: 'Build Zoo Handler Payload' + required: false + +jobs: + build: + runs-on: ubuntu-latest + steps: + + # initial is when the user starts the workflow from the UI (context is empty) + # then the train build goes through the repos using the defined handler rules + - name: Handle Zoo Control + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-max: 15 + dispatch-handler-config: > + [ + { + "if": "initial == true", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-deployer", + "ref": "main", + "workflow": "next-dev-version-worker.yml" + } + }, + { + "if": "data.event == 'next-dev-version-succeed' && data.repo == 'spring-cloud-deployer' && data.owner == 'spring-cloud'", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-dataflow-ui", + "ref": "main", + "workflow": "next-dev-version-worker.yml" + } + }, + { + "if": "data.event == 'next-dev-version-succeed' && data.repo == 'spring-cloud-dataflow-ui' && 
data.owner == 'spring-cloud'", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-dataflow", + "ref": "main", + "workflow": "next-dev-version-worker.yml" + } + }, + { + "if": "data.event == 'next-dev-version-failed'", + "action": "fail", + "fail": { + "message": "hi, something went wrong" + } + } + ] diff --git a/.github/workflows/next-dev-version-worker.yml b/.github/workflows/next-dev-version-worker.yml new file mode 100644 index 0000000000..7bf20d379e --- /dev/null +++ b/.github/workflows/next-dev-version-worker.yml @@ -0,0 +1,129 @@ +name: Next Dev Version Worker + +on: + workflow_dispatch: + inputs: + build-zoo-handler: + description: 'Build Zoo Handler Payload' + required: true + +env: + ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }} + ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} + +jobs: + build: + runs-on: ubuntu-latest + steps: + + - uses: actions/checkout@v4 + - run: find . -type f -name "*.sh" -exec chmod a+x '{}' \; + - uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'liberica' + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + # cache maven .m2 + - uses: actions/cache@v3 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-m2- + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - uses: jfrog/setup-jfrog-cli@v3 + env: + JF_URL: 'https://repo.spring.io/' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} + - name: Configure JFrog Cli + run: | + jfrog rt mvnc \ + --server-id-resolve=${{ vars.JF_SERVER_ID }} \ + --server-id-deploy=${{ vars.JF_SERVER_ID }} \ + --repo-resolve-releases=libs-milestone \ + --repo-resolve-snapshots=libs-snapshot \ + --repo-deploy-releases=libs-release-local \ + --repo-deploy-snapshots=libs-snapshot-local + echo JFROG_CLI_BUILD_NAME=spring-cloud-dataflow-main-ndv >> $GITHUB_ENV + echo JFROG_CLI_BUILD_NUMBER=$GITHUB_RUN_NUMBER >> $GITHUB_ENV + + # zoo extract and ensure + - name: Extract Zoo Context Properties + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-extract-context-properties: true + + # build and publish to configured target + - name: Build and Publish + run: | + ./mvnw build-helper:parse-version versions:set \ + -DprocessAllModules=true \ + -DgenerateBackupPoms=false \ + -Dartifactory.publish.artifacts=false \ + -DnewVersion='${parsedVersion.majorVersion}.${parsedVersion.minorVersion}.${parsedVersion.nextIncrementalVersion}-SNAPSHOT' \ + -B --no-transfer-progress + BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$(./mvnw help:evaluate -Dexpression=project.version -q -DforceStdout) + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$BUILD_ZOO_HANDLER_spring_cloud_dataflow_version >> $GITHUB_ENV + echo "::notice ::set dataflow.version=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_version}" + ./mvnw versions:set-property \ + -DgenerateBackupPoms=false \ + -Dproperty=dataflow.version \ + -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_version} \ + -B --no-transfer-progress + ./mvnw versions:set-property \ + -DgenerateBackupPoms=false \ + -Dproperty=spring-cloud-deployer.version \ + -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_deployer_version} \ + -B --no-transfer-progress + ./mvnw versions:set-property \ + -DgenerateBackupPoms=false \ + 
-Dproperty=spring-cloud-dataflow-ui.version \ + -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version} \ + -B --no-transfer-progress + jfrog rt build-clean + ./spring-cloud-dataflow-package/set-package-version.sh + ./mvnw -s .settings.xml package -DskipTests -T 1C --no-transfer-progress + jfrog rt mvn -s .settings.xml install -DskipTests -B -T 1C --no-transfer-progress + jfrog rt build-publish + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname=spring-cloud-dataflow-main-ndv >> $GITHUB_ENV + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber=$GITHUB_RUN_NUMBER >> $GITHUB_ENV + + # zoo commit + - name: Commit Next Dev Changes + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + commit-changes-branch: main + commit-changes-message: Next development version + + # zoo success + - name: Notify Build Success Zoo Handler Controller + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "next-dev-version-succeed" + } + + # zoo failure + - name: Notify Build Failure Zoo Handler Controller + if: ${{ failure() }} + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "next-dev-version-failed", + "message": "spring-cloud-dataflow-build next version failed" + } + + # clean m2 cache + - name: Clean cache + run: | + find ~/.m2/repository -type d -name '*SNAPSHOT' | xargs rm -fr diff --git a/.github/workflows/promote-milestone.yml b/.github/workflows/promote-milestone.yml new file mode 100644 index 0000000000..e635c672ae --- /dev/null +++ b/.github/workflows/promote-milestone.yml @@ -0,0 +1,58 @@ +name: Promote Milestone + +on: + workflow_dispatch: + inputs: + build-zoo-handler: + description: 'Build Zoo Handler Payload' + required: true + +jobs: + promote: + runs-on: ubuntu-latest + environment: promote + steps: + # need repo to push release branch and a tag + - uses: actions/checkout@v4 + # zoo extract and ensure + - name: Extract Zoo Context Properties + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-extract-context-properties: true + ensure-env: | + BUILD_ZOO_HANDLER_spring_cloud_deployer_buildname + BUILD_ZOO_HANDLER_spring_cloud_deployer_buildnumber + BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildname + BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildnumber + BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname + BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber + - uses: jfrog/setup-jfrog-cli@v3 + env: + JF_URL: 'https://repo.spring.io/' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} + - name: Promote Build + run: | + jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_deployer_buildname $BUILD_ZOO_HANDLER_spring_cloud_deployer_buildnumber libs-milestone-local + jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildname $BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildnumber libs-milestone-local + jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname $BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber libs-milestone-local + + # zoo success + - name: Notify Build Success Zoo Handler Controller + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "promotion-succeed" + } + + # zoo failure + - name: Notify Build Failure Zoo Handler Controller + if: ${{ failure() }} + uses: 
jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.REPO_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "promotion-failed" + } diff --git a/.github/workflows/promote-release.yml b/.github/workflows/promote-release.yml new file mode 100644 index 0000000000..23def25a93 --- /dev/null +++ b/.github/workflows/promote-release.yml @@ -0,0 +1,58 @@ +name: Promote Release + +on: + workflow_dispatch: + inputs: + build-zoo-handler: + description: 'Build Zoo Handler Payload' + required: true + +jobs: + promote: + runs-on: ubuntu-latest + environment: promote + steps: + # need repo to push release branch and a tag + - uses: actions/checkout@v4 + # zoo extract and ensure + - name: Extract Zoo Context Properties + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-extract-context-properties: true + ensure-env: | + BUILD_ZOO_HANDLER_spring_cloud_deployer_buildname + BUILD_ZOO_HANDLER_spring_cloud_deployer_buildnumber + BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildname + BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildnumber + BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname + BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber + - uses: jfrog/setup-jfrog-cli@v3 + env: + JF_URL: 'https://repo.spring.io/' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} + - name: Promote Build + run: | + jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_deployer_buildname $BUILD_ZOO_HANDLER_spring_cloud_deployer_buildnumber libs-release-local + jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildname $BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_buildnumber libs-release-local + jfrog rt build-promote $BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname $BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber libs-release-local + + # zoo success + - name: Notify Build Success Zoo Handler Controller + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "promote-succeed" + } + + # zoo failure + - name: Notify Build Failure Zoo Handler Controller + if: ${{ failure() }} + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.REPO_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "promotion-failed" + } diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml new file mode 100644 index 0000000000..c9ca453802 --- /dev/null +++ b/.github/workflows/publish-docs.yml @@ -0,0 +1,93 @@ +name: Publish Documentation + +on: + workflow_dispatch: + inputs: + version: + description: 'Version to publish' + required: true +env: + ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }} + ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: ./.github/actions/install-xmlutils + - uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'liberica' + - uses: actions/cache@v3 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-m2- + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - uses: jfrog/setup-jfrog-cli@v3 + env: + JF_URL: 'https://repo.spring.io/' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} + - name: Configure JFrog Cli + run: | + jfrog rt mvnc \ + 
--server-id-resolve=${{ vars.JF_SERVER_ID }} \ + --server-id-deploy=${{ vars.JF_SERVER_ID }} \ + --repo-resolve-releases=libs-release-staging \ + --repo-resolve-snapshots=libs-snapshot \ + --repo-deploy-releases=libs-staging-local \ + --repo-deploy-snapshots=libs-snapshot-local + echo JFROG_CLI_BUILD_NAME=spring-cloud-dataflow-main-release >> $GITHUB_ENV + echo JFROG_CLI_BUILD_NUMBER=$GITHUB_RUN_NUMBER >> $GITHUB_ENV + +# build and publish to configured target + - name: Build and Publish + run: | + ./mvnw build-helper:parse-version versions:set \ + -gs .github/settings.xml \ + -DskipResolution=true \ + -DprocessAllModules=true \ + -DgenerateBackupPoms=false \ + -Dartifactory.publish.artifacts=false \ + -DnewVersion=${{ inputs.version }} \ + -B --no-transfer-progress + ./mvnw versions:set-property \ + -gs .github/settings.xml \ + -Pstagingrelease \ + -DskipResolution=true \ + -DgenerateBackupPoms=false \ + -Dproperty=dataflow.version \ + -DnewVersion=${{ inputs.version }} \ + -B --no-transfer-progress + ./mvnw versions:set-property \ + -gs .github/settings.xml \ + -Pstagingrelease \ + -DskipResolution=true \ + -DgenerateBackupPoms=false \ + -Dproperty=skipper.version \ + -DnewVersion=${{ inputs.version }} \ + -B --no-transfer-progress + spring_cloud_dataflow_version=$(./mvnw help:evaluate -Dexpression=project.version -q -DforceStdout) + echo "spring_cloud_dataflow_version=$spring_cloud_dataflow_version" >> $GITHUB_ENV + echo "::notice ::set dataflow.version=${spring_cloud_dataflow_version}" + echo "::notice ::build-clean" + jfrog rt build-clean + echo "::notice ::set-package-version" + ./spring-cloud-dataflow-package/set-package-version.sh + echo "::notice ::install" + ./mvnw -gs .github/settings.xml -am -pl :spring-cloud-skipper-server-core install -DskipTests -T 1C -B --no-transfer-progress + jfrog rt mvn -gs .github/settings.xml \ + -Pstagingrelease,deploymentfiles,docs \ + -pl :spring-cloud-dataflow-classic-docs,:spring-cloud-dataflow-docs,:spring-cloud-skipper-docs \ + -B install -DskipTests --no-transfer-progress + echo "::notice ::build-publish" + jfrog rt build-publish + echo "::info ::spring_cloud_dataflow_version=$spring_cloud_dataflow_version" + echo "::info ::spring_cloud_dataflow_buildname=$JFROG_CLI_BUILD_NAME" + echo "::info ::spring_cloud_dataflow_buildnumber=$JFROG_CLI_BUILD_NUMBER" diff --git a/.github/workflows/release-controller.yml b/.github/workflows/release-controller.yml new file mode 100644 index 0000000000..92df60d36d --- /dev/null +++ b/.github/workflows/release-controller.yml @@ -0,0 +1,124 @@ +# control workflow which orchestrates manual release builds in +# all dataflow repos, aka one click train build. 
+name: Release Controller + +on: + workflow_dispatch: + inputs: + build-zoo-handler: + description: 'Build Zoo Handler Payload' + required: false + +jobs: + build: + runs-on: ubuntu-latest + steps: + + # initial is when user starts workflow from UI(context is empty) + # then train build goes through via repos using defined hander rules + - name: Handle Zoo Control + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-max: 20 + dispatch-handler-config: > + [ + { + "if": "initial == true", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-deployer", + "ref": "main", + "workflow": "release-worker.yml" + } + }, + { + "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-deployer' && data.owner == 'spring-cloud'", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-dataflow-ui", + "ref": "main", + "workflow": "release-worker.yml" + } + }, + { + "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow-ui' && data.owner == 'spring-cloud'", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-dataflow", + "ref": "main", + "workflow": "release-worker.yml" + } + }, + { + "if": "data.event == 'build-succeed' && data.repo == 'spring-cloud-dataflow' && data.owner == 'spring-cloud'", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-dataflow", + "ref": "main", + "workflow": "carvel-worker.yml" + } + }, + { + "if": "data.event == 'carvel-succeed' && data.repo == 'spring-cloud-dataflow' && data.owner == 'spring-cloud'", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-dataflow", + "ref": "main", + "workflow": "promote-release.yml" + } + }, + { + "if": "data.event == 'promote-succeed' && data.repo == 'spring-cloud-dataflow' && data.owner == 'spring-cloud'", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-dataflow", + "ref": "main", + "workflow": "central-release.yml" + } + }, + { + "if": "data.event == 'central-succeed' && data.repo == 'spring-cloud-dataflow' && data.owner == 'spring-cloud'", + "action": "workflow_dispatch", + "workflow_dispatch": { + "owner": "spring-cloud", + "repo": "spring-cloud-dataflow", + "ref": "main", + "workflow": "github-release.yml" + } + }, + { + "if": "data.event == 'promotion-failed'", + "action": "fail", + "fail": { + "message": "artifactory promotion failed" + } + }, + { + "if": "data.event == 'central-failed'", + "action": "fail", + "fail": { + "message": "central sync failed" + } + }, + { + "if": "data.event == 'ghrelease-failed'", + "action": "fail", + "fail": { + "message": "github release failed" + } + }, + { + "if": "data.event == 'build-failed'", + "action": "fail", + "fail": { + "message": "something went wrong in build train" + } + } + ] diff --git a/.github/workflows/release-worker.yml b/.github/workflows/release-worker.yml new file mode 100644 index 0000000000..9c7858bf81 --- /dev/null +++ b/.github/workflows/release-worker.yml @@ -0,0 +1,191 @@ +name: Release Worker + +on: + workflow_dispatch: + inputs: + build-zoo-handler: + description: 'Build Zoo Handler Payload' + required: true + +env: + ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }} + ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} + 
+jobs: + build: + runs-on: ubuntu-latest + steps: + + - uses: actions/checkout@v4 + - run: find . -type f -name "*.sh" -exec chmod a+x '{}' \; + - uses: ./.github/actions/install-xmlutils + - uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'liberica' + - uses: actions/cache@v3 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-m2- + - uses: jvalkeal/setup-maven@v1 + with: + maven-version: 3.8.8 + maven-mirror: 'https://dlcdn.apache.org/maven/maven-3/' + - uses: jfrog/setup-jfrog-cli@v3 + env: + JF_URL: 'https://repo.spring.io/' + JF_ENV_SPRING: ${{ secrets.JF_ARTIFACTORY_SPRING }} + - name: Configure JFrog Cli + run: | + jfrog rt mvnc \ + --server-id-resolve=${{ vars.JF_SERVER_ID }} \ + --server-id-deploy=${{ vars.JF_SERVER_ID }} \ + --repo-resolve-releases=libs-release-staging \ + --repo-resolve-snapshots=libs-snapshot \ + --repo-deploy-releases=libs-staging-local \ + --repo-deploy-snapshots=libs-snapshot-local + echo JFROG_CLI_BUILD_NAME=spring-cloud-dataflow-main-release >> $GITHUB_ENV + echo JFROG_CLI_BUILD_NUMBER=$GITHUB_RUN_NUMBER >> $GITHUB_ENV + +# zoo extract and ensure + - name: Extract Zoo Context Properties + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-extract-context-properties: true + ensure-env: | + BUILD_ZOO_HANDLER_spring_cloud_deployer_version + BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version +# build and publish to configured target + - name: Build and Publish + run: | + ./mvnw build-helper:parse-version versions:set \ + -gs .github/settings.xml \ + -Pstagingrelease \ + -DskipResolution=true \ + -DprocessAllModules=true \ + -DgenerateBackupPoms=false \ + -Dartifactory.publish.artifacts=false \ + -DnewVersion='${parsedVersion.majorVersion}.${parsedVersion.minorVersion}.${parsedVersion.incrementalVersion}' \ + -B --no-transfer-progress + BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$(./mvnw help:evaluate -Dexpression=project.version -q -DforceStdout) + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$BUILD_ZOO_HANDLER_spring_cloud_dataflow_version >> $GITHUB_ENV + echo "::notice ::set dataflow.version=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_version}" + ./mvnw versions:set-property \ + -gs .github/settings.xml \ + -Pstagingrelease \ + -DskipResolution=true \ + -DgenerateBackupPoms=false \ + -Dproperty=dataflow.version \ + -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_version} \ + -B --no-transfer-progress + echo "::notice ::set spring-cloud-deployer.version=${BUILD_ZOO_HANDLER_spring_cloud_deployer_version}" + ./mvnw versions:set-property \ + -gs .github/settings.xml \ + -Pstagingrelease \ + -DskipResolution=true \ + -DgenerateBackupPoms=false \ + -Dproperty=spring-cloud-deployer.version \ + -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_deployer_version} \ + -B --no-transfer-progress + echo "::notice ::set spring-cloud-dataflow-ui.version=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version}" + ./mvnw versions:set-property \ + -gs .github/settings.xml \ + -DskipResolution=true \ + -Pstagingrelease \ + -DgenerateBackupPoms=false \ + -Dproperty=spring-cloud-dataflow-ui.version \ + -DnewVersion=${BUILD_ZOO_HANDLER_spring_cloud_dataflow_ui_version} \ + -B --no-transfer-progress + echo "::notice ::build-clean" + jfrog rt build-clean + echo "::notice ::set-package-version" + ./spring-cloud-dataflow-package/set-package-version.sh + echo "::notice ::install" + ./mvnw -s .settings.xml package -Pstagingrelease -DskipTests 
-T 1C --no-transfer-progress + jfrog rt mvn -gs .github/settings.xml -Pstagingrelease,deploymentfiles,docs -B install -DskipTests --no-transfer-progress + echo "::notice ::build-publish" + jfrog rt build-publish + PROJECT_VERSION=$(./mvnw -gs .github/settings.xml -Pstagingrelease help:evaluate -Dexpression=project.version -q -DforceStdout) + spring_cloud_dataflow_version=$PROJECT_VERSION + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$spring_cloud_dataflow_version >> $GITHUB_ENV + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname=spring-cloud-dataflow-main-release >> $GITHUB_ENV + echo BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber=$GITHUB_RUN_NUMBER >> $GITHUB_ENV + echo BUILD_ZOO_HANDLER_spring_cloud_skipper_version=$spring_cloud_dataflow_version >> $GITHUB_ENV + + echo "::info ::BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=$spring_cloud_dataflow_version" + echo "::info ::BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname=spring-cloud-dataflow-main-release" + echo "::info ::BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber=$GITHUB_RUN_NUMBER" + echo "::info ::BUILD_ZOO_HANDLER_spring_cloud_skipper_version=$spring_cloud_dataflow_version" + + # set +e + # echo "::info ::Project version=$PROJECT_VERSION" + # SKIPPER_DOCS_PATTERN=$(.github/workflows/skipper-docs-name.sh $PROJECT_VERSION libs-staging-local) + # if [[ "$SKIPPER_DOCS_PATTERN" == *"does not exist"* ]]; then + # echo "::error ::Skipper Docs URL=$SKIPPER_DOCS_PATTERN" + # else + # echo "::info ::Skipper Docs URL=$SKIPPER_DOCS_PATTERN" + # jfrog rt sp --build "$SKIPPER_DOCS_PATTERN" "buildName=$JFROG_CLI_BUILD_NAME;buildNumber=$JFROG_CLI_BUILD_NUMBER" + # echo "::info ::Skipper Docs Set Properties buildName=$JFROG_CLI_BUILD_NAME;buildNumber=$JFROG_CLI_BUILD_NUMBER" + # fi +# zoo tag + - name: Tag Release + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + tag-release-branch: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + tag-release-tag: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + tag-release-tag-prefix: v +# clean m2 cache + - name: Clean cache + run: | + find ~/.m2/repository -type d -name '*SNAPSHOT' | xargs rm -fr + outputs: + version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + BUILD_ZOO_HANDLER_spring_cloud_dataflow_version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }} + BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname }} + BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber }} + BUILD_ZOO_HANDLER_spring_cloud_skipper_version: ${{ env.BUILD_ZOO_HANDLER_spring_cloud_skipper_version }} + images: + name: Build and Publish Images + needs: + - build + uses: ./.github/workflows/build-images.yml + with: + version: ${{ needs.build.outputs.version }} + secrets: inherit + wrap: + needs: [ build, images ] + runs-on: ubuntu-latest + steps: + - name: Save env + shell: bash + if: ${{ success() }} + run: | + echo "BUILD_ZOO_HANDLER_spring_cloud_dataflow_version=${{ needs.build.outputs.BUILD_ZOO_HANDLER_spring_cloud_dataflow_version }}" >> $GITHUB_ENV + echo "BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname=${{ needs.build.outputs.BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildname }}" >> $GITHUB_ENV + echo "BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber=${{ needs.build.outputs.BUILD_ZOO_HANDLER_spring_cloud_dataflow_buildnumber }}" >> $GITHUB_ENV + echo "BUILD_ZOO_HANDLER_spring_cloud_skipper_version=${{ 
needs.build.outputs.BUILD_ZOO_HANDLER_spring_cloud_skipper_version }}" >> $GITHUB_ENV + +# zoo success + - name: Notify Build Success Zoo Handler Controller + if: ${{ success() }} + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "build-succeed" + } + +# zoo failure + - name: Notify Build Failure Zoo Handler Controller + if: ${{ failure() }} + uses: jvalkeal/build-zoo-handler@v0.0.4 + with: + dispatch-handler-token: ${{ secrets.SCDF_ACCESS_TOKEN }} + dispatch-handler-client-payload-data: > + { + "event": "build-failed", + "message": "spring-cloud-dataflow failed" + } diff --git a/.github/workflows/schedule-train-builds.yml b/.github/workflows/schedule-train-builds.yml new file mode 100644 index 0000000000..7d7001c9b1 --- /dev/null +++ b/.github/workflows/schedule-train-builds.yml @@ -0,0 +1,17 @@ +name: Schedule Snapshot Train Builds + +on: + workflow_dispatch: + +jobs: + schedule: + runs-on: ubuntu-latest + strategy: + matrix: + branch: [main, 2.11.x] + steps: + - uses: benc-uk/workflow-dispatch@v1 + with: + workflow: Build Snapshot Controller + token: ${{ secrets.SCDF_ACCESS_TOKEN }} + ref: ${{ matrix.branch }} diff --git a/.github/workflows/skipper-docs-name.sh b/.github/workflows/skipper-docs-name.sh new file mode 100755 index 0000000000..ef030d56d7 --- /dev/null +++ b/.github/workflows/skipper-docs-name.sh @@ -0,0 +1,37 @@ +#!/bin/bash +VERSION=$1 +if [ "$1" == "" ]; then + echo "Version is required" + exit 1 +fi +if [ "$2" != "" ]; then + REPO="$2" +fi + +if [ -z "$REPO" ]; then + if [[ "$VERSION" == *"-SNAPSHOT"* ]]; then + REPO="libs-snapshot-local" + elif [[ "$VERSION" == *"-M"* ]] || [[ "${VERSION}" == *"-RC"* ]]; then + REPO="libs-milestone-local" + else + REPO="libs-release-local" + fi +fi +CURL_TOKEN="$ARTIFACTORY_USERNAME:$ARTIFACTORY_PASSWORD" +if [[ "$REPO" == *"snapshot"* ]]; then + META_DATA_URL="https://repo.spring.io/artifactory/$REPO/org/springframework/cloud/spring-cloud-skipper-docs/$%7BVERSION%7D/maven-metadata.xml" + curl -u "$CURL_TOKEN" --basic -o maven-metadata.xml -s -XGET -L "$META_DATA_URL" # > /dev/null + DL_TS=$(xmllint --xpath "/metadata/versioning/snapshot/timestamp/text()" maven-metadata.xml | sed 's/\.//') + DL_VERSION=$(xmllint --xpath "/metadata/versioning/snapshotVersions/snapshotVersion[extension/text() = 'pom' and updated/text() = '$DL_TS']/value/text()" maven-metadata.xml) + REMOTE_PATH="org/springframework/cloud/spring-cloud-skipper-docs/${VERSION}/spring-cloud-skipper-docs-${DL_VERSION}.zip" +else + REMOTE_PATH="org/springframework/cloud/spring-cloud-skipper-docs/${VERSION}/spring-cloud-skipper-docs-${VERSION}.zip" +fi +REMOTE_FILE="https://repo.spring.io/artifactory/$%7BREPO%7D/$REMOTE_PATH" +RC=$(curl -u "$CURL_TOKEN" --basic -o /dev/null -L -s -Iw '%{http_code}' "$REMOTE_FILE") +if ((RC<300)); then + echo "$REMOTE_PATH" +else + echo "$REMOTE_FILE does not exist. Error code $RC" + exit 2 +fi diff --git a/.github/workflows/workflow.adoc b/.github/workflows/workflow.adoc new file mode 100644 index 0000000000..bbd98775dc --- /dev/null +++ b/.github/workflows/workflow.adoc @@ -0,0 +1,71 @@ += Workflow Reference + +This README serves as a guide to the GitHub Action workflows included in this repository. +It outlines the purpose and functionality of each workflow, detailing their role in the CI and release processes. 
+Additionally, this document provides an overview of the scripts and actions utilized in these workflows, offering insights into how they work together in SCDF's CI/CD pipeline.
+
+This document is a work in progress, and as various workflows are updated, the documentation will be revised to reflect both existing and new behaviors.
+
+
+== Building Docker Images and pushing the containers to DockerHub
+.This diagram shows the flow of execution of how workflows create Docker images.
+```
+┌─────────────────────────┐
+│                         │
+│                         │
+│build-snapshot-worker.yml┼────┐
+│                         │    │
+│                         │    │
+└─────────────────────────┘    │
+┌─────────────────────────┐    │
+│                         │    │
+│                         │    │
+│      ci-images.yml      ┼────┤     ┌─────────────────────────┐         ┌─────────────────────────┐
+│                         │    │     │                         │         │                         │
+│                         │    │     │                         │         │                         │
+└─────────────────────────┘    ├────►│    build-images.yml     ┼────────►│     build-images.sh     │
+┌─────────────────────────┐    │     │                         │         │                         │
+│                         │    │     │                         │         │                         │
+│                         │    │     └───────────┬─────────────┘         └─────────────────────────┘
+│         ci.yml          ┼────┤                 │
+│                         │    │                 │
+│                         │    │     ┌───────────┴─────────────┐
+└─────────────────────────┘    │     │                         │
+┌─────────────────────────┐    │     │                         │
+│                         │    │     │       images.json       │
+│                         │    │     │                         │
+│   release-worker.yml    ┼────┘     │                         │
+│                         │          └─────────────────────────┘
+│                         │
+└─────────────────────────┘
+```
+
+Part of the release and CI process involves creating and pushing images to a registry (such as DockerHub) for the Dataflow server, Skipper server, CTR app, and other components.
+This process is managed by the `build-images` (build-images.yml) workflow. While the `build-images` workflow is typically not run independently, it is invoked by other workflows that handle CI builds and releases.
+The `build-images` workflow determines which images to create based on the `images.json` file.
+This file contains metadata on the primary SCDF components that need an associated image.
+Each entry specifies the location (directory) where the jar can be found, the jar name, and the image name for each artifact that will be used to construct the image.
+For each entry in the `images.json` file, the `build-images.sh` script retrieves the jar, builds the image, and then pushes it to the registry.
+
+SCDF also provides images for external applications that support some of the optional features offered by Data Flow.
+These include Grafana and a local Prometheus.
+These images are created and pushed using the docker/build-push-action@v2 action.
+
+=== Scripts used to build images
+As mentioned above, the `build-images.sh` script is responsible for building the specified images based on the parameters provided and then pushing them to DockerHub.
+This script uses Paketo to build an image for each of the supported Java versions using the corresponding jar file.
+The resulting image name will look something like `spring-cloud-dataflow-server:3.0.0-SNAPSHOT-jdk17`.
+Additionally, the script creates a default image using the default Java version as specified by the `DEFAULT_JDK` environment variable.
+
+The format for running `build-images.sh` is as follows:
+[source, bash]
+```
+./build-images.sh
+```
+
+There is an optional `DEFAULT_JDK` environment variable that allows you to set the JDK version used for the default image.
+If it is not set, the script falls back to its built-in default (which, as of this writing, is `17`).
+
+NOTE: When new releases of Java are available and are compliant with the SCDF release, they need to be added to the `build-images.sh` script.
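+
+For example, assuming the project jars have already been built locally, a run that pins the default (un-suffixed) image tag to the JDK 17 variant might look like the following; the inline `DEFAULT_JDK` assignment is the only knob shown and is taken from its description above:
+[source, bash]
+```
+# Sketch: make the JDK 17 image the default (un-suffixed) tag
+DEFAULT_JDK=17 ./build-images.sh
+```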
+ diff --git a/.gitignore b/.gitignore index df96ea6ca9..106e2c93fb 100644 --- a/.gitignore +++ b/.gitignore @@ -22,6 +22,7 @@ logs/ scdf-logs/ .attach_pid* .jfrog/ +.gradle # Eclipse artifacts, including WTP generated manifests .classpath @@ -49,3 +50,20 @@ cleanup.sh # GitHub actions .m2 + +# Carvel +node_modules +!/.idea/checkstyle-idea.xml +yagni/ +workspace/ +*.sh.txt +*.yml.txt +/src/deploy/shell/*.jar +*.log +*.shell +/*.json +*.tar +/srp +/srp*gz +/observer/* + diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties index 5bb39dc257..3be380875a 100644 --- a/.mvn/wrapper/maven-wrapper.properties +++ b/.mvn/wrapper/maven-wrapper.properties @@ -1,2 +1 @@ -distributionUrl=https://repo1.maven.org/maven2/org/apache/maven/apache-maven/3.6.0/apache-maven-3.6.0-bin.zip - +distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.4/apache-maven-3.9.4-bin.zip diff --git a/.run/spring-cloud-dataflow [clean,install].run.xml b/.run/spring-cloud-dataflow [clean,install].run.xml new file mode 100644 index 0000000000..83b1bded63 --- /dev/null +++ b/.run/spring-cloud-dataflow [clean,install].run.xml @@ -0,0 +1,63 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/.sdkmanrc b/.sdkmanrc new file mode 100644 index 0000000000..93565edd75 --- /dev/null +++ b/.sdkmanrc @@ -0,0 +1,21 @@ +# +# Copyright 2005-2022 the original author or authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Use `sdk env` to manually apply this file. +# Set `sdkman_auto_env=true` in $HOME/.sdkman/etc/config to make it automatic. +# +# NOTE: Switching branches will NOT trigger a change. Only switching folder will do it. Use `sdk env` to apply when simply switching branches. 
+ +java=8.0.402-librca diff --git a/.settings.xml b/.settings.xml index 6066f6436c..72efc5c4d6 100644 --- a/.settings.xml +++ b/.settings.xml @@ -3,27 +3,44 @@ repo.spring.io - ${env.CI_DEPLOY_USERNAME} - ${env.CI_DEPLOY_PASSWORD} + ${env.ARTIFACTORY_USERNAME} + ${env.ARTIFACTORY_PASSWORD} + + + spring-snapshots + ${env.ARTIFACTORY_USERNAME} + ${env.ARTIFACTORY_PASSWORD} + + + spring-milestones + ${env.ARTIFACTORY_USERNAME} + ${env.ARTIFACTORY_PASSWORD} + + + spring-staging + ${env.ARTIFACTORY_USERNAME} + ${env.ARTIFACTORY_PASSWORD} - spring true + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + spring-snapshots Spring Snapshots - https://repo.spring.io/libs-snapshot-local + https://repo.spring.io/snapshot true @@ -31,25 +48,29 @@ spring-milestones Spring Milestones - https://repo.spring.io/libs-milestone-local + https://repo.spring.io/milestone false - spring-releases - Spring Releases - https://repo.spring.io/release - - false - + groovy-plugins-release + https://groovy.jfrog.io/artifactory/plugins-release + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + spring-snapshots Spring Snapshots - https://repo.spring.io/libs-snapshot-local + https://repo.spring.io/snapshot true @@ -57,11 +78,15 @@ spring-milestones Spring Milestones - https://repo.spring.io/libs-milestone-local + https://repo.spring.io/milestone false + + groovy-plugins-release + https://groovy.jfrog.io/artifactory/plugins-release + diff --git a/.springjavaformatconfig b/.springjavaformatconfig new file mode 100644 index 0000000000..db822775c0 --- /dev/null +++ b/.springjavaformatconfig @@ -0,0 +1 @@ +java-baseline=17 \ No newline at end of file diff --git a/spring-cloud-dataflow-registry/.jdk8 b/.trivyignore similarity index 100% rename from spring-cloud-dataflow-registry/.jdk8 rename to .trivyignore diff --git a/.vscode/launch.json b/.vscode/launch.json index 14d8a33dca..d358781a73 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -29,6 +29,23 @@ "mainClass": "org.springframework.cloud.dataflow.server.single.DataFlowServerApplication", "projectName": "spring-cloud-dataflow-server", "args": "--spring.config.additional-location=src/config/scdf-mysql.yml" + }, + { + "type": "java", + "name": "SCDF Debug Attach", + "request": "attach", + "hostName": "localhost", + "port": 5005 + }, + { + "type": "java", + "name": "SKIPPER default", + "request": "launch", + "cwd": "${workspaceFolder}", + "console": "internalConsole", + "mainClass": "org.springframework.cloud.skipper.server.app.SkipperServerApplication", + "projectName": "spring-cloud-skipper-server", + "args": "" } ] } \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json index 9f69c44e6b..9004a86ac8 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,3 +1,11 @@ { - "java.configuration.maven.userSettings": ".settings.xml" + "java.completion.importOrder": [ + "java", + "javax", + "", + "org.springframework", + "#" + ], + "java.configuration.maven.userSettings": ".settings.xml", + "java.jdt.ls.vmargs": "-XX:+UseParallelGC -XX:GCTimeRatio=4 -XX:AdaptiveSizePolicyWeight=90 -Dsun.zip.disableMemoryMapping=true -Xmx4G -Xms100m -Xlog:disable" } \ No newline at end of file diff --git a/CODE_OF_CONDUCT.adoc b/CODE_OF_CONDUCT.adoc deleted file mode 100644 index 17783c7c06..0000000000 --- a/CODE_OF_CONDUCT.adoc +++ /dev/null @@ -1,44 +0,0 @@ -= Contributor Code of Conduct - -As contributors and maintainers of this project, and in the interest of fostering an 
open
-and welcoming community, we pledge to respect all people who contribute through reporting
-issues, posting feature requests, updating documentation, submitting pull requests or
-patches, and other activities.
-
-We are committed to making participation in this project a harassment-free experience for
-everyone, regardless of level of experience, gender, gender identity and expression,
-sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
-religion, or nationality.
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery
-* Personal attacks
-* Trolling or insulting/derogatory comments
-* Public or private harassment
-* Publishing other's private information, such as physical or electronic addresses,
-  without explicit permission
-* Other unethical or unprofessional conduct
-
-Project maintainers have the right and responsibility to remove, edit, or reject comments,
-commits, code, wiki edits, issues, and other contributions that are not aligned to this
-Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
-that they deem inappropriate, threatening, offensive, or harmful.
-
-By adopting this Code of Conduct, project maintainers commit themselves to fairly and
-consistently applying these principles to every aspect of managing this project. Project
-maintainers who do not follow or enforce the Code of Conduct may be permanently removed
-from the project team.
-
-This Code of Conduct applies both within project spaces and in public spaces when an
-individual is representing the project or its community.
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
-contacting a project maintainer at spring-code-of-conduct@pivotal.io . All complaints will
-be reviewed and investigated and will result in a response that is deemed necessary and
-appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
-with regard to the reporter of an incident.
-
-This Code of Conduct is adapted from the
-https://contributor-covenant.org[Contributor Covenant], version 1.3.0, available at
-https://contributor-covenant.org/version/1/3/0/[contributor-covenant.org/version/1/3/0/]
diff --git a/CONTRIBUTING.adoc b/CONTRIBUTING.adoc new file mode 100755 index 0000000000..ec78b76c47 --- /dev/null +++ b/CONTRIBUTING.adoc @@ -0,0 +1,48 @@
+= Contributing to Spring Cloud Dataflow
+
+:github: https://github.com/spring-cloud/spring-cloud-dataflow
+
+Spring Cloud Dataflow is released under the Apache 2.0 license. If you would like to contribute something, or want to hack on the code, this document should help you get started.
+
+
+
+== Using GitHub Issues
+We use GitHub issues to track bugs and enhancements.
+If you have a general usage question, please ask on https://stackoverflow.com[Stack Overflow].
+The Spring Cloud Dataflow team and the broader community monitor the https://stackoverflow.com/tags/spring-cloud-dataflow[`spring-cloud-dataflow`] tag.
+
+If you are reporting a bug, please help to speed up problem diagnosis by providing as much information as possible.
+Ideally, that would include a small sample project that reproduces the problem.
+
+
+
+== Reporting Security Vulnerabilities
+If you think you have found a security vulnerability in Spring Cloud Data Flow, please *DO NOT* disclose it publicly until we've had a chance to fix it.
+Please don't report security vulnerabilities using GitHub issues; instead, head over to https://spring.io/security-policy and learn how to disclose them responsibly.
+
+
+
+== Developer Certificate of Origin
+All commits must include a **Signed-off-by** trailer at the end of each commit message to indicate that the contributor agrees to the Developer Certificate of Origin.
+For additional details, please refer to the blog post https://spring.io/blog/2025/01/06/hello-dco-goodbye-cla-simplifying-contributions-to-spring[Hello DCO, Goodbye CLA: Simplifying Contributions to Spring].
+
+
+=== Code Conventions and Housekeeping
+
+None of the following guidelines is essential for a pull request, but they all help your fellow developers understand and work with your code.
+They can also be added after the original pull request but before a merge.
+
+* Use the Spring Framework code format conventions. If you use Eclipse, you can import formatter settings by using the `eclipse-code-formatter.xml` file from the https://github.com/spring-cloud/spring-cloud-build/blob/master/spring-cloud-dependencies-parent/eclipse-code-formatter.xml[Spring Cloud Build] project.
+If you use IntelliJ, you can use the https://plugins.jetbrains.com/plugin/6546[Eclipse Code Formatter Plugin] to import the same file.
+* Make sure all new `.java` files have a simple Javadoc class comment with at least an `@author` tag identifying you, and preferably at least a paragraph describing the class's purpose.
+* Add the ASF license header comment to all new `.java` files (to do so, copy it from existing files in the project).
+* Add yourself as an `@author` to the .java files that you modify substantially (more than cosmetic changes).
+* Add some Javadocs and, if you change the namespace, some XSD doc elements.
+* A few unit tests would help a lot as well. Someone has to do it, and your fellow developers appreciate the effort.
+* If no one else uses your branch, rebase it against the current master (or other target branch in the main project).
+* When writing a commit message, follow https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html[these conventions].
+If you fix an existing issue, add `Fixes gh-XXXX` (where XXXX is the issue number) at the end of the commit message.
+
+
+== Working with the Code
+For information on editing, building, and testing the code, see the link:{github}/wiki/Working-with-the-Code[Working with the Code] page on the project wiki.
diff --git a/README.md b/README.md index e27217b2c1..c80c2588e0 100644 --- a/README.md +++ b/README.md @@ -4,21 +4,11 @@

-[badges: Latest Release Version, Latest Snapshot Version, Build Status]
+# Spring Cloud Data Flow is no longer maintained as an open-source project by Broadcom, Inc. + +## For information about extended support or commercial options for Spring Cloud Data Flow, please read the official blog post [here](https://spring.io/blog/2025/04/21/spring-cloud-data-flow-commercial). + + *Spring Cloud Data Flow* is a microservices-based toolkit for building streaming and batch data processing pipelines in Cloud Foundry and Kubernetes. @@ -42,7 +32,7 @@ Familiarize yourself with the Spring Cloud Data Flow [architecture](https://data and [feature capabilities](https://dataflow.spring.io/features/). **Deployer SPI**: A Service Provider Interface (SPI) is defined in the [Spring Cloud Deployer](https://github.com/spring-cloud/spring-cloud-deployer) -project. The Deployer SPI provides an abstraction layer for deploying the apps for a given streaming or batch data pipeline, +project. The Deployer SPI provides an abstraction layer for deploying the apps for a given streaming or batch data pipeline and managing the application lifecycle. Spring Cloud Deployer Implementations: @@ -66,24 +56,13 @@ For example, if relying on Maven coordinates, an application URI would be of the connects to the Spring Cloud Data Flow Server's REST API and supports a DSL that simplifies the process of defining a stream or task and managing its lifecycle. -**Community Implementations**: There are also community maintained Spring Cloud Data Flow implementations that are currently -based on the 1.7.x series of Spring Cloud Data Flow. - - * [HashiCorp Nomad](https://github.com/donovanmuller/spring-cloud-dataflow-server-nomad) - * [OpenShift](https://github.com/donovanmuller/spring-cloud-dataflow-server-openshift) - * [Apache Mesos](https://github.com/trustedchoice/spring-cloud-dataflow-server-mesos) - -The [Apache YARN](https://github.com/spring-cloud/spring-cloud-dataflow-server-yarn) implementation has reached end-of-line -status. Let us know at [Gitter](https://gitter.im/spring-cloud/spring-cloud-dataflow) if you are interested in forking -the project to continue developing and maintaining it. - ---- ## Building Clone the repo and type - $ ./mvnw clean install + $ ./mvnw -s .settings.xml clean install Looking for more information? Follow this [link](https://github.com/spring-cloud/spring-cloud-dataflow/blob/master/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-building.adoc). @@ -94,13 +73,52 @@ By default Git will change the line-endings during checkout to `CRLF`. This is, as this may lead to test failures under Windows. Therefore, please ensure that you set Git property `core.autocrlf` to `false`, e.g. using: `$ git config core.autocrlf false`. -Fore more information please refer to the [Git documentation, Formatting and Whitespace](https://git-scm.com/book/en/v2/Customizing-Git-Git-Configuration). +For more information please refer to the [Git documentation, Formatting and Whitespace](https://git-scm.com/book/en/v2/Customizing-Git-Git-Configuration). + +---- + +## Running Locally w/ Oracle +By default, the Dataflow server jar does not include the Oracle database driver dependency. +If you want to use Oracle for development/testing when running locally, you can specify the `local-dev-oracle` Maven profile when building. 
+The following command will include the Oracle driver dependency in the jar:
+```
+$ ./mvnw -s .settings.xml clean package -Plocal-dev-oracle
+```
+You can follow the steps in the [Oracle on Mac ARM64](https://github.com/spring-cloud/spring-cloud-dataflow/wiki/Oracle-on-Mac-ARM64#run-container-in-docker) Wiki to run Oracle XE locally in Docker with Dataflow pointing at it.
+
+> **NOTE:** If you are not running Mac ARM64, just skip the steps related to Homebrew and Colima.
+
+----
+
+## Running Locally w/ Microsoft SQL Server
+By default, the Dataflow server jar does not include the MSSQL database driver dependency.
+If you want to use MSSQL for development/testing when running locally, you can specify the `local-dev-mssql` Maven profile when building.
+The following command will include the MSSQL driver dependency in the jar:
+```
+$ ./mvnw -s .settings.xml clean package -Plocal-dev-mssql
+```
+You can follow the steps in the [MSSQL on Mac ARM64](https://github.com/spring-cloud/spring-cloud-dataflow/wiki/MSSQL-on-Mac-ARM64#running-dataflow-locally-against-mssql) Wiki to run MSSQL locally in Docker with Dataflow pointing at it.
+
+> **NOTE:** If you are not running Mac ARM64, just skip the steps related to Homebrew and Colima.
+
+----
+
+## Running Locally w/ IBM DB2
+By default, the Dataflow server jar does not include the DB2 database driver dependency.
+If you want to use DB2 for development/testing when running locally, you can specify the `local-dev-db2` Maven profile when building.
+The following command will include the DB2 driver dependency in the jar:
+```
+$ ./mvnw -s .settings.xml clean package -Plocal-dev-db2
+```
+You can follow the steps in the [DB2 on Mac ARM64](https://github.com/spring-cloud/spring-cloud-dataflow/wiki/DB2-on-Mac-ARM64#running-dataflow-locally-against-db2) Wiki to run DB2 locally in Docker with Dataflow pointing at it.
+
+> **NOTE:** If you are not running Mac ARM64, just skip the steps related to Homebrew and Colima.

----

## Contributing

-We welcome contributions! Follow this [link](https://github.com/spring-cloud/spring-cloud-dataflow/blob/master/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-contributing.adoc) for more information on how to contribute.
+We welcome contributions! See the [CONTRIBUTING](./CONTRIBUTING.adoc) guide for details.

----
diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000000..8a9410d248 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,5 @@
+# Security Policy
+## Reporting a Vulnerability
+
+If you think you have found a security vulnerability, please **DO NOT** disclose it publicly until we’ve had a chance to fix it.
+Please don’t report security vulnerabilities using GitHub issues; instead, head over to https://spring.io/security-policy and learn how to disclose them responsibly.
diff --git a/build-carvel-package.sh b/build-carvel-package.sh new file mode 100755 index 0000000000..d25d6d4f5f --- /dev/null +++ b/build-carvel-package.sh @@ -0,0 +1,60 @@
+#!/bin/bash
+
+function create_and_clear() {
+  rm -rf "$1"
+  mkdir -p "$1"
+}
+
+SCDIR=$(realpath $(dirname "$(readlink -f "${BASH_SOURCE[0]}")"))
+set -euxo pipefail
+pushd "$SCDIR" > /dev/null
+./mvnw help:evaluate -Dexpression=project.version > /dev/null
+export DATAFLOW_VERSION=$(./mvnw help:evaluate -Dexpression=project.version -q -DforceStdout)
+export SKIPPER_VERSION=$(./mvnw help:evaluate -Dexpression=spring-cloud-skipper.version -pl spring-cloud-dataflow-parent -q -DforceStdout)
+
+# default PACKAGE_VERSION to the Dataflow version; the :- expansion keeps 'set -u' from aborting when it is unset
+if [ "${PACKAGE_VERSION:-}" = "" ]; then
+  export PACKAGE_VERSION=$DATAFLOW_VERSION
+fi
+
+# you can launch a local docker registry using docker run -d -p 5000:5000 --name registry registry:2.7
+# export REPO_PREFIX="localhost:5000/"
+readonly REPO_PREFIX="${REPO_PREFIX:-docker.io/}"
+
+export PACKAGE_BUNDLE_REPOSITORY="${REPO_PREFIX}springcloud/scdf-oss-package"
+export REPOSITORY_BUNDLE="${REPO_PREFIX}springcloud/scdf-oss-repo"
+
+export SKIPPER_REPOSITORY="springcloud/spring-cloud-skipper-server"
+export SERVER_REPOSITORY="springcloud/spring-cloud-dataflow-server"
+export CTR_VERSION=$DATAFLOW_VERSION
+export PACKAGE_NAME="scdf"
+export PACKAGE_BUNDLE_TEMPLATE="src/carvel/templates/bundle/package"
+export IMGPKG_LOCK_TEMPLATE="src/carvel/templates/imgpkg"
+export VENDIR_SRC_IN="src/carvel/config"
+export SERVER_VERSION="$DATAFLOW_VERSION"
+
+export PACKAGE_BUNDLE_GENERATED=/tmp/generated/packagebundle
+export IMGPKG_LOCK_GENERATED_IN=/tmp/generated/imgpkgin
+export IMGPKG_LOCK_GENERATED_OUT=/tmp/generated/imgpkgout
+create_and_clear $PACKAGE_BUNDLE_GENERATED
+create_and_clear $IMGPKG_LOCK_GENERATED_IN
+create_and_clear $IMGPKG_LOCK_GENERATED_OUT
+
+echo "bundle-path=$PACKAGE_BUNDLE_GENERATED"
+export SCDF_DIR="$SCDIR"
+
+sh "$SCDIR/.github/actions/build-package-bundle/build-package-bundle.sh"
+
+imgpkg push --bundle "$PACKAGE_BUNDLE_REPOSITORY:$PACKAGE_VERSION" --file "$PACKAGE_BUNDLE_GENERATED"
+
+export REPO_BUNDLE_TEMPLATE="src/carvel/templates/bundle/repo"
+
+export REPO_BUNDLE_RENDERED=/tmp/generated/reporendered
+export REPO_BUNDLE_GENERATED=/tmp/generated/repobundle
+create_and_clear $REPO_BUNDLE_RENDERED
+create_and_clear $REPO_BUNDLE_GENERATED
+
+sh "$SCDIR/.github/actions/build-repository-bundle/build-repository-bundle.sh"
+
+imgpkg push --bundle "$REPOSITORY_BUNDLE:$PACKAGE_VERSION" --file "$REPO_BUNDLE_GENERATED"
+
+popd
diff --git a/build-containers.sh b/build-containers.sh new file mode 100755 index 0000000000..aec91b44aa --- /dev/null +++ b/build-containers.sh @@ -0,0 +1,3 @@
+#!/bin/bash
+./mvnw install -s .settings.xml -DskipTests -T 1C -am -pl :spring-cloud-dataflow-server,:spring-cloud-skipper-server,:spring-cloud-dataflow-composed-task-runner,:spring-cloud-dataflow-tasklauncher-sink-rabbit,:spring-cloud-dataflow-tasklauncher-sink-kafka -B --no-transfer-progress
+./mvnw spring-boot:build-image -s .settings.xml -DskipTests -T 1C -pl :spring-cloud-dataflow-server,:spring-cloud-skipper-server,:spring-cloud-dataflow-composed-task-runner,:spring-cloud-dataflow-tasklauncher-sink-rabbit,:spring-cloud-dataflow-tasklauncher-sink-kafka -B --no-transfer-progress
\ No newline at end of file
diff --git a/lib/spring-doc-resources-0.2.5.zip b/lib/spring-doc-resources-0.2.5.zip new file mode 100644 index 0000000000..b1ff602652 Binary files /dev/null and b/lib/spring-doc-resources-0.2.5.zip differ
diff --git a/models/batch4-5-simple.adoc
b/models/batch4-5-simple.adoc new file mode 100644 index 0000000000..3ee7cdd389 --- /dev/null +++ b/models/batch4-5-simple.adoc @@ -0,0 +1,16 @@
+= Simple solution
+
+* SchemaTarget Selection represents a set of schema version, prefix, and name.
+* Boot 2 is the default, and task and batch will remain as they are currently.
+* Boot 3 task and batch tables will have the same prefix, BOOT3_.
+* The Data Flow server will set the prefix properties for task and batch.
+* Registration will require a Schema (Boot2, Boot3) selection indicator.
+* At task launch, the Data Flow server will create an entry in the correct task-execution table and sequence mechanism, with the prefix given by the task's registration.
+* Ability to disable Boot 3 support. The feature endpoint will include this indicator.
+* The endpoints to list job and task executions will have to accept the BootVersion as a query parameter; when it is absent, it implies the default condition. `http://localhost:9393/tasks/executions{?schemaTarget}`
+* When using the shell to list executions, it will be an optional parameter: `--schema-target=boot3`
+* When viewing the Task Execution list or Job Execution list, there will be a drop-down with the options Default and Boot3.
+* Each item in the list of executions includes links to retrieve the entity, and these will be encoded with the schemaTarget by the resource assembler.
+
+* The UI only needs to add the drop-downs and pass the selection into the query.
+* The user will not have to do anything extra when creating composed tasks.
diff --git a/pom.xml b/pom.xml index 04b477abab..a2ee37d1ce 100644 --- a/pom.xml +++ b/pom.xml @@ -1,26 +1,24 @@ - + 4.0.0 - spring-cloud-dataflow-parent - 2.8.0-SNAPSHOT + org.springframework.cloud + spring-cloud-dataflow + 3.0.0-SNAPSHOT + spring-cloud-dataflow + Spring Cloud Dataflow pom https://cloud.spring.io/spring-cloud-dataflow/ Pivotal Software, Inc. https://www.spring.io - - org.springframework.cloud - spring-cloud-dataflow-build - 2.8.0-SNAPSHOT - - Apache License, Version 2.0 https://www.apache.org/licenses/LICENSE-2.0 - Copyright 2014-2020 the original author or authors. + Copyright 2014-2021 the original author or authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
@@ -51,404 +49,92 @@ https://github.com/spring-cloud/spring-cloud-dataflow/graphs/contributors - - 1.8 - -Xdoclint:none - - 3.1.0-SNAPSHOT - - 2.8.0-SNAPSHOT - 2.6.0-SNAPSHOT - 2.6.0-SNAPSHOT - 2.6.0-SNAPSHOT - 2.6.0-SNAPSHOT - - 4.10.2 - - 2.7.0-SNAPSHOT - - 2.3.0 - - - 2.3.7.RELEASE - - 2.2.2.RELEASE - - 3.1.3.RELEASE - - 1.6.0-SNAPSHOT - - 1.2.0.RELEASE - 2.4 - 2.4 - - 0.8.5 - 3.0.2 - 2.2.0 - 1.5.5 - 0.5 - 2.11.1 - 3.0.2 - 1.2.1 - 2.10.6 - 1.11.731 - 1.8 - 1.15.2 - + spring-cloud-dataflow-build + + spring-cloud-dataflow-common + spring-cloud-common-security-config + spring-cloud-dataflow-parent spring-cloud-dataflow-container-registry spring-cloud-dataflow-configuration-metadata spring-cloud-dataflow-core-dsl spring-cloud-dataflow-core - spring-cloud-dataflow-registry - spring-cloud-dataflow-rest-resource - spring-cloud-dataflow-composed-task-runner spring-cloud-dataflow-server-core + spring-cloud-dataflow-rest-resource + spring-cloud-dataflow-audit + spring-cloud-dataflow-registry + spring-cloud-dataflow-platform-kubernetes + spring-cloud-dataflow-platform-cloudfoundry spring-cloud-dataflow-autoconfigure - spring-cloud-dataflow-server spring-cloud-dataflow-rest-client spring-cloud-dataflow-shell spring-cloud-dataflow-shell-core - spring-cloud-dataflow-classic-docs - spring-cloud-dataflow-docs spring-cloud-dataflow-completion - spring-cloud-dataflow-dependencies - spring-cloud-dataflow-platform-kubernetes - spring-cloud-dataflow-platform-cloudfoundry + spring-cloud-skipper spring-cloud-starter-dataflow-server spring-cloud-starter-dataflow-ui - spring-cloud-dataflow-audit + spring-cloud-dataflow-server + spring-cloud-dataflow-tasklauncher + spring-cloud-dataflow-single-step-batch-job + spring-cloud-dataflow-composed-task-runner spring-cloud-dataflow-test + spring-cloud-dataflow-dependencies + spring-cloud-dataflow-classic-docs + spring-cloud-dataflow-docs + spring-cloud-dataflow-package - - - - org.springframework.cloud - spring-cloud-dataflow-common-dependencies - ${spring-cloud-dataflow-common.version} - pom - import - - - org.springframework.cloud - spring-cloud-task-dependencies - ${spring-cloud-task.version} - pom - import - - - org.springframework.cloud - spring-cloud-skipper-dependencies - ${spring-cloud-skipper.version} - pom - import - - - org.springframework.cloud - spring-cloud-dataflow-dependencies - 2.8.0-SNAPSHOT - pom - import - - - org.testcontainers - testcontainers-bom - ${testcontainers.version} - pom - import - - - org.springframework.cloud - spring-cloud-dataflow-ui - ${spring-cloud-dataflow-ui.version} - - - org.springframework.cloud - spring-cloud-deployer-spi - ${spring-cloud-deployer.version} - - - org.springframework.cloud - spring-cloud-deployer-resource-support - ${spring-cloud-deployer.version} - - - org.springframework.cloud - spring-cloud-deployer-resource-maven - ${spring-cloud-deployer.version} - - - org.springframework.cloud - spring-cloud-deployer-resource-docker - ${spring-cloud-deployer.version} - - - org.springframework.cloud - spring-cloud-deployer-local - ${spring-cloud-deployer-local.version} - - - org.springframework.cloud - spring-cloud-deployer-cloudfoundry - ${spring-cloud-deployer-cloudfoundry.version} - - - io.pivotal.cfenv - java-cfenv - ${java-cfenv-boot.version} - - - io.pivotal.cfenv - java-cfenv-boot - ${java-cfenv-boot.version} - - - io.pivotal.cfenv - java-cfenv-boot-pivotal-sso - ${java-cfenv-boot.version} - - - io.pivotal.spring.cloud - spring-cloud-services-starter-config-client - 
${spring-cloud-services-starter-config-client.version} - - - org.springframework.shell - spring-shell - ${spring-shell.version} - - - org.springframework.cloud - spring-cloud-starter-common-security-config-web - ${spring-cloud-common-security-config.version} - - - commons-io - commons-io - ${commons-io.version} - - - commons-lang - commons-lang - ${commons-lang.version} - - - io.fabric8 - kubernetes-client - ${kubernetes-client.version} - - - org.springframework.cloud - spring-cloud-deployer-kubernetes - ${spring-cloud-deployer-kubernetes.version} - - - org.apache.directory.server - apacheds-protocol-ldap - ${apache-directory-server.version} - - - io.codearte.props2yaml - props2yaml - ${codearte-props2yml.version} - - - org.springframework.security.oauth - spring-security-oauth2 - ${spring-security-oauth2.version} - - - net.javacrumbs.json-unit - json-unit-assertj - ${json-unit.version} - - - com.google.code.findbugs - jsr305 - ${findbugs.version} - - - io.micrometer.prometheus - prometheus-rsocket-spring - ${prometheus-rsocket-spring.version} - - - joda-time - joda-time - ${joda-time.version} - - - org.apache.commons - commons-text - ${commons-text.version} - - - com.amazonaws - aws-java-sdk-ecr - ${aws-java-sdk-ecr.version} - - - + + + + + groovy-plugins-release + https://groovy.jfrog.io/artifactory/plugins-release + + + + + groovy-plugins-release + https://groovy.jfrog.io/artifactory/plugins-release + + org.apache.maven.plugins - maven-surefire-plugin - 2.22.1 + maven-compiler-plugin + 3.13.0 - - **/*Tests.java - **/*Test.java - - - **/Abstract*.java - - - ${argLine} + true + 17 - org.jacoco - jacoco-maven-plugin + org.codehaus.gmaven + groovy-maven-plugin + 2.1.1 + + + org.apache.groovy + groovy + 4.0.23 + pom + + - agent - - prepare-agent - - - - report - test + validate - report + execute + + + ${project.basedir} + + ${project.basedir}/src/test/groovy/check-pom.groovy + - - org.apache.maven.plugins - maven-checkstyle-plugin - - - - - org.springframework.boot - spring-boot-maven-plugin - ${spring-boot.version} - - - org.sonarsource.scanner.maven - sonar-maven-plugin - ${sonar-maven-plugin.version} - - - org.jacoco - jacoco-maven-plugin - ${jacoco-maven-plugin.version} - - - org.apache.maven.plugins - maven-jar-plugin - 3.0.2 - - - org.apache.maven.plugins - maven-source-plugin - 3.0.1 - - - - - - - org.apache.maven.plugins - maven-jxr-plugin - 2.5 - - - - - - deploymentfiles - - - - maven-resources-plugin - - - replace-deployment-files - process-resources - - copy-resources - - - true - ${basedir}/src - - - ${basedir}/src/templates - - **/* - - true - - - - - - - - - - - spring - - - spring-snapshots - Spring Snapshots - https://repo.spring.io/libs-snapshot - - true - - - - spring-milestones - Spring Milestones - https://repo.spring.io/libs-milestone-local - - false - - - - spring-releases - Spring Releases - https://repo.spring.io/release - - false - - - - - - spring-snapshots - Spring Snapshots - https://repo.spring.io/libs-snapshot-local - - true - - - - spring-milestones - Spring Milestones - https://repo.spring.io/libs-milestone-local - - false - - - - - diff --git a/run-integration-test.sh b/run-integration-test.sh new file mode 100755 index 0000000000..76e7657128 --- /dev/null +++ b/run-integration-test.sh @@ -0,0 +1,3 @@ +#!/bin/bash +GROUP=$1 +./mvnw verify -s .settings.xml -Dgroups="$GROUP" -Pfailsafe -pl :spring-cloud-dataflow-server -B --no-transfer-progress diff --git a/run-integration-tests.sh b/run-integration-tests.sh new file mode 100755 index 0000000000..d13d22eeef 
--- /dev/null +++ b/run-integration-tests.sh @@ -0,0 +1,6 @@ +#!/bin/bash +SCDIR=$(dirname "$(readlink -f "${BASH_SOURCE[0]}")") +pushd $SCDIR +./build-containers.sh +./run-integration-test.sh "mariadb,postgres,performance,oauth" + diff --git a/spring-cloud-common-security-config/README.md b/spring-cloud-common-security-config/README.md new file mode 100644 index 0000000000..5466106ed9 --- /dev/null +++ b/spring-cloud-common-security-config/README.md @@ -0,0 +1,3 @@ +# Spring Cloud Common Security + +This repo holds the security configuration classes that are common across Spring Cloud (Spring Cloud Data Flow/Skipper for now) projects that use **Role** based authentication/authorization for their runtime server application(s). diff --git a/spring-cloud-common-security-config/pom.xml b/spring-cloud-common-security-config/pom.xml new file mode 100644 index 0000000000..585167a54d --- /dev/null +++ b/spring-cloud-common-security-config/pom.xml @@ -0,0 +1,23 @@ + + + 4.0.0 + spring-cloud-common-security-config + 3.0.0-SNAPSHOT + pom + spring-cloud-common-security-config + Spring Cloud Common Security Config + + + org.springframework.cloud + spring-cloud-dataflow-build + 3.0.0-SNAPSHOT + ../spring-cloud-dataflow-build + + + + spring-cloud-common-security-config-core + spring-cloud-common-security-config-web + spring-cloud-starter-common-security-config-web + + + diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-core/pom.xml b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/pom.xml new file mode 100644 index 0000000000..4ec7bf0d21 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/pom.xml @@ -0,0 +1,28 @@ + + + 4.0.0 + + org.springframework.cloud + spring-cloud-common-security-config + 3.0.0-SNAPSHOT + + spring-cloud-common-security-config-core + spring-cloud-common-security-config-core + Spring Cloud Common Security Config Core + jar + + true + + + + org.springframework.security + spring-security-oauth2-client + + + org.springframework.boot + spring-boot-starter-test + test + + + diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/main/java/org/springframework/cloud/common/security/core/support/OAuth2AccessTokenProvidingClientHttpRequestInterceptor.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/main/java/org/springframework/cloud/common/security/core/support/OAuth2AccessTokenProvidingClientHttpRequestInterceptor.java new file mode 100644 index 0000000000..33bce77f53 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/main/java/org/springframework/cloud/common/security/core/support/OAuth2AccessTokenProvidingClientHttpRequestInterceptor.java @@ -0,0 +1,74 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.common.security.core.support; + +import java.io.IOException; + +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpRequest; +import org.springframework.http.client.ClientHttpRequestExecution; +import org.springframework.http.client.ClientHttpRequestInterceptor; +import org.springframework.http.client.ClientHttpResponse; +import org.springframework.security.oauth2.core.OAuth2AccessToken; +import org.springframework.util.Assert; + +/** + * This implementation of a {@link ClientHttpRequestInterceptor} will retrieve, if available, the OAuth2 Access Token + * and add it to the {@code Authorization} HTTP header. + * + * @author Gunnar Hillert + */ +public class OAuth2AccessTokenProvidingClientHttpRequestInterceptor implements ClientHttpRequestInterceptor { + + private final String staticOauthAccessToken; + + private final OAuth2TokenUtilsService oauth2TokenUtilsService; + + public OAuth2AccessTokenProvidingClientHttpRequestInterceptor(String staticOauthAccessToken) { + super(); + Assert.hasText(staticOauthAccessToken, "staticOauthAccessToken must not be null or empty."); + this.staticOauthAccessToken = staticOauthAccessToken; + this.oauth2TokenUtilsService = null; + } + + public OAuth2AccessTokenProvidingClientHttpRequestInterceptor(OAuth2TokenUtilsService oauth2TokenUtilsService) { + super(); + this.oauth2TokenUtilsService = oauth2TokenUtilsService; + this.staticOauthAccessToken = null; + } + + @Override + public ClientHttpResponse intercept(HttpRequest request, byte[] body, ClientHttpRequestExecution execution) + throws IOException { + + final String tokenToUse; + + if (this.staticOauthAccessToken != null) { + tokenToUse = this.staticOauthAccessToken; + } + else if (this.oauth2TokenUtilsService != null){ + tokenToUse = this.oauth2TokenUtilsService.getAccessTokenOfAuthenticatedUser(); + } + else { + tokenToUse = null; + } + + if (tokenToUse != null) { + request.getHeaders().add(HttpHeaders.AUTHORIZATION, OAuth2AccessToken.TokenType.BEARER.getValue() + " " + tokenToUse); + } + return execution.execute(request, body); + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/main/java/org/springframework/cloud/common/security/core/support/OAuth2TokenUtilsService.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/main/java/org/springframework/cloud/common/security/core/support/OAuth2TokenUtilsService.java new file mode 100644 index 0000000000..f03ba97f8a --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/main/java/org/springframework/cloud/common/security/core/support/OAuth2TokenUtilsService.java @@ -0,0 +1,51 @@ +/* + * Copyright 2019-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.common.security.core.support; + +import org.springframework.security.core.Authentication; +import org.springframework.security.oauth2.client.OAuth2AuthorizedClient; +import org.springframework.security.oauth2.client.authentication.OAuth2AuthenticationToken; + +/** + * Service providing OAuth2 Security-related utility methods that may + * require other Spring Security services. + * + * @author Gunnar Hillert + * @author Corneil du Plessis + * + */ +public interface OAuth2TokenUtilsService { + + /** + * Retrieves the access token from the {@link Authentication} implementation. + * + * @return Should never return null. + */ + String getAccessTokenOfAuthenticatedUser(); + + /** + * Retrieves the {@link OAuth2AuthorizedClient} for the given authentication token. + * + * @param auth2AuthenticationToken The token of the authenticated user + * @return The authorized client for the token + */ + OAuth2AuthorizedClient getAuthorizedClient(OAuth2AuthenticationToken auth2AuthenticationToken); + + /** + * Removes the given authorized client. + * + * @param auth2AuthorizedClient The authorized client to remove + */ + void removeAuthorizedClient(OAuth2AuthorizedClient auth2AuthorizedClient); + +}
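The interceptor and the service above are designed to work together: the interceptor either carries a fixed token or asks an OAuth2TokenUtilsService for the token of the current user on every request. A minimal usage sketch follows; the RestTemplate wiring and the tokenService instance are illustrative assumptions, not part of this change:

    import org.springframework.cloud.common.security.core.support.OAuth2AccessTokenProvidingClientHttpRequestInterceptor;
    import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService;
    import org.springframework.web.client.RestTemplate;

    class RestTemplateWiring {

        // Variant 1: every request carries the same fixed token.
        static RestTemplate withStaticToken(String token) {
            RestTemplate restTemplate = new RestTemplate();
            restTemplate.getInterceptors().add(
                    new OAuth2AccessTokenProvidingClientHttpRequestInterceptor(token));
            return restTemplate;
        }

        // Variant 2: the token of the currently authenticated user is
        // resolved per request through the service declared above.
        static RestTemplate withTokenService(OAuth2TokenUtilsService tokenService) {
            RestTemplate restTemplate = new RestTemplate();
            restTemplate.getInterceptors().add(
                    new OAuth2AccessTokenProvidingClientHttpRequestInterceptor(tokenService));
            return restTemplate;
        }
    }

In both variants the interceptor prepends an Authorization: Bearer header; if no token can be resolved, the request is executed unchanged.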
 diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/test/java/org/springframework/cloud/common/security/core/support/OAuth2AccessTokenProvidingClientHttpRequestInterceptorTests.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/test/java/org/springframework/cloud/common/security/core/support/OAuth2AccessTokenProvidingClientHttpRequestInterceptorTests.java new file mode 100644 index 0000000000..d92948c524 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/test/java/org/springframework/cloud/common/security/core/support/OAuth2AccessTokenProvidingClientHttpRequestInterceptorTests.java @@ -0,0 +1,107 @@ +/* + * Copyright 2018-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.core.support; + +import java.io.IOException; +import java.util.Collections; + +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpRequest; +import org.springframework.http.client.ClientHttpRequestExecution; +import org.springframework.test.util.ReflectionTestUtils; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.api.Assertions.entry; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * Tests for {@link OAuth2AccessTokenProvidingClientHttpRequestInterceptor}. + * + * @author Gunnar Hillert + * @author Corneil du Plessis + */ +class OAuth2AccessTokenProvidingClientHttpRequestInterceptorTests { + + @Test + void oAuth2AccessTokenProvidingClientHttpRequestInterceptorWithEmptyConstructor() { + assertThatThrownBy(() -> new OAuth2AccessTokenProvidingClientHttpRequestInterceptor("")) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("staticOauthAccessToken must not be null or empty."); + } + + @Test + void oAuth2AccessTokenProvidingClientHttpRequestInterceptorWithStaticTokenConstructor() { + final OAuth2AccessTokenProvidingClientHttpRequestInterceptor interceptor = + new OAuth2AccessTokenProvidingClientHttpRequestInterceptor("foobar"); + + final String accessToken = (String) ReflectionTestUtils.getField(interceptor, "staticOauthAccessToken"); + assertThat(accessToken).isEqualTo("foobar"); + } + + @Test + void interceptWithStaticToken() throws IOException { + final OAuth2AccessTokenProvidingClientHttpRequestInterceptor interceptor = + new OAuth2AccessTokenProvidingClientHttpRequestInterceptor("foobar"); + final HttpHeaders headers = setupTest(interceptor); + + assertThat(headers) + .hasSize(1) + .contains(entry("Authorization", Collections.singletonList("Bearer foobar"))); + } + + @Test + void interceptWithAuthentication() throws IOException { + final OAuth2TokenUtilsService oauth2TokenUtilsService = mock(OAuth2TokenUtilsService.class); + when(oauth2TokenUtilsService.getAccessTokenOfAuthenticatedUser()).thenReturn("foo-bar-123-token"); + + final OAuth2AccessTokenProvidingClientHttpRequestInterceptor interceptor = + new OAuth2AccessTokenProvidingClientHttpRequestInterceptor(oauth2TokenUtilsService); + final HttpHeaders headers = setupTest(interceptor); + + assertThat(headers) + .hasSize(1) + .contains(entry("Authorization", Collections.singletonList("Bearer foo-bar-123-token"))); + } + + @Test + void interceptWithAuthenticationAndStaticToken() throws IOException { + final OAuth2TokenUtilsService oauth2TokenUtilsService = mock(OAuth2TokenUtilsService.class); + when(oauth2TokenUtilsService.getAccessTokenOfAuthenticatedUser()).thenReturn("foo-bar-123-token"); + + final OAuth2AccessTokenProvidingClientHttpRequestInterceptor interceptor = + new OAuth2AccessTokenProvidingClientHttpRequestInterceptor("foobar"); + final HttpHeaders headers = setupTest(interceptor); + + assertThat(headers) + .hasSize(1) + .contains(entry("Authorization", Collections.singletonList("Bearer foobar"))); + } + + private HttpHeaders setupTest(OAuth2AccessTokenProvidingClientHttpRequestInterceptor interceptor) throws IOException { + final HttpRequest request = Mockito.mock(HttpRequest.class); + final ClientHttpRequestExecution clientHttpRequestExecution = Mockito.mock(ClientHttpRequestExecution.class); + final HttpHeaders headers = new HttpHeaders(); + +
when(request.getHeaders()).thenReturn(headers); + interceptor.intercept(request, null, clientHttpRequestExecution); + verify(clientHttpRequestExecution, Mockito.times(1)).execute(request, null); + return headers; + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/pom.xml b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/pom.xml new file mode 100644 index 0000000000..23093553be --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/pom.xml @@ -0,0 +1,80 @@ + + + 4.0.0 + + org.springframework.cloud + spring-cloud-common-security-config + 3.0.0-SNAPSHOT + + spring-cloud-common-security-config-web + spring-cloud-common-security-config-web + Spring Cloud Common Security Config Web + jar + + true + + + + org.springframework.cloud + spring-cloud-common-security-config-core + ${project.version} + + + org.springframework.security + spring-security-oauth2-jose + + + org.springframework.security + spring-security-oauth2-resource-server + + + org.springframework + spring-webflux + + + io.projectreactor.netty + reactor-netty + + + jakarta.servlet + jakarta.servlet-api + + + org.springframework.boot + spring-boot-starter-security + + + org.springframework.boot + spring-boot-starter-web + + + org.springframework.session + spring-session-core + + + org.springframework.boot + spring-boot-starter-actuator + + + org.springframework.boot + spring-boot-starter-test + test + + + com.squareup.okhttp3 + okhttp + test + + + com.squareup.okhttp3 + mockwebserver + test + + + junit + junit + test + + + diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/AuthorizationProperties.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/AuthorizationProperties.java new file mode 100644 index 0000000000..8efea5f00e --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/AuthorizationProperties.java @@ -0,0 +1,142 @@ +/* + * Copyright 2016-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Holds configuration for the authorization aspects of security. 
+ * + * @author Eric Bottard + * @author Gunnar Hillert + * @author Ilayaperumal Gopinathan + * @author Mike Heath + */ +public class AuthorizationProperties { + + private String externalAuthoritiesUrl; + + private List rules = new ArrayList<>(); + + private String dashboardUrl = "/dashboard"; + + private String loginUrl = "/#/login"; + + private String loginProcessingUrl = "/login"; + + private String logoutUrl = "/logout"; + + private String logoutSuccessUrl = "/logout-success.html"; + + private List permitAllPaths = new ArrayList<>(); + + private List authenticatedPaths = new ArrayList<>(); + + /** + * Role-mapping configuration per OAuth2 provider. + */ + private final Map providerRoleMappings = new HashMap<>(); + + private String defaultProviderId; + + public Map getProviderRoleMappings() { + return providerRoleMappings; + } + + public List getRules() { + return rules; + } + + public void setRules(List rules) { + this.rules = rules; + } + + public String getExternalAuthoritiesUrl() { + return externalAuthoritiesUrl; + } + + public void setExternalAuthoritiesUrl(String externalAuthoritiesUrl) { + this.externalAuthoritiesUrl = externalAuthoritiesUrl; + } + + public String getDashboardUrl() { + return dashboardUrl; + } + + public void setDashboardUrl(String dashboardUrl) { + this.dashboardUrl = dashboardUrl; + } + + public String getLoginUrl() { + return loginUrl; + } + + public void setLoginUrl(String loginUrl) { + this.loginUrl = loginUrl; + } + + public String getLoginProcessingUrl() { + return loginProcessingUrl; + } + + public void setLoginProcessingUrl(String loginProcessingUrl) { + this.loginProcessingUrl = loginProcessingUrl; + } + + public String getLogoutUrl() { + return logoutUrl; + } + + public void setLogoutUrl(String logoutUrl) { + this.logoutUrl = logoutUrl; + } + + public String getLogoutSuccessUrl() { + return logoutSuccessUrl; + } + + public void setLogoutSuccessUrl(String logoutSuccessUrl) { + this.logoutSuccessUrl = logoutSuccessUrl; + } + + public List getPermitAllPaths() { + return permitAllPaths; + } + + public void setPermitAllPaths(List permitAllPaths) { + this.permitAllPaths = permitAllPaths; + } + + public List getAuthenticatedPaths() { + return authenticatedPaths; + } + + public void setAuthenticatedPaths(List authenticatedPaths) { + this.authenticatedPaths = authenticatedPaths; + } + + public void setDefaultProviderId(String defaultProviderId) { + this.defaultProviderId = defaultProviderId; + } + + public String getDefaultProviderId() { + return defaultProviderId; + } + +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/CommonSecurityAutoConfiguration.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/CommonSecurityAutoConfiguration.java new file mode 100644 index 0000000000..f3011fff7d --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/CommonSecurityAutoConfiguration.java @@ -0,0 +1,32 @@ +/* + * Copyright 2018-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security; + +import org.springframework.boot.actuate.autoconfigure.security.servlet.ManagementWebSecurityAutoConfiguration; +import org.springframework.boot.autoconfigure.AutoConfigureBefore; +import org.springframework.boot.autoconfigure.security.oauth2.client.servlet.OAuth2ClientAutoConfiguration; +import org.springframework.boot.autoconfigure.security.oauth2.resource.servlet.OAuth2ResourceServerAutoConfiguration; +import org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration; +import org.springframework.context.annotation.Configuration; + +/** + * Auto-configuration that ensures the common security configuration is applied + * before Spring Boot's own security, OAuth2 client and resource server auto-configurations. + */ +@Configuration(proxyBeanMethods = false) +@AutoConfigureBefore({ + SecurityAutoConfiguration.class, + ManagementWebSecurityAutoConfiguration.class, + OAuth2ClientAutoConfiguration.class, + OAuth2ResourceServerAutoConfiguration.class}) +public class CommonSecurityAutoConfiguration {}
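The class that follows handles the opposite switch position: when OAuth2 security is disabled, every path is excluded from the security filter chain. Roughly, with the property named in its javadoc (the actual condition is evaluated by OnOAuth2SecurityDisabled, which is outside this section):

    # Security off: IgnoreAllSecurityConfiguration applies, all paths are ignored.
    spring.cloud.common.security.enabled=false

    # Security on: the OAuth2-based security configuration applies instead.
    spring.cloud.common.security.enabled=true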
 diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/IgnoreAllSecurityConfiguration.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/IgnoreAllSecurityConfiguration.java new file mode 100644 index 0000000000..29bb4d4858 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/IgnoreAllSecurityConfiguration.java @@ -0,0 +1,48 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security; + +import org.springframework.cloud.common.security.support.OnOAuth2SecurityDisabled; +import org.springframework.context.annotation.Conditional; +import org.springframework.context.annotation.Configuration; +import org.springframework.security.config.annotation.web.WebSecurityConfigurer; +import org.springframework.security.config.annotation.web.builders.WebSecurity; + +/** + * Spring Security {@link WebSecurityConfigurer} that ignores all paths when security is not enabled. + * + * Setting the org.springframework.cloud.common.security.enabled=true property disables this configuration and + * falls back to the Spring Boot default security configuration. + * + * @author Janne Valkealahti + * @author Gunnar Hillert + * @author Christian Tzolov + * + */ +@Configuration +@Conditional(OnOAuth2SecurityDisabled.class) +public class IgnoreAllSecurityConfiguration implements WebSecurityConfigurer<WebSecurity> { + + @Override + public void init(WebSecurity builder) { + } + + @Override + public void configure(WebSecurity builder) { + builder.ignoring().requestMatchers("/**"); + } + +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/ManualOAuthAuthenticationProvider.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/ManualOAuthAuthenticationProvider.java new file mode 100644 index 0000000000..047eb5ba52 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/ManualOAuthAuthenticationProvider.java @@ -0,0 +1,119 @@ +/* + * Copyright 2016-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security; + +import org.slf4j.LoggerFactory; + +import org.springframework.security.authentication.AuthenticationProvider; +import org.springframework.security.authentication.AuthenticationServiceException; +import org.springframework.security.authentication.BadCredentialsException; +import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.AuthenticationException; +import org.springframework.security.core.context.SecurityContext; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.security.oauth2.client.endpoint.OAuth2AccessTokenResponseClient; +import org.springframework.security.oauth2.client.endpoint.OAuth2PasswordGrantRequest; +import org.springframework.security.oauth2.client.registration.ClientRegistration; +import org.springframework.security.oauth2.client.registration.ClientRegistrationRepository; +import org.springframework.security.oauth2.core.AuthorizationGrantType; +import org.springframework.security.oauth2.core.OAuth2AuthorizationException; +import org.springframework.security.oauth2.core.endpoint.OAuth2AccessTokenResponse; +import org.springframework.security.oauth2.server.resource.authentication.BearerTokenAuthenticationToken; +import org.springframework.security.oauth2.server.resource.authentication.OpaqueTokenAuthenticationProvider; +import org.springframework.security.oauth2.server.resource.introspection.OpaqueTokenIntrospector; +import org.springframework.web.client.ResourceAccessException; + +/** + * Provides a custom {@link AuthenticationProvider} that allows for authentication + * (username and password) against an OAuth Server using a {@code password grant}.
+ * + * @author Gunnar Hillert + */ +public class ManualOAuthAuthenticationProvider implements AuthenticationProvider { + + private static final org.slf4j.Logger logger = LoggerFactory.getLogger(ManualOAuthAuthenticationProvider.class); + + private final OAuth2AccessTokenResponseClient<OAuth2PasswordGrantRequest> oAuth2PasswordTokenResponseClient; + private final ClientRegistrationRepository clientRegistrationRepository; + private final AuthenticationProvider authenticationProvider; + private final String providerId; + + public ManualOAuthAuthenticationProvider( + OAuth2AccessTokenResponseClient<OAuth2PasswordGrantRequest> oAuth2PasswordTokenResponseClient, + ClientRegistrationRepository clientRegistrationRepository, + OpaqueTokenIntrospector opaqueTokenIntrospector, + String providerId) { + + this.oAuth2PasswordTokenResponseClient = oAuth2PasswordTokenResponseClient; + this.clientRegistrationRepository = clientRegistrationRepository; + this.authenticationProvider = + new OpaqueTokenAuthenticationProvider(opaqueTokenIntrospector); + this.providerId = providerId; + } + + @Override + public Authentication authenticate(Authentication authentication) throws AuthenticationException { + final String username = authentication.getName(); + final String password = authentication.getCredentials().toString(); + + final ClientRegistration clientRegistration = clientRegistrationRepository.findByRegistrationId(providerId); + final ClientRegistration clientRegistrationPassword = ClientRegistration.withClientRegistration(clientRegistration).authorizationGrantType(AuthorizationGrantType.PASSWORD).build(); + + final OAuth2PasswordGrantRequest grantRequest = new OAuth2PasswordGrantRequest(clientRegistrationPassword, username, password); + final OAuth2AccessTokenResponse accessTokenResponse; + final String accessTokenUri = clientRegistration.getProviderDetails().getTokenUri(); + + try { + accessTokenResponse = oAuth2PasswordTokenResponseClient.getTokenResponse(grantRequest); + logger.warn("Authenticating user '{}' using accessTokenUri '{}'.", username, accessTokenUri); + } + catch (OAuth2AuthorizationException e) { + if (e.getCause() instanceof ResourceAccessException) { + final String errorMessage = String.format( + "While authenticating user '%s': " + "Unable to access accessTokenUri '%s'.", username, + accessTokenUri); + logger.error(errorMessage + " Error message: {}.", e.getCause().getMessage()); + throw new AuthenticationServiceException(errorMessage, e); + } + else { + throw new BadCredentialsException(String.format("Access denied for user '%s'.", username), e); + } + + } + + final BearerTokenAuthenticationToken authenticationRequest = new BearerTokenAuthenticationToken(accessTokenResponse.getAccessToken().getTokenValue()); + + Authentication newAuthentication = null; + try { + newAuthentication = this.authenticationProvider.authenticate(authenticationRequest); + SecurityContext context = SecurityContextHolder.createEmptyContext(); + context.setAuthentication(newAuthentication); + SecurityContextHolder.setContext(context); + } catch (AuthenticationException failed) { + SecurityContextHolder.clearContext(); + logger.warn("Authentication request failed!", failed); + //this.authenticationFailureHandler.onAuthenticationFailure(request, response, failed); + } + + return newAuthentication; + } + + @Override + public boolean supports(Class<?> authentication) { + return authentication.equals(UsernamePasswordAuthenticationToken.class); + } +} diff --git
a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/OAuthClientConfiguration.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/OAuthClientConfiguration.java new file mode 100644 index 0000000000..4ef9ba1aa8 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/OAuthClientConfiguration.java @@ -0,0 +1,201 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security; + +import java.net.URI; +import java.util.ArrayList; +import java.util.List; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.autoconfigure.security.oauth2.client.OAuth2ClientProperties; +import org.springframework.boot.autoconfigure.security.oauth2.resource.OAuth2ResourceServerProperties; +import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService; +import org.springframework.cloud.common.security.support.AuthoritiesMapper; +import org.springframework.cloud.common.security.support.CustomAuthoritiesOpaqueTokenIntrospector; +import org.springframework.cloud.common.security.support.CustomOAuth2OidcUserService; +import org.springframework.cloud.common.security.support.CustomPlainOAuth2UserService; +import org.springframework.cloud.common.security.support.DefaultAuthoritiesMapper; +import org.springframework.cloud.common.security.support.DefaultOAuth2TokenUtilsService; +import org.springframework.cloud.common.security.support.ExternalOauth2ResourceAuthoritiesMapper; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.security.authentication.AuthenticationProvider; +import org.springframework.security.authentication.ProviderManager; +import org.springframework.security.oauth2.client.OAuth2AuthorizedClientManager; +import org.springframework.security.oauth2.client.OAuth2AuthorizedClientService; +import org.springframework.security.oauth2.client.endpoint.DefaultPasswordTokenResponseClient; +import org.springframework.security.oauth2.client.endpoint.OAuth2AccessTokenResponseClient; +import org.springframework.security.oauth2.client.endpoint.OAuth2PasswordGrantRequest; +import org.springframework.security.oauth2.client.oidc.userinfo.OidcUserRequest; +import org.springframework.security.oauth2.client.registration.ClientRegistrationRepository; +import org.springframework.security.oauth2.client.userinfo.OAuth2UserRequest; +import org.springframework.security.oauth2.client.userinfo.OAuth2UserService; +import 
org.springframework.security.oauth2.client.web.reactive.function.client.ServletOAuth2AuthorizedClientExchangeFilterFunction; +import org.springframework.security.oauth2.core.oidc.user.OidcUser; +import org.springframework.security.oauth2.core.user.OAuth2User; +import org.springframework.security.oauth2.server.resource.introspection.OpaqueTokenIntrospector; +import org.springframework.util.StringUtils; +import org.springframework.web.reactive.function.client.WebClient; + +@Configuration(proxyBeanMethods = false) +public class OAuthClientConfiguration { + + @Configuration(proxyBeanMethods = false) + protected static class OAuth2AccessTokenResponseClientConfig { + @Bean + OAuth2AccessTokenResponseClient oAuth2PasswordTokenResponseClient() { + return new DefaultPasswordTokenResponseClient(); + } + } + + @Configuration(proxyBeanMethods = false) + @ConditionalOnProperty(prefix = "spring.security.oauth2.resourceserver.opaquetoken", value = "introspection-uri") + protected static class AuthenticationProviderConfig { + + protected OpaqueTokenIntrospector opaqueTokenIntrospector; + + @Autowired(required = false) + public void setOpaqueTokenIntrospector(OpaqueTokenIntrospector opaqueTokenIntrospector) { + this.opaqueTokenIntrospector = opaqueTokenIntrospector; + } + + @Bean + protected AuthenticationProvider authenticationProvider( + OAuth2AccessTokenResponseClient oAuth2PasswordTokenResponseClient, + ClientRegistrationRepository clientRegistrationRepository, + AuthorizationProperties authorizationProperties, + OAuth2ClientProperties oauth2ClientProperties) { + return new ManualOAuthAuthenticationProvider( + oAuth2PasswordTokenResponseClient, + clientRegistrationRepository, + this.opaqueTokenIntrospector, + calculateDefaultProviderId(authorizationProperties, oauth2ClientProperties)); + + } + } + + @Configuration(proxyBeanMethods = false) + @ConditionalOnProperty(prefix = "spring.security.oauth2.resourceserver.opaquetoken", value = "introspection-uri") + protected static class ProviderManagerConfig { + private AuthenticationProvider authenticationProvider; + + @Autowired(required = false) + protected void setAuthenticationProvider(AuthenticationProvider authenticationProvider) { + this.authenticationProvider = authenticationProvider; + } + + @Bean + protected ProviderManager providerManager() { + List providers = new ArrayList<>(); + providers.add(authenticationProvider); + return new ProviderManager(providers); + } + } + + @Configuration(proxyBeanMethods = false) + protected static class OAuth2TokenUtilsServiceConfig { + @Bean + protected OAuth2TokenUtilsService oauth2TokenUtilsService(OAuth2AuthorizedClientService oauth2AuthorizedClientService) { + return new DefaultOAuth2TokenUtilsService(oauth2AuthorizedClientService); + } + } + + @Configuration(proxyBeanMethods = false) + protected static class AuthoritiesMapperConfig { + + @Bean + protected AuthoritiesMapper authorityMapper(AuthorizationProperties authorizationProperties, + OAuth2ClientProperties oAuth2ClientProperties) { + AuthoritiesMapper authorityMapper; + if (!StringUtils.hasText(authorizationProperties.getExternalAuthoritiesUrl())) { + authorityMapper = new DefaultAuthoritiesMapper( + authorizationProperties.getProviderRoleMappings(), + calculateDefaultProviderId(authorizationProperties, oAuth2ClientProperties)); + } else { + authorityMapper = new ExternalOauth2ResourceAuthoritiesMapper( + URI.create(authorizationProperties.getExternalAuthoritiesUrl())); + } + return authorityMapper; + } + } + + @Configuration(proxyBeanMethods = false) + 
protected static class OidcUserServiceConfig { + + @Bean + protected OAuth2UserService oidcUserService(AuthoritiesMapper authoritiesMapper) { + return new CustomOAuth2OidcUserService(authoritiesMapper); + } + } + + @Configuration(proxyBeanMethods = false) + protected static class PlainOauth2UserServiceConfig { + + @Bean + protected OAuth2UserService plainOauth2UserService( + AuthoritiesMapper authoritiesMapper) { + return new CustomPlainOAuth2UserService(authoritiesMapper); + } + } + + @Configuration(proxyBeanMethods = false) + @ConditionalOnProperty(prefix = "spring.security.oauth2.resourceserver.opaquetoken", value = "introspection-uri") + protected static class OpaqueTokenIntrospectorConfig { + @Bean + protected OpaqueTokenIntrospector opaqueTokenIntrospector(OAuth2ResourceServerProperties oAuth2ResourceServerProperties, + AuthoritiesMapper authoritiesMapper) { + return new CustomAuthoritiesOpaqueTokenIntrospector( + oAuth2ResourceServerProperties.getOpaquetoken().getIntrospectionUri(), + oAuth2ResourceServerProperties.getOpaquetoken().getClientId(), + oAuth2ResourceServerProperties.getOpaquetoken().getClientSecret(), + authoritiesMapper); + } + } + + public static String calculateDefaultProviderId(AuthorizationProperties authorizationProperties, OAuth2ClientProperties oauth2ClientProperties) { + if (authorizationProperties.getDefaultProviderId() != null) { + return authorizationProperties.getDefaultProviderId(); + } + else if (oauth2ClientProperties.getRegistration().size() == 1) { + return oauth2ClientProperties.getRegistration().entrySet().iterator().next() + .getKey(); + } + else if (oauth2ClientProperties.getRegistration().size() > 1 + && !StringUtils.hasText(authorizationProperties.getDefaultProviderId())) { + throw new IllegalStateException("defaultProviderId must be set if more than 1 Registration is provided."); + } + else { + throw new IllegalStateException("Unable to retrieve default provider id."); + } + } + + @Configuration(proxyBeanMethods = false) + protected static class WebClientConfig { + + @Bean + protected WebClient webClient(OAuth2AuthorizedClientManager authorizedClientManager) { + ServletOAuth2AuthorizedClientExchangeFilterFunction oauth2Client = + new ServletOAuth2AuthorizedClientExchangeFilterFunction(authorizedClientManager); + oauth2Client.setDefaultOAuth2AuthorizedClient(true); + return WebClient.builder() + .apply(oauth2Client.oauth2Configuration()) + .build(); + } + } + + +} diff --git a/spring-cloud-dataflow-rest-resource/.jdk8 b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/OAuthSecurityConfiguration.java similarity index 100% rename from spring-cloud-dataflow-rest-resource/.jdk8 rename to spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/OAuthSecurityConfiguration.java diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/ProviderRoleMapping.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/ProviderRoleMapping.java new file mode 100644 index 0000000000..fe679e6bc5 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/ProviderRoleMapping.java @@ -0,0 +1,264 @@ +/* + * Copyright 2019-2021 the original author or 
authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.springframework.cloud.common.security.support.CoreSecurityRoles; +import org.springframework.util.Assert; +import org.springframework.util.CollectionUtils; +import org.springframework.util.StringUtils; + +/** + * Holds configuration for the authorization aspects of security. + * + * @author Gunnar Hillert + * + */ +public class ProviderRoleMapping { + + private String oauthScopePrefix = "dataflow."; + private String rolePrefix = "ROLE_"; + private String groupClaim = "roles"; + private boolean mapOauthScopes = false; + private boolean parseOauthScopePathParts = true; + private boolean mapGroupClaims = false; + private Map roleMappings = new HashMap<>(0); + private Map groupMappings = new HashMap<>(0); + private String principalClaimName; + + public ProviderRoleMapping() { + super(); + } + + public ProviderRoleMapping(boolean mapOauthScopes) { + this.mapOauthScopes = mapOauthScopes; + } + + public ProviderRoleMapping(boolean mapOauthScopes, Map roleMappings) { + Assert.notNull(roleMappings, "roleMappings must not be null."); + this.mapOauthScopes = mapOauthScopes; + this.roleMappings = roleMappings; + } + + public boolean isParseOauthScopePathParts() { + return parseOauthScopePathParts; + } + + /** + * Sets whether or not to treat OAuth scopes as URIs during the role mapping. + * When set to {@code true} the OAuth scope will be treated as a URI and the leading part will be ignored (eg. 'api://dataflow-server/dataflow.create' will result in 'dataflow.create'). + * When set to {@code false} the OAuth scope will be used as-is. This is useful in cases where the scope is not a URI and contains '/' leading characters. + * + * @param parseOauthScopePathParts whether or not to treat OAuth scopes as URIs during the role mapping + */ + public void setParseOauthScopePathParts(boolean parseOauthScopePathParts) { + this.parseOauthScopePathParts = parseOauthScopePathParts; + } + + public boolean isMapOauthScopes() { + return mapOauthScopes; + } + + /** + * If set to true, Oauth scopes will be mapped to corresponding Data Flow roles. + * Otherwise, if set to false, or not set at all, all roles will be assigned to users. + * + * @param mapOauthScopes If not set defaults to false + */ + public void setMapOauthScopes(boolean mapOauthScopes) { + this.mapOauthScopes = mapOauthScopes; + } + + public boolean isMapGroupClaims() { + return mapGroupClaims; + } + + public void setMapGroupClaims(boolean mapGroupClaims) { + this.mapGroupClaims = mapGroupClaims; + } + + /** + * When using OAuth2 with enabled {@link #setMapOauthScopes(boolean)}, you can optionally specify a custom + * mapping of OAuth scopes to role names as they exist in the Data Flow application. If not + * set, then the OAuth scopes themselves must match the role names: + * + *
• MANAGE = dataflow.manage + * • VIEW = dataflow.view + * • CREATE = dataflow.create
+ * + * @return Optional (May be null). Returns a map of scope-to-role mappings. + */ + public Map getRoleMappings() { + return roleMappings; + } + + public ProviderRoleMapping addRoleMapping(String oauthScope, String roleName) { + this.roleMappings.put(oauthScope, roleName); + return this; + } + + public Map getGroupMappings() { + return groupMappings; + } + + public void setGroupMappings(Map groupMappings) { + this.groupMappings = groupMappings; + } + + public String getGroupClaim() { + return groupClaim; + } + + public void setGroupClaim(String groupClaim) { + this.groupClaim = groupClaim; + } + + public String getPrincipalClaimName() { + return principalClaimName; + } + + public void setPrincipalClaimName(String principalClaimName) { + this.principalClaimName = principalClaimName; + } + + public Map convertGroupMappingKeysToCoreSecurityRoles() { + + final Map groupMappings = new HashMap<>(0); + + if (CollectionUtils.isEmpty(this.groupMappings)) { + for (CoreSecurityRoles roleEnum : CoreSecurityRoles.values()) { + final String roleName = this.oauthScopePrefix + roleEnum.getKey(); + groupMappings.put(roleEnum, roleName); + } + return groupMappings; + } + + final List unmappedRoles = new ArrayList<>(0); + + for (CoreSecurityRoles coreRole : CoreSecurityRoles.values()) { + + final String coreSecurityRoleName; + if (this.rolePrefix.length() > 0 && !coreRole.getKey().startsWith(rolePrefix)) { + coreSecurityRoleName = rolePrefix + coreRole.getKey(); + } + else { + coreSecurityRoleName = coreRole.getKey(); + } + + final String oauthScope = this.groupMappings.get(coreSecurityRoleName); + + if (oauthScope == null) { + unmappedRoles.add(coreRole); + } + else { + groupMappings.put(coreRole, oauthScope); + } + } + + if (!unmappedRoles.isEmpty()) { + throw new IllegalArgumentException( + String.format("The following %s %s not mapped: %s.", + unmappedRoles.size(), + unmappedRoles.size() > 1 ? "roles are" : "role is", + StringUtils.collectionToDelimitedString(unmappedRoles, ", "))); + } + + return groupMappings; + } + + /** + * @return Map containing the {@link CoreSecurityRoles} as key and the associated role name (String) as value. + */ + public Map convertRoleMappingKeysToCoreSecurityRoles() { + + final Map roleMappings = new HashMap<>(0); + + if (CollectionUtils.isEmpty(this.roleMappings)) { + for (CoreSecurityRoles roleEnum : CoreSecurityRoles.values()) { + final String roleName = this.oauthScopePrefix + roleEnum.getKey(); + roleMappings.put(roleEnum, roleName); + } + return roleMappings; + } + + final List unmappedRoles = new ArrayList<>(0); + + for (CoreSecurityRoles coreRole : CoreSecurityRoles.values()) { + + final String coreSecurityRoleName; + if (this.rolePrefix.length() > 0 && !coreRole.getKey().startsWith(rolePrefix)) { + coreSecurityRoleName = rolePrefix + coreRole.getKey(); + } + else { + coreSecurityRoleName = coreRole.getKey(); + } + + final String oauthScope = this.roleMappings.get(coreSecurityRoleName); + + if (oauthScope == null) { + unmappedRoles.add(coreRole); + } + else { + roleMappings.put(coreRole, oauthScope); + } + } + + if (!unmappedRoles.isEmpty()) { + throw new IllegalArgumentException( + String.format("The following %s %s not mapped: %s.", + unmappedRoles.size(), + unmappedRoles.size() > 1 ? "roles are" : "role is", + StringUtils.collectionToDelimitedString(unmappedRoles, ", "))); + } + + return roleMappings; + } + + /** + * Sets the prefix which should be added to the authority name (if it doesn't already + * exist). 
+ * + * @param rolePrefix Must not be null + * + */ + public void setRolePrefix(String rolePrefix) { + Assert.notNull(rolePrefix, "rolePrefix cannot be null"); + this.rolePrefix = rolePrefix; + } + + public String getOauthScopePrefix() { + return oauthScopePrefix; + } + + /** + * Sets the prefix used to derive default OAuth scope names from the role keys. + * + * @param oauthScopePrefix Must not be null + */ + public void setOauthScopePrefix(String oauthScopePrefix) { + Assert.notNull(oauthScopePrefix, "oauthScopePrefix cannot be null"); + this.oauthScopePrefix = oauthScopePrefix; + } + + public String getRolePrefix() { + return rolePrefix; + } +} diff --git a/spring-cloud-dataflow-completion/src/test/support/boot13/src/main/java/com/acme/boot13/AnotherEnumClass13.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/package-info.java similarity index 83% rename from spring-cloud-dataflow-completion/src/test/support/boot13/src/main/java/com/acme/boot13/AnotherEnumClass13.java rename to spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/package-info.java index 1260a504bf..458e8c5a6e 100644 --- a/spring-cloud-dataflow-completion/src/test/support/boot13/src/main/java/com/acme/boot13/AnotherEnumClass13.java +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/package-info.java @@ -13,13 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - -package com.acme.boot13; - /** - * An enum used in configuration properties class. + * Contains security related configuration classes. */ -public enum AnotherEnumClass13 { - low, - high; -} +package org.springframework.cloud.common.security;
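Before the logout handler that follows, a quick sketch of how the role-mapping rules above resolve. This is a hypothetical standalone usage (in the server these mappings normally arrive through configuration properties); note that the first argument of addRoleMapping becomes the map key, which convertRoleMappingKeysToCoreSecurityRoles matches against the prefixed role names:

    import java.util.Map;

    import org.springframework.cloud.common.security.ProviderRoleMapping;
    import org.springframework.cloud.common.security.support.CoreSecurityRoles;

    class RoleMappingExample {

        public static void main(String[] args) {
            ProviderRoleMapping mapping = new ProviderRoleMapping(true);
            // Map every core role; any unmapped CoreSecurityRoles value makes
            // convertRoleMappingKeysToCoreSecurityRoles throw IllegalArgumentException.
            for (CoreSecurityRoles role : CoreSecurityRoles.values()) {
                mapping.addRoleMapping("ROLE_" + role.getKey(), "dataflow." + role.getKey().toLowerCase());
            }
            Map<CoreSecurityRoles, String> resolved = mapping.convertRoleMappingKeysToCoreSecurityRoles();
            // Prints, for example: VIEW -> dataflow.view
            resolved.forEach((role, scope) -> System.out.println(role + " -> " + scope));
        }
    }

If the map is left empty, the converter instead derives a default scope for every role from the oauthScopePrefix and the role key.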
 diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/AccessTokenClearingLogoutSuccessHandler.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/AccessTokenClearingLogoutSuccessHandler.java new file mode 100644 index 0000000000..cdf739a8e7 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/AccessTokenClearingLogoutSuccessHandler.java @@ -0,0 +1,66 @@ +/* + * Copyright 2019-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import java.io.IOException; + +import jakarta.servlet.ServletException; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService; +import org.springframework.security.core.Authentication; +import org.springframework.security.oauth2.client.OAuth2AuthorizedClient; +import org.springframework.security.oauth2.client.OAuth2AuthorizedClientService; +import org.springframework.security.oauth2.client.authentication.OAuth2AuthenticationToken; +import org.springframework.security.web.authentication.logout.SimpleUrlLogoutSuccessHandler; +import org.springframework.util.Assert; + +/** + * Customized {@link SimpleUrlLogoutSuccessHandler} that will remove the previously authenticated user's + * {@link OAuth2AuthorizedClient} from the underlying {@link OAuth2AuthorizedClientService}. + * + * @author Gunnar Hillert + * @since 1.3.0 + */ +public class AccessTokenClearingLogoutSuccessHandler extends SimpleUrlLogoutSuccessHandler { + + private static final Logger logger = LoggerFactory.getLogger(AccessTokenClearingLogoutSuccessHandler.class); + + private final OAuth2TokenUtilsService oauth2TokenUtilsService; + + public AccessTokenClearingLogoutSuccessHandler(OAuth2TokenUtilsService oauth2TokenUtilsService) { + Assert.notNull(oauth2TokenUtilsService, "oauth2TokenUtilsService must not be null."); + this.oauth2TokenUtilsService = oauth2TokenUtilsService; + } + + @Override + public void onLogoutSuccess(HttpServletRequest request, HttpServletResponse response, + Authentication authentication) throws IOException, ServletException { + + if (authentication instanceof OAuth2AuthenticationToken) { + final OAuth2AuthenticationToken oauth2AuthenticationToken = (OAuth2AuthenticationToken) authentication; + final OAuth2AuthorizedClient oauth2AuthorizedClient = oauth2TokenUtilsService.getAuthorizedClient(oauth2AuthenticationToken); + oauth2TokenUtilsService.removeAuthorizedClient(oauth2AuthorizedClient); + logger.info("Removed OAuth2AuthorizedClient."); + } + + super.handle(request, response, authentication); + } + +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/AuthoritiesMapper.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/AuthoritiesMapper.java new file mode 100644 index 0000000000..70e8be71a3 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/AuthoritiesMapper.java @@ -0,0 +1,52 @@ +/* + * Copyright 2019-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.common.security.support; + +import java.util.Collections; +import java.util.List; +import java.util.Set; + +import org.springframework.security.core.GrantedAuthority; + +/** + * Maps scopes and claims into authorities. + * + * @author Gunnar Hillert + * @author Janne Valkealahti + */ +public interface AuthoritiesMapper { + + /** + * Map the provided scopes to authorities. + * + * @param providerId If null, then the default providerId is used + * @param scopes the scopes to map + * @param token some implementation may need to make additional requests + * @return the mapped authorities + */ + Set mapScopesToAuthorities(String providerId, Set scopes, String token); + + /** + * Map the provided claims to authorities. + * + * @param providerId If null, then the default providerId is used + * @param claims the claims to map + * @return the mapped authorities + */ + default Set mapClaimsToAuthorities(String providerId, List claims) { + return Collections.emptySet(); + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CoreSecurityRoles.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CoreSecurityRoles.java new file mode 100644 index 0000000000..c8a3a77206 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CoreSecurityRoles.java @@ -0,0 +1,77 @@ +/* + * Copyright 2017-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import java.util.Arrays; + +import org.springframework.util.Assert; + +/** + * Defines the core security roles supported by Spring Cloud Security. + * + * @author Gunnar Hillert + */ +public enum CoreSecurityRoles { + + CREATE("CREATE", "role for create operations"), + DEPLOY("DEPLOY", "role for deploy operations"), + DESTROY("DESTROY", "role for destroy operations"), + MANAGE("MANAGE", "role for the boot management endpoints"), + MODIFY("MODIFY", "role for modify operations"), + SCHEDULE("SCHEDULE", "role for scheduling operations"), + VIEW("VIEW", "view role"); + + private String key; + + private String name; + + CoreSecurityRoles(final String key, final String name) { + this.key = key; + this.name = name; + } + + public static CoreSecurityRoles fromKey(String role) { + + Assert.hasText(role, "Parameter role must not be null or empty."); + + for (CoreSecurityRoles roleType : CoreSecurityRoles.values()) { + if (roleType.getKey().equals(role)) { + return roleType; + } + } + + return null; + } + + /** + * Helper class that will return all role names as a string array. 
+ * + * @return Never null + */ + public static String[] getAllRolesAsStringArray() { + return Arrays.stream(CoreSecurityRoles.values()).map(CoreSecurityRoles::getKey) + .toArray(size -> new String[size]); + } + + public String getKey() { + return key; + } + + public String getName() { + return name; + } + +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomAuthoritiesOpaqueTokenIntrospector.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomAuthoritiesOpaqueTokenIntrospector.java new file mode 100644 index 0000000000..dc5ce9aa56 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomAuthoritiesOpaqueTokenIntrospector.java @@ -0,0 +1,82 @@ +/* + * Copyright 2019-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.oauth2.core.DefaultOAuth2AuthenticatedPrincipal; +import org.springframework.security.oauth2.core.OAuth2AuthenticatedPrincipal; +import org.springframework.security.oauth2.core.OAuth2TokenIntrospectionClaimNames; +import org.springframework.security.oauth2.server.resource.introspection.NimbusOpaqueTokenIntrospector; +import org.springframework.security.oauth2.server.resource.introspection.OpaqueTokenIntrospector; + +/** + * + * @author Gunnar Hillert + * @since 1.3.0 + */ +public class CustomAuthoritiesOpaqueTokenIntrospector implements OpaqueTokenIntrospector { + + private static final Logger logger = LoggerFactory.getLogger(CustomAuthoritiesOpaqueTokenIntrospector.class); + private final OpaqueTokenIntrospector delegate; + private DefaultPrincipalExtractor principalExtractor; + private AuthoritiesMapper authorityMapper; + + public CustomAuthoritiesOpaqueTokenIntrospector( + String introspectionUri, + String clientId, + String clientSecret, + AuthoritiesMapper authorityMapper) { + this.delegate = new NimbusOpaqueTokenIntrospector(introspectionUri, clientId, clientSecret); + this.principalExtractor = new DefaultPrincipalExtractor(); + this.authorityMapper = authorityMapper; + } + + @Override + public OAuth2AuthenticatedPrincipal introspect(String token) { + logger.debug("Introspecting"); + OAuth2AuthenticatedPrincipal principal = this.delegate.introspect(token); + Object principalName = principalExtractor.extractPrincipal(principal.getAttributes()); + return new DefaultOAuth2AuthenticatedPrincipal( + principalName.toString(), principal.getAttributes(), 
extractAuthorities(principal, token)); + } + + private Collection<GrantedAuthority> extractAuthorities(OAuth2AuthenticatedPrincipal principal, String token) { + final List<String> scopes = principal.getAttribute(OAuth2TokenIntrospectionClaimNames.SCOPE); + final Set<String> scopesAsSet = new HashSet<>(scopes); + final Set<GrantedAuthority> authorities = this.authorityMapper.mapScopesToAuthorities(null, scopesAsSet, token); + final Set<GrantedAuthority> authorities2 = this.authorityMapper.mapClaimsToAuthorities(null, Arrays.asList("groups", "roles")); + authorities.addAll(authorities2); + return authorities; + } + + public void setPrincipalExtractor(DefaultPrincipalExtractor principalExtractor) { + this.principalExtractor = principalExtractor; + } + + public void setAuthorityMapper(AuthoritiesMapper authorityMapper) { + this.authorityMapper = authorityMapper; + } + +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomOAuth2OidcUserService.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomOAuth2OidcUserService.java new file mode 100644 index 0000000000..7ba93044f1 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomOAuth2OidcUserService.java @@ -0,0 +1,90 @@ +/* + * Copyright 2019-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
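A brief usage sketch for the introspector above; the introspection endpoint, client credentials, and the resource-server wiring are assumptions for illustration, not part of this changeset:

```java
// Sketch: plugging the custom introspector into Spring Security's
// opaque-token resource-server support (URL and credentials are placeholders).
OpaqueTokenIntrospector introspector = new CustomAuthoritiesOpaqueTokenIntrospector(
		"https://uaa.example.com/introspect", "dataflow", "secret", authoritiesMapper);
http.oauth2ResourceServer(resourceServer -> resourceServer
		.opaqueToken(opaqueToken -> opaqueToken.introspector(introspector)));
```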
+ */ +package org.springframework.cloud.common.security.support; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.oauth2.client.oidc.userinfo.OidcUserRequest; +import org.springframework.security.oauth2.client.oidc.userinfo.OidcUserService; +import org.springframework.security.oauth2.client.userinfo.OAuth2UserService; +import org.springframework.security.oauth2.core.OAuth2AccessToken; +import org.springframework.security.oauth2.core.OAuth2AuthenticationException; +import org.springframework.security.oauth2.core.oidc.user.DefaultOidcUser; +import org.springframework.security.oauth2.core.oidc.user.OidcUser; +import org.springframework.util.StringUtils; + +/** + * + * @author Gunnar Hillert + * @author Janne Valkealahti + */ +public class CustomOAuth2OidcUserService implements OAuth2UserService { + + private final static Logger log = LoggerFactory.getLogger(CustomOAuth2OidcUserService.class); + final OidcUserService delegate = new OidcUserService(); + final AuthoritiesMapper authorityMapper; + + public CustomOAuth2OidcUserService(AuthoritiesMapper authorityMapper) { + this.authorityMapper = authorityMapper; + } + + @Override + public OidcUser loadUser(OidcUserRequest userRequest) throws OAuth2AuthenticationException { + log.debug("Load user"); + final OidcUser oidcUser = delegate.loadUser(userRequest); + final OAuth2AccessToken accessToken = userRequest.getAccessToken(); + final Set mappedAuthorities1 = this.authorityMapper.mapScopesToAuthorities( + userRequest.getClientRegistration().getRegistrationId(), accessToken.getScopes(), + accessToken.getTokenValue()); + + List roleClaims = oidcUser.getClaimAsStringList("groups"); + if (roleClaims == null) { + roleClaims = oidcUser.getClaimAsStringList("roles"); + } + if (roleClaims == null) { + roleClaims = new ArrayList<>(); + } + log.debug("roleClaims: {}", roleClaims); + Set mappedAuthorities2 = this.authorityMapper + .mapClaimsToAuthorities(userRequest.getClientRegistration().getRegistrationId(), roleClaims); + + final String userNameAttributeName = userRequest.getClientRegistration() + .getProviderDetails().getUserInfoEndpoint().getUserNameAttributeName(); + + log.debug("AccessToken: {}", accessToken.getTokenValue()); + + HashSet mappedAuthorities = new HashSet<>(mappedAuthorities1); + mappedAuthorities.addAll(mappedAuthorities2); + + final OidcUser oidcUserToReturn; + // OidcUser oidcUserToReturn; + + if (StringUtils.hasText(userNameAttributeName)) { + oidcUserToReturn = new DefaultOidcUser(mappedAuthorities, userRequest.getIdToken(), oidcUser.getUserInfo(), + userNameAttributeName); + } else { + oidcUserToReturn = new DefaultOidcUser(mappedAuthorities, userRequest.getIdToken(), oidcUser.getUserInfo()); + } + return oidcUserToReturn; + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomPlainOAuth2UserService.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomPlainOAuth2UserService.java new file mode 100644 index 0000000000..249f6d6688 --- /dev/null +++ 
b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomPlainOAuth2UserService.java @@ -0,0 +1,63 @@ +/* + * Copyright 2019-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import java.util.Set; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.oauth2.client.userinfo.DefaultOAuth2UserService; +import org.springframework.security.oauth2.client.userinfo.OAuth2UserRequest; +import org.springframework.security.oauth2.client.userinfo.OAuth2UserService; +import org.springframework.security.oauth2.core.OAuth2AccessToken; +import org.springframework.security.oauth2.core.OAuth2AuthenticationException; +import org.springframework.security.oauth2.core.user.DefaultOAuth2User; +import org.springframework.security.oauth2.core.user.OAuth2User; + +/** + * + * @author Gunnar Hillert + * @author Janne Valkealahti + */ +public class CustomPlainOAuth2UserService implements OAuth2UserService { + + private final static Logger log = LoggerFactory.getLogger(CustomPlainOAuth2UserService.class); + final DefaultOAuth2UserService delegate = new DefaultOAuth2UserService(); + final AuthoritiesMapper authorityMapper; + + public CustomPlainOAuth2UserService(AuthoritiesMapper authorityMapper) { + this.authorityMapper = authorityMapper; + } + + @Override + public OAuth2User loadUser(OAuth2UserRequest userRequest) throws OAuth2AuthenticationException { + log.debug("Load user"); + final OAuth2User oauth2User = delegate.loadUser(userRequest); + final OAuth2AccessToken accessToken = userRequest.getAccessToken(); + log.debug("AccessToken: {}", accessToken.getTokenValue()); + + final Set mappedAuthorities = this.authorityMapper.mapScopesToAuthorities( + userRequest.getClientRegistration().getRegistrationId(), accessToken.getScopes(), + accessToken.getTokenValue()); + final String userNameAttributeName = userRequest.getClientRegistration() + .getProviderDetails().getUserInfoEndpoint().getUserNameAttributeName(); + final OAuth2User oauth2UserToReturn = new DefaultOAuth2User(mappedAuthorities, oauth2User.getAttributes(), + userNameAttributeName); + return oauth2UserToReturn; + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultAuthoritiesMapper.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultAuthoritiesMapper.java new file mode 100644 index 0000000000..b5e9dc82e4 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultAuthoritiesMapper.java @@ -0,0 +1,233 @@ +/* + * Copyright 2019-2021 the original 
author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import java.net.URI; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.common.security.ProviderRoleMapping; +import org.springframework.security.config.core.GrantedAuthorityDefaults; +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.core.authority.SimpleGrantedAuthority; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Default {@link AuthoritiesMapper}. + * + * @author Gunnar Hillert + * @author Janne Valkealahti + */ +public class DefaultAuthoritiesMapper implements AuthoritiesMapper { + + private static final Logger logger = LoggerFactory.getLogger(DefaultAuthoritiesMapper.class); + private final Map<String, ProviderRoleMapping> providerRoleMappings; + private final String defaultProviderId; + + public DefaultAuthoritiesMapper(Map<String, ProviderRoleMapping> providerRoleMappings, String defaultProviderId) { + super(); + + Assert.notNull(providerRoleMappings, "providerRoleMappings must not be null."); + for (Entry<String, ProviderRoleMapping> providerRoleMappingToValidate : providerRoleMappings.entrySet()) { + providerRoleMappingToValidate.getValue().convertRoleMappingKeysToCoreSecurityRoles(); + } + + this.providerRoleMappings = providerRoleMappings; + this.defaultProviderId = defaultProviderId; + } + + /** + * Convenience constructor that will create a {@link DefaultAuthoritiesMapper} with a + * single {@link ProviderRoleMapping}. + * + * @param providerId Create a ProviderRoleMapping with the specified providerId + * @param mapOAuthScopes whether OAuth scopes should be mapped to roles + * @param roleMappings Used to populate the ProviderRoleMapping + */ + public DefaultAuthoritiesMapper(String providerId, boolean mapOAuthScopes, Map<String, String> roleMappings) { + Assert.hasText(providerId, "The providerId must not be null or empty."); + final ProviderRoleMapping providerRoleMapping = new ProviderRoleMapping(mapOAuthScopes, roleMappings); + this.providerRoleMappings = new HashMap<>(1); + this.providerRoleMappings.put(providerId, providerRoleMapping); + for (ProviderRoleMapping providerRoleMappingToValidate : providerRoleMappings.values()) { + providerRoleMappingToValidate.convertRoleMappingKeysToCoreSecurityRoles(); + } + this.defaultProviderId = providerId; + } + + /** + * Convenience constructor that will create a {@link DefaultAuthoritiesMapper} with a + * single {@link ProviderRoleMapping}. + * + * @param providerId The provider id for the ProviderRoleMapping + * @param mapOAuthScopes whether OAuth scopes should be mapped to roles
+ */ + public DefaultAuthoritiesMapper(String providerId, boolean mapOAuthScopes) { + Assert.hasText(providerId, "The providerId must not be null or empty."); + final ProviderRoleMapping providerRoleMapping = new ProviderRoleMapping(mapOAuthScopes); + this.providerRoleMappings = new HashMap<>(1); + this.providerRoleMappings.put(providerId, providerRoleMapping); + for (ProviderRoleMapping providerRoleMappingToValidate : providerRoleMappings.values()) { + providerRoleMappingToValidate.convertRoleMappingKeysToCoreSecurityRoles(); + } + this.defaultProviderId = providerId; + } + + /** + * Convenience constructor that will create a {@link DefaultAuthoritiesMapper} with a + * single {@link ProviderRoleMapping}. + * + * @param providerId The provider id for the ProviderRoleMapping + * @param providerRoleMapping The role mappings to add to the {@link ProviderRoleMapping} + */ + public DefaultAuthoritiesMapper(String providerId, ProviderRoleMapping providerRoleMapping) { + this.providerRoleMappings = new HashMap<>(1); + this.providerRoleMappings.put(providerId, providerRoleMapping); + for (ProviderRoleMapping providerRoleMappingToValidate : providerRoleMappings.values()) { + providerRoleMappingToValidate.convertRoleMappingKeysToCoreSecurityRoles(); + } + this.defaultProviderId = providerId; + } + + /** + * The returned {@link Set} of {@link GrantedAuthority}s contains all roles from + * {@link CoreSecurityRoles}. The roles are prefixed with the value specified in + * {@link GrantedAuthorityDefaults}. + * + * @param clientIdParam If null, the default providerId is used + * @param scopes Must not be null + * @param token Ignored in this implementation + */ + @Override + public Set<GrantedAuthority> mapScopesToAuthorities(String clientIdParam, Set<String> scopes, String token) { + logger.debug("Mapping scopes to authorities"); + final String clientId; + if (clientIdParam == null) { + clientId = this.defaultProviderId; + } + else { + clientId = clientIdParam; + } + Assert.notNull(scopes, "The scopes argument must not be null."); + + final ProviderRoleMapping roleMapping = this.providerRoleMappings.get(clientId); + + if (roleMapping == null) { + throw new IllegalArgumentException("No role mapping found for clientId " + clientId); + } + + final List<String> rolesAsStrings = new ArrayList<>(); + + Set<GrantedAuthority> grantedAuthorities = new HashSet<>(); + + if (roleMapping.isMapOauthScopes()) { + if (!scopes.isEmpty()) { + for (Map.Entry<CoreSecurityRoles, String> roleMappingEntry : roleMapping.convertRoleMappingKeysToCoreSecurityRoles().entrySet()) { + final CoreSecurityRoles role = roleMappingEntry.getKey(); + final String expectedOAuthScope = roleMappingEntry.getValue(); + Set<String> scopeList = roleMapping.isParseOauthScopePathParts() ? pathParts(scopes) : scopes; + for (String scope : scopeList) { + if (scope.equalsIgnoreCase(expectedOAuthScope)) { + final SimpleGrantedAuthority oauthRoleAuthority = new SimpleGrantedAuthority(roleMapping.getRolePrefix() + role.getKey()); + rolesAsStrings.add(oauthRoleAuthority.getAuthority()); + grantedAuthorities.add(oauthRoleAuthority); + } + } + } + logger.info("Adding roles: {}.", StringUtils.collectionToCommaDelimitedString(rolesAsStrings)); + } + } + else if (!roleMapping.isMapGroupClaims()) { + grantedAuthorities = + roleMapping.convertRoleMappingKeysToCoreSecurityRoles().entrySet().stream().map(mapEntry -> { + final CoreSecurityRoles role = mapEntry.getKey(); + rolesAsStrings.add(role.getKey()); + return new SimpleGrantedAuthority(roleMapping.getRolePrefix() + mapEntry.getKey()); + }).collect(Collectors.toSet()); + logger.info("Adding ALL roles: {}.", StringUtils.collectionToCommaDelimitedString(rolesAsStrings)); + } + return grantedAuthorities; + } + + @Override + public Set<GrantedAuthority> mapClaimsToAuthorities(String clientIdParam, List<String> claims) { + logger.debug("Mapping claims to authorities"); + final String clientId; + if (clientIdParam == null) { + clientId = this.defaultProviderId; + } + else { + clientId = clientIdParam; + } + + final ProviderRoleMapping groupMapping = this.providerRoleMappings.get(clientId); + if (groupMapping == null) { + throw new IllegalArgumentException("No role mapping found for clientId " + clientId); + } + + final List<String> rolesAsStrings = new ArrayList<>(); + final Set<GrantedAuthority> grantedAuthorities = new HashSet<>(); + + if (groupMapping.isMapGroupClaims()) { + if (!claims.isEmpty()) { + for (Map.Entry<CoreSecurityRoles, String> roleMappingEntry : groupMapping.convertGroupMappingKeysToCoreSecurityRoles().entrySet()) { + final CoreSecurityRoles role = roleMappingEntry.getKey(); + final String expectedOAuthScope = roleMappingEntry.getValue(); + logger.debug("Checking group mapping {} {}", role, expectedOAuthScope); + for (String claim : claims) { + logger.debug("Checking against claim {} {}", claim, expectedOAuthScope); + if (claim.equalsIgnoreCase(expectedOAuthScope)) { + final SimpleGrantedAuthority oauthRoleAuthority = new SimpleGrantedAuthority(groupMapping.getRolePrefix() + role.getKey()); + rolesAsStrings.add(oauthRoleAuthority.getAuthority()); + grantedAuthorities.add(oauthRoleAuthority); + logger.debug("Adding to granted authorities {}", oauthRoleAuthority); + } + } + } + logger.info("Adding groups: {}.", StringUtils.collectionToCommaDelimitedString(rolesAsStrings)); + } + } + + return grantedAuthorities; + } + + private Set<String> pathParts(Set<String> scopes) { + // Strip away the leading part if the scope is something like + // api://dataflow-server/dataflow.create, resulting in dataflow.create + return scopes.stream().map(scope -> { + try { + URI uri = URI.create(scope); + String path = uri.getPath(); + if (StringUtils.hasText(path) && path.charAt(0) == '/') { + return path.substring(1); + } + } catch (Exception e) { + // not a URI; fall through and keep the raw scope + } + return scope; + }) + .collect(Collectors.toSet()); + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultOAuth2TokenUtilsService.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultOAuth2TokenUtilsService.java new file mode 100644 index 0000000000..063c6b7917 --- /dev/null +++ 
b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultOAuth2TokenUtilsService.java @@ -0,0 +1,110 @@ +/* + * Copyright 2019-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.security.oauth2.client.OAuth2AuthorizedClient; +import org.springframework.security.oauth2.client.OAuth2AuthorizedClientService; +import org.springframework.security.oauth2.client.authentication.OAuth2AuthenticationToken; +import org.springframework.security.oauth2.core.AbstractOAuth2Token; +import org.springframework.security.oauth2.server.resource.authentication.BearerTokenAuthentication; +import org.springframework.security.oauth2.server.resource.authentication.JwtAuthenticationToken; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Utility methods for retrieving access tokens. + * + * @author Gunnar Hillert + */ +public class DefaultOAuth2TokenUtilsService implements OAuth2TokenUtilsService { + + private final OAuth2AuthorizedClientService oauth2AuthorizedClientService; + + public DefaultOAuth2TokenUtilsService(OAuth2AuthorizedClientService oauth2AuthorizedClientService) { + Assert.notNull(oauth2AuthorizedClientService, "oauth2AuthorizedClientService must not be null."); + this.oauth2AuthorizedClientService = oauth2AuthorizedClientService; + } + + /** + * Retrieves the access token from the {@link Authentication} implementation. + * + * @return May return null. + */ + @Override + public String getAccessTokenOfAuthenticatedUser() { + + final Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); + + if (authentication == null) { + throw new IllegalStateException("Cannot retrieve the authentication object from the SecurityContext. 
Are you authenticated?"); + } + + final String accessTokenOfAuthenticatedUser; + + if (authentication instanceof BearerTokenAuthentication) { + accessTokenOfAuthenticatedUser = ((BearerTokenAuthentication) authentication).getToken().getTokenValue(); + } + else if (authentication instanceof OAuth2AuthenticationToken) { + final OAuth2AuthenticationToken oauth2AuthenticationToken = (OAuth2AuthenticationToken) authentication; + final OAuth2AuthorizedClient oauth2AuthorizedClient = this.getAuthorizedClient(oauth2AuthenticationToken); + accessTokenOfAuthenticatedUser = oauth2AuthorizedClient.getAccessToken().getTokenValue(); + } + else if (authentication instanceof JwtAuthenticationToken) { + AbstractOAuth2Token token = (AbstractOAuth2Token) authentication.getCredentials(); + accessTokenOfAuthenticatedUser = token.getTokenValue(); + } + else { + throw new IllegalStateException("Unsupported authentication object type " + authentication); + } + + return accessTokenOfAuthenticatedUser; + } + + @Override + public OAuth2AuthorizedClient getAuthorizedClient(OAuth2AuthenticationToken auth2AuthenticationToken) { + + final String principalName = auth2AuthenticationToken.getName(); + final String clientRegistrationId = auth2AuthenticationToken.getAuthorizedClientRegistrationId(); + + if (!StringUtils.hasText(principalName)) { + throw new IllegalStateException("The retrieved principalName must not be null or empty."); + } + + if (!StringUtils.hasText(clientRegistrationId)) { + throw new IllegalStateException("The retrieved clientRegistrationId must not be null or empty."); + } + + final OAuth2AuthorizedClient oauth2AuthorizedClient = this.oauth2AuthorizedClientService.loadAuthorizedClient(clientRegistrationId, principalName); + + if (oauth2AuthorizedClient == null) { + throw new IllegalStateException(String.format( + "No oauth2AuthorizedClient returned for clientRegistrationId '%s' and principalName '%s'.", + clientRegistrationId, principalName)); + } + return oauth2AuthorizedClient; + } + + @Override + public void removeAuthorizedClient(OAuth2AuthorizedClient auth2AuthorizedClient) { + Assert.notNull(auth2AuthorizedClient, "The auth2AuthorizedClient must not be null."); + this.oauth2AuthorizedClientService.removeAuthorizedClient( + auth2AuthorizedClient.getClientRegistration().getRegistrationId(), + auth2AuthorizedClient.getPrincipalName()); + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultPrincipalExtractor.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultPrincipalExtractor.java new file mode 100644 index 0000000000..a8d5254993 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultPrincipalExtractor.java @@ -0,0 +1,41 @@ +/* + * Copyright 2018-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
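A short sketch of how a caller typically consumes the service above; the `authorizedClientService` bean and an authenticated security context are assumed for illustration:

```java
// Sketch: resolve the current user's access token and forward it downstream.
OAuth2TokenUtilsService tokenUtils =
		new DefaultOAuth2TokenUtilsService(authorizedClientService); // assumed bean
String accessToken = tokenUtils.getAccessTokenOfAuthenticatedUser();
HttpHeaders headers = new HttpHeaders();
headers.setBearerAuth(accessToken); // e.g. when calling a downstream API
```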
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import java.util.Map; + +/** + * The default implementation of the {@link PrincipalExtractor} that extracts the username + * of the principal. + * + * @author Gunnar Hillert + * + */ +public class DefaultPrincipalExtractor implements PrincipalExtractor { + + private static final String[] PRINCIPAL_KEYS = new String[] { "user_name", "user", "username", + "userid", "user_id", "login", "id", "name", "cid", "client_id" }; + + @Override + public Object extractPrincipal(Map<String, Object> map) { + for (String key : PRINCIPAL_KEYS) { + if (map.containsKey(key)) { + return map.get(key); + } + } + return null; + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/ExternalOauth2ResourceAuthoritiesMapper.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/ExternalOauth2ResourceAuthoritiesMapper.java new file mode 100644 index 0000000000..799d44b0c4 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/ExternalOauth2ResourceAuthoritiesMapper.java @@ -0,0 +1,132 @@ +/* + * Copyright 2018-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import java.net.URI; +import java.util.HashSet; +import java.util.Locale; +import java.util.Set; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpMethod; +import org.springframework.http.ResponseEntity; +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.core.authority.SimpleGrantedAuthority; +import org.springframework.security.oauth2.client.http.OAuth2ErrorResponseErrorHandler; +import org.springframework.security.oauth2.core.OAuth2AccessToken; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; +import org.springframework.web.client.RestOperations; +import org.springframework.web.client.RestTemplate; + +/** + * {@link AuthoritiesMapper} that looks up + * {@link CoreSecurityRoles} from an external HTTP resource. Requests to the + * external HTTP resource are authenticated by forwarding the user's access + * token. The external resource's response body MUST be a JSON array + * containing strings with values corresponding to + * {@link CoreSecurityRoles#key} values. For example, a response containing + * {@code ["VIEW", "CREATE"]} would grant the user + * {@code ROLE_VIEW} and {@code ROLE_CREATE}. + * + * @author Mike Heath + * @author Gunnar Hillert + */ +public class ExternalOauth2ResourceAuthoritiesMapper implements AuthoritiesMapper { + + private static final Logger logger = LoggerFactory.getLogger(ExternalOauth2ResourceAuthoritiesMapper.class); + + public static final GrantedAuthority CREATE = new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + CoreSecurityRoles.CREATE.getKey()); + public static final GrantedAuthority DEPLOY = new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + CoreSecurityRoles.DEPLOY.getKey()); + public static final GrantedAuthority DESTROY = new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + CoreSecurityRoles.DESTROY.getKey()); + public static final GrantedAuthority MANAGE = new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + CoreSecurityRoles.MANAGE.getKey()); + public static final GrantedAuthority MODIFY = new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + CoreSecurityRoles.MODIFY.getKey()); + public static final GrantedAuthority SCHEDULE = new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + CoreSecurityRoles.SCHEDULE.getKey()); + public static final GrantedAuthority VIEW = new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + CoreSecurityRoles.VIEW.getKey()); + + private final URI roleProviderUri; + private final RestOperations restOperations; + + /** + * + * @param roleProviderUri an HTTP GET request is sent to this URI to fetch + * the user's security roles + */ + public ExternalOauth2ResourceAuthoritiesMapper(URI roleProviderUri) { + Assert.notNull(roleProviderUri, "The provided roleProviderUri must not be null."); + this.roleProviderUri = roleProviderUri; + + final RestTemplate restTemplate = new RestTemplate(); + restTemplate.setErrorHandler(new OAuth2ErrorResponseErrorHandler()); + this.restOperations = restTemplate; + } + + + @Override + public Set<GrantedAuthority> mapScopesToAuthorities(String providerId, Set<String> scopes, String token) { + logger.debug("Getting permissions from {}", roleProviderUri); + + final HttpHeaders headers = new HttpHeaders(); + headers.add(HttpHeaders.AUTHORIZATION, OAuth2AccessToken.TokenType.BEARER.getValue() + " " + token); + + final HttpEntity<?> entity = new HttpEntity<>(null, headers); + final ResponseEntity<String[]> response = restOperations.exchange(roleProviderUri, HttpMethod.GET, entity, String[].class); + + final Set<GrantedAuthority> authorities = new HashSet<>(); + for (String permission : response.getBody()) { + if (!StringUtils.hasText(permission)) { + logger.warn("Received an empty permission from {}", roleProviderUri); + } else { + final CoreSecurityRoles securityRole = CoreSecurityRoles.fromKey(permission.toUpperCase(Locale.ROOT)); + if (securityRole == null) { + logger.warn("Invalid role {} provided by {}", permission, roleProviderUri); + } else { + switch (securityRole) { + case CREATE: + authorities.add(CREATE); + break; + case DEPLOY: + authorities.add(DEPLOY); + break; + case DESTROY: + authorities.add(DESTROY); + break; + case MANAGE: + authorities.add(MANAGE); + break; + case MODIFY: + authorities.add(MODIFY); + break; + case SCHEDULE: + authorities.add(SCHEDULE); + break; + case VIEW: + authorities.add(VIEW); + break; + } + } + } + } + logger.info("Roles added for user: {}.", authorities); + return authorities; + } +} + diff --git 
a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/MappingJwtGrantedAuthoritiesConverter.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/MappingJwtGrantedAuthoritiesConverter.java new file mode 100644 index 0000000000..e31c908e8a --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/MappingJwtGrantedAuthoritiesConverter.java @@ -0,0 +1,201 @@ +/* + * Copyright 2020-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.common.security.support; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.core.authority.SimpleGrantedAuthority; +import org.springframework.security.oauth2.jwt.Jwt; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * Extracts the {@link GrantedAuthority}s from scope attributes typically found + * in a {@link Jwt}. + * + * @author Gunnar Hillert + * @author Janne Valkealahti + */ +public final class MappingJwtGrantedAuthoritiesConverter implements Converter<Jwt, Collection<GrantedAuthority>> { + + private final static Logger log = LoggerFactory.getLogger(MappingJwtGrantedAuthoritiesConverter.class); + private static final String DEFAULT_AUTHORITY_PREFIX = "SCOPE_"; + + private static final Collection<String> WELL_KNOWN_SCOPES_CLAIM_NAMES = + Arrays.asList("scope", "scp"); + private static final Collection<String> WELL_KNOWN_GROUPS_CLAIM_NAMES = + Arrays.asList("groups", "roles"); + + private String authorityPrefix = DEFAULT_AUTHORITY_PREFIX; + + private String authoritiesClaimName; + private String groupAuthoritiesClaimName; + + private Map<String, String> roleAuthoritiesMapping = new HashMap<>(); + private Map<String, String> groupAuthoritiesMapping = new HashMap<>(); + + /** + * Extract {@link GrantedAuthority}s from the given {@link Jwt}. + * + * @param jwt The {@link Jwt} token + * @return The {@link GrantedAuthority authorities} read from the token scopes + */ + @Override + public Collection<GrantedAuthority> convert(Jwt jwt) { + log.debug("JWT: {}", jwt.getTokenValue()); + Set<GrantedAuthority> collect = getAuthorities(jwt).stream() + .flatMap(authority -> { + if (roleAuthoritiesMapping.isEmpty() && groupAuthoritiesMapping.isEmpty()) { + return Stream.of(authority); + } + Stream<String> s1 = roleAuthoritiesMapping.entrySet().stream() + .filter(entry -> entry.getValue().equals(authority)) + .map(entry -> entry.getKey()).distinct(); + Stream<String> s2 = groupAuthoritiesMapping.entrySet().stream() + .filter(entry -> entry.getValue().equals(authority)) + .map(entry -> entry.getKey()).distinct(); + return Stream.concat(s1, s2); + }) + .distinct() + .map(authority -> new SimpleGrantedAuthority(this.authorityPrefix + authority)) + .collect(Collectors.toSet()); + log.debug("JWT granted: {}", collect); + return collect; + } + + /** + * Sets the prefix to use for {@link GrantedAuthority authorities} mapped by this converter. + * Defaults to {@code SCOPE_}. + * + * @param authorityPrefix The authority prefix + */ + public void setAuthorityPrefix(String authorityPrefix) { + Assert.notNull(authorityPrefix, "authorityPrefix cannot be null"); + this.authorityPrefix = authorityPrefix; + } + + /** + * Sets the name of token claim to use for mapping {@link GrantedAuthority + * authorities} by this converter. Defaults to the well-known scope claim + * names {@code scope} and {@code scp}. + * + * @param authoritiesClaimName The token claim name to map authorities + */ + public void setAuthoritiesClaimName(String authoritiesClaimName) { + Assert.hasText(authoritiesClaimName, "authoritiesClaimName cannot be empty"); + this.authoritiesClaimName = authoritiesClaimName; + } + + /** + * Set the mapping from resolved authorities from jwt into granted authorities. + * + * @param authoritiesMapping the authoritiesMapping to set + */ + public void setAuthoritiesMapping(Map<String, String> authoritiesMapping) { + Assert.notNull(authoritiesMapping, "authoritiesMapping cannot be null"); + this.roleAuthoritiesMapping = authoritiesMapping; + } + + /** + * Sets the name of token claim to use for group mapping {@link GrantedAuthority + * authorities} by this converter. Defaults to the well-known group claim + * names {@code groups} and {@code roles}. + * + * @param groupAuthoritiesClaimName the token claim name to map group + * authorities + */ + public void setGroupAuthoritiesClaimName(String groupAuthoritiesClaimName) { + this.groupAuthoritiesClaimName = groupAuthoritiesClaimName; + } + + /** + * Set the group mapping from resolved authorities from jwt into granted + * authorities. + * + * @param groupAuthoritiesMapping the group authorities mapping to set + */ + public void setGroupAuthoritiesMapping(Map<String, String> groupAuthoritiesMapping) { + this.groupAuthoritiesMapping = groupAuthoritiesMapping; + } + + private String getAuthoritiesClaimName(Jwt jwt) { + if (this.authoritiesClaimName != null) { + return this.authoritiesClaimName; + } + for (String claimName : WELL_KNOWN_SCOPES_CLAIM_NAMES) { + if (jwt.hasClaim(claimName)) { + return claimName; + } + } + return null; + } + + private String getGroupAuthoritiesClaimName(Jwt jwt) { + if (this.groupAuthoritiesClaimName != null) { + return this.groupAuthoritiesClaimName; + } + for (String claimName : WELL_KNOWN_GROUPS_CLAIM_NAMES) { + if (jwt.hasClaim(claimName)) { + return claimName; + } + } + return null; + } + + private Collection<String> getAuthorities(Jwt jwt) { + String scopeClaimName = getAuthoritiesClaimName(jwt); + String groupClaimName = getGroupAuthoritiesClaimName(jwt); + + List<String> claimAsStringList1 = null; + List<String> claimAsStringList2 = null; + + // Spring Security converts native array claims incorrectly, so only read claims that are not arrays + if (scopeClaimName != null && !ObjectUtils.isArray(jwt.getClaim(scopeClaimName))) { + claimAsStringList1 = jwt.getClaimAsStringList(scopeClaimName); + } + if (groupClaimName != null && !ObjectUtils.isArray(jwt.getClaim(groupClaimName))) { + claimAsStringList2 = jwt.getClaimAsStringList(groupClaimName); + } + + List<String> claimAsStringList = new ArrayList<>(); + if (claimAsStringList1 != null) { + List<String> collect = claimAsStringList1.stream() + .flatMap(c -> Arrays.stream(c.split(" "))) + .filter(c -> StringUtils.hasText(c)) + .collect(Collectors.toList()); + claimAsStringList.addAll(collect); + } + if (claimAsStringList2 != null) { + claimAsStringList.addAll(claimAsStringList2); + } + return claimAsStringList; + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/OnOAuth2SecurityDisabled.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/OnOAuth2SecurityDisabled.java new file mode 100644 index 0000000000..c5ad6f25af --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/OnOAuth2SecurityDisabled.java @@ -0,0 +1,38 @@ +/* + * Copyright 2016-2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import org.springframework.boot.autoconfigure.condition.NoneNestedConditions; +import org.springframework.context.annotation.Condition; +import org.springframework.context.annotation.Conditional; + +/** + * {@link Condition} that is only valid if OAuth2 security is disabled, that is, + * when {@link OnOAuth2SecurityEnabled} does not match.
+ * + * @author Gunnar Hillert + * @since 1.1.0 + */ +public class OnOAuth2SecurityDisabled extends NoneNestedConditions { + + public OnOAuth2SecurityDisabled() { + super(ConfigurationPhase.REGISTER_BEAN); + } + + @Conditional(OnOAuth2SecurityEnabled.class) + static class OAuthEnabled { + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/OnOAuth2SecurityEnabled.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/OnOAuth2SecurityEnabled.java new file mode 100644 index 0000000000..fbd0c656b3 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/OnOAuth2SecurityEnabled.java @@ -0,0 +1,50 @@ +/* + * Copyright 2016-2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import java.util.Collections; +import java.util.Map; + +import org.springframework.boot.autoconfigure.condition.ConditionOutcome; +import org.springframework.boot.autoconfigure.condition.SpringBootCondition; +import org.springframework.boot.context.properties.bind.Bindable; +import org.springframework.boot.context.properties.bind.Binder; +import org.springframework.context.annotation.Condition; +import org.springframework.context.annotation.ConditionContext; +import org.springframework.core.env.Environment; +import org.springframework.core.type.AnnotatedTypeMetadata; + +/** + * {@link Condition} that is only valid if at least one property under + * {@code spring.security.oauth2} is set.
+ * + * @author Gunnar Hillert + * @since 1.1.0 + */ +public class OnOAuth2SecurityEnabled extends SpringBootCondition { + + @Override + public ConditionOutcome getMatchOutcome(ConditionContext context, AnnotatedTypeMetadata metadata) { + Map<String, String> properties = getSubProperties(context.getEnvironment(), "spring.security.oauth2"); + return new ConditionOutcome(!properties.isEmpty(), "OAuth2 Enabled"); + } + + public static Map<String, String> getSubProperties(Environment environment, String keyPrefix) { + return Binder.get(environment) + .bind(keyPrefix, Bindable.mapOf(String.class, String.class)) + .orElseGet(Collections::emptyMap); + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/PrincipalExtractor.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/PrincipalExtractor.java new file mode 100644 index 0000000000..4fb9e18b45 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/PrincipalExtractor.java @@ -0,0 +1,28 @@ +/* + * Copyright 2018-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import java.util.Map; + +/** + * Extracts the principal from a map of token or user-info attributes. + * + * @author Gunnar Hillert + * @since 1.3.0 + */ +public interface PrincipalExtractor { + + Object extractPrincipal(Map<String, Object> map); +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/SecurityConfigUtils.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/SecurityConfigUtils.java new file mode 100644 index 0000000000..efbaf67abf --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/SecurityConfigUtils.java @@ -0,0 +1,75 @@ +/* + * Copyright 2017-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.common.security.support; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.common.security.AuthorizationProperties; +import org.springframework.http.HttpMethod; +import org.springframework.security.config.annotation.web.builders.HttpSecurity; +import org.springframework.security.config.annotation.web.configurers.AuthorizeHttpRequestsConfigurer; +import org.springframework.security.web.access.expression.WebExpressionAuthorizationManager; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Utility constants and helper methods for configuring web security. + * + * @author Gunnar Hillert + */ +public class SecurityConfigUtils { + + private static final org.slf4j.Logger logger = LoggerFactory.getLogger(SecurityConfigUtils.class); + + public static final String ROLE_PREFIX = "ROLE_"; + + public static final Pattern AUTHORIZATION_RULE; + + public static final String BASIC_AUTH_REALM_NAME = "Spring"; + + static { + String methodsRegex = StringUtils.arrayToDelimitedString(HttpMethod.values(), "|"); + AUTHORIZATION_RULE = Pattern.compile("(" + methodsRegex + ")\\s+(.+)\\s+=>\\s+(.+)"); + } + + /** + * Read the configuration for "simple" (that is, not ACL based) security and apply it. + * + * @param auth The Configurer to apply the authorization rules to + * @param authorizationProperties Contains the rules to configure authorization + */ + public static void configureSimpleSecurity( + AuthorizeHttpRequestsConfigurer<HttpSecurity>.AuthorizationManagerRequestMatcherRegistry auth, + AuthorizationProperties authorizationProperties) { + for (String rule : authorizationProperties.getRules()) { + Matcher matcher = AUTHORIZATION_RULE.matcher(rule); + Assert.isTrue(matcher.matches(), + String.format("Unable to parse security rule [%s], expected format is 'HTTP_METHOD ANT_PATTERN => " + "SECURITY_ATTRIBUTE(S)'", rule)); + + HttpMethod method = HttpMethod.valueOf(matcher.group(1).trim()); + String urlPattern = matcher.group(2).trim(); + String attribute = matcher.group(3).trim(); + + logger.info("Authorization '{}' | '{}' | '{}'", method, attribute, urlPattern); + auth.requestMatchers(method, urlPattern).access(new WebExpressionAuthorizationManager(attribute)); + } + + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/SecurityStateBean.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/SecurityStateBean.java new file mode 100644 index 0000000000..2641ce9f63 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/SecurityStateBean.java @@ -0,0 +1,39 @@ +/* + * Copyright 2017-2018 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
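To make the rule grammar parsed by `configureSimpleSecurity` concrete, a short sketch of rule strings follows; the paths and security attributes are invented for illustration, and a mutable `getRules()` list is an assumption about `AuthorizationProperties`:

```java
// Sketch: rules follow the 'HTTP_METHOD ANT_PATTERN => SECURITY_ATTRIBUTE(S)'
// format matched by AUTHORIZATION_RULE (values are examples only).
AuthorizationProperties authorizationProperties = new AuthorizationProperties();
authorizationProperties.getRules().add("GET /about => hasRole('ROLE_VIEW')");
authorizationProperties.getRules().add("POST /streams/** => hasRole('ROLE_CREATE')");
authorizationProperties.getRules().add("DELETE /streams/** => hasRole('ROLE_DESTROY')");
```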
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +/** + * State-holder for computed security meta-information. + * + * @author Gunnar Hillert + */ +public class SecurityStateBean { + + private boolean authenticationEnabled; + + public SecurityStateBean() { + super(); + } + + public boolean isAuthenticationEnabled() { + return authenticationEnabled; + } + + public void setAuthenticationEnabled(boolean authenticationEnabled) { + this.authenticationEnabled = authenticationEnabled; + } + +} diff --git a/spring-cloud-dataflow-server-core/.jdk8 b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/resources/META-INF/spring.factories similarity index 100% rename from spring-cloud-dataflow-server-core/.jdk8 rename to spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/resources/META-INF/spring.factories diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports new file mode 100644 index 0000000000..cc9b88b973 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports @@ -0,0 +1 @@ +org.springframework.cloud.common.security.CommonSecurityAutoConfiguration \ No newline at end of file diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/OnOAuth2SecurityDisabledTests.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/OnOAuth2SecurityDisabledTests.java new file mode 100644 index 0000000000..f39a46fe35 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/OnOAuth2SecurityDisabledTests.java @@ -0,0 +1,62 @@ +/* + * Copyright 2018-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.common.security; + +import org.junit.jupiter.api.Test; + +import org.springframework.boot.test.util.TestPropertyValues; +import org.springframework.cloud.common.security.support.OnOAuth2SecurityDisabled; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; +import org.springframework.context.annotation.Configuration; + +import static org.assertj.core.api.Assertions.assertThat; + +public class OnOAuth2SecurityDisabledTests { + + @Test + public void noPropertySet() throws Exception { + AnnotationConfigApplicationContext context = load(Config.class); + assertThat(context.containsBean("myBean")).isTrue(); + context.close(); + } + + @Test + public void propertyClientIdSet() throws Exception { + AnnotationConfigApplicationContext context = + load(Config.class, "spring.security.oauth2.client.registration.uaa.client-id:12345"); + assertThat(context.containsBean("myBean")).isFalse(); + context.close(); + } + + private AnnotationConfigApplicationContext load(Class config, String... env) { + AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(); + TestPropertyValues.of(env).applyTo(context); + context.register(config); + context.refresh(); + return context; + } + + @Configuration + @Conditional(OnOAuth2SecurityDisabled.class) + public static class Config { + @Bean + public String myBean() { + return "myBean"; + } + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/OnOAuth2SecurityEnabledTests.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/OnOAuth2SecurityEnabledTests.java new file mode 100644 index 0000000000..4bcfe1789c --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/OnOAuth2SecurityEnabledTests.java @@ -0,0 +1,81 @@ +/* + * Copyright 2016-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.common.security; + +import org.junit.jupiter.api.Test; + +import org.springframework.boot.test.util.TestPropertyValues; +import org.springframework.cloud.common.security.support.OnOAuth2SecurityEnabled; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; +import org.springframework.context.annotation.Configuration; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +/** + * @author Gunnar Hillert + */ +public class OnOAuth2SecurityEnabledTests { + + @Test + public void noPropertySet() throws Exception { + AnnotationConfigApplicationContext context = load(Config.class); + assertThat(context.containsBean("myBean")).isFalse(); + context.close(); + } + + @Test + public void propertySecurityOauth() throws Exception { + assertThatThrownBy(() -> { + load(Config.class, "spring.security.oauth2"); + }).isInstanceOf(IllegalStateException.class); + } + + @Test + public void propertyClientId() throws Exception { + AnnotationConfigApplicationContext context = load(Config.class, + "spring.security.oauth2.client.registration.uaa.client-id:12345"); + assertThat(context.containsBean("myBean")).isTrue(); + context.close(); + } + + @Test + public void clientIdOnlyWithNoValue() throws Exception { + AnnotationConfigApplicationContext context = load(Config.class, + "spring.security.oauth2.client.registration.uaa.client-id"); + assertThat(context.containsBean("myBean")).isTrue(); + context.close(); + } + + private AnnotationConfigApplicationContext load(Class<?> config, String... env) { + AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(); + TestPropertyValues.of(env).applyTo(context); + context.register(config); + context.refresh(); + return context; + } + + @Configuration + @Conditional(OnOAuth2SecurityEnabled.class) + public static class Config { + @Bean + public String myBean() { + return "myBean"; + } + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/DefaultAuthoritiesMapperTests.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/DefaultAuthoritiesMapperTests.java new file mode 100644 index 0000000000..3332ae70f3 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/DefaultAuthoritiesMapperTests.java @@ -0,0 +1,303 @@ +/* + * Copyright 2017-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.common.security.support; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import org.junit.jupiter.api.Test; + +import org.springframework.cloud.common.security.ProviderRoleMapping; +import org.springframework.security.core.GrantedAuthority; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +/** + * @author Gunnar Hillert + */ +class DefaultAuthoritiesMapperTests { + + @Test + void nullConstructor() throws Exception { + assertThatThrownBy(() -> { + new DefaultAuthoritiesMapper(null, ""); + }).isInstanceOf(IllegalArgumentException.class).hasMessageContaining("providerRoleMappings must not be null."); + } + + @Test + void mapScopesToAuthoritiesWithNullParameters() throws Exception { + DefaultAuthoritiesMapper authoritiesMapper = new DefaultAuthoritiesMapper(Collections.emptyMap(), ""); + + assertThatThrownBy(() -> { + authoritiesMapper.mapScopesToAuthorities(null, null, null); + }).isInstanceOf(IllegalArgumentException.class).hasMessageContaining("The scopes argument must not be null."); + assertThatThrownBy(() -> { + authoritiesMapper.mapScopesToAuthorities("myClientId", null, null); + }).isInstanceOf(IllegalArgumentException.class).hasMessageContaining("The scopes argument must not be null."); + } + + @Test + void that7AuthoritiesAreReturned() throws Exception { + DefaultAuthoritiesMapper authoritiesMapper = new DefaultAuthoritiesMapper("uaa", false); + Set<GrantedAuthority> authorities = authoritiesMapper.mapScopesToAuthorities("uaa", Collections.emptySet(), null); + + assertThat(authorities).hasSize(7); + assertThat(authorities) + .extracting(GrantedAuthority::getAuthority) + .containsExactlyInAnyOrder("ROLE_MANAGE", "ROLE_CREATE", "ROLE_VIEW", "ROLE_DEPLOY", "ROLE_MODIFY", + "ROLE_SCHEDULE", "ROLE_DESTROY"); + } + + @Test + void emptyMapConstructor() throws Exception { + Set<String> scopes = new HashSet<>(); + scopes.add("dataflow.manage"); + scopes.add("dataflow.view"); + scopes.add("dataflow.create"); + + DefaultAuthoritiesMapper authoritiesMapper = new DefaultAuthoritiesMapper("uaa", true); + Collection<? extends GrantedAuthority> authorities = authoritiesMapper.mapScopesToAuthorities("uaa", scopes, null); + + assertThat(authorities).hasSize(3); + assertThat(authorities) + .extracting(GrantedAuthority::getAuthority) + .containsExactlyInAnyOrder("ROLE_MANAGE", "ROLE_CREATE", "ROLE_VIEW"); + } + + @Test + void mapConstructorWithIncompleteRoleMappings() throws Exception { + ProviderRoleMapping roleMapping = new ProviderRoleMapping(); + roleMapping.setMapOauthScopes(true); + roleMapping.addRoleMapping("ROLE_MANAGE", "foo-scope-in-oauth"); + assertThatThrownBy(() -> { + new DefaultAuthoritiesMapper("uaa", roleMapping); + }).isInstanceOf(IllegalArgumentException.class).hasMessageContaining( + "The following 6 roles are not mapped: CREATE, DEPLOY, DESTROY, MODIFY, SCHEDULE, VIEW."); + } + + @Test + void that3MappedAuthoritiesAreReturned() throws Exception { + Map<String, String> roleMappings = Map.of( + "ROLE_MANAGE", "dataflow_manage", + "ROLE_VIEW", "dataflow_view", + "ROLE_CREATE", "dataflow_create", + "ROLE_MODIFY", "dataflow_modify", + "ROLE_DEPLOY", "dataflow_deploy", + "ROLE_DESTROY", "dataflow_destroy", + "ROLE_SCHEDULE", "dataflow_schedule" + ); + + ProviderRoleMapping providerRoleMapping = new ProviderRoleMapping(); + providerRoleMapping.setMapOauthScopes(true); + 
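// install the explicit role-to-scope mappings defined above +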
providerRoleMapping.getRoleMappings().putAll(roleMappings); + + Set<String> roles = Set.of("dataflow_manage", "dataflow_view", "dataflow_deploy"); + + DefaultAuthoritiesMapper defaultAuthoritiesMapper = new DefaultAuthoritiesMapper("uaa", providerRoleMapping); + Collection<? extends GrantedAuthority> authorities = defaultAuthoritiesMapper.mapScopesToAuthorities("uaa", + roles, null); + + assertThat(authorities).hasSize(3); + assertThat(authorities) + .extracting(GrantedAuthority::getAuthority) + .containsExactlyInAnyOrder("ROLE_DEPLOY", "ROLE_MANAGE", "ROLE_VIEW"); + } + + @Test + void that7MappedAuthoritiesAreReturned() throws Exception { + Map<String, String> roleMappings = Map.of( + "ROLE_MANAGE", "foo-manage", + "ROLE_VIEW", "bar-view", + "ROLE_CREATE", "blubba-create", + "ROLE_MODIFY", "foo-modify", + "ROLE_DEPLOY", "foo-deploy", + "ROLE_DESTROY", "foo-destroy", + "ROLE_SCHEDULE", "foo-schedule" + ); + + ProviderRoleMapping providerRoleMapping = new ProviderRoleMapping(); + providerRoleMapping.setMapOauthScopes(true); + providerRoleMapping.getRoleMappings().putAll(roleMappings); + + Set<String> scopes = Set.of( + "foo-manage", + "bar-view", + "blubba-create", + "foo-modify", + "foo-deploy", + "foo-destroy", + "foo-schedule" + ); + + DefaultAuthoritiesMapper defaultAuthoritiesMapper = new DefaultAuthoritiesMapper("uaa", providerRoleMapping); + Collection<? extends GrantedAuthority> authorities = defaultAuthoritiesMapper.mapScopesToAuthorities("uaa", + scopes, null); + + assertThat(authorities).hasSize(7); + assertThat(authorities) + .extracting(GrantedAuthority::getAuthority) + .containsExactlyInAnyOrder("ROLE_CREATE", "ROLE_DEPLOY", "ROLE_DESTROY", "ROLE_MANAGE", "ROLE_MODIFY", + "ROLE_SCHEDULE", "ROLE_VIEW"); + } + + @Test + void that3MappedAuthoritiesAreReturnedForDefaultMapping() throws Exception { + ProviderRoleMapping providerRoleMapping = new ProviderRoleMapping(); + providerRoleMapping.setMapOauthScopes(true); + + Set<String> scopes = Set.of( + "dataflow.manage", + "dataflow.view", + "dataflow.create" + ); + + DefaultAuthoritiesMapper defaultAuthoritiesExtractor = new DefaultAuthoritiesMapper("uaa", providerRoleMapping); + Collection<? extends GrantedAuthority> authorities = defaultAuthoritiesExtractor.mapScopesToAuthorities("uaa", + scopes, null); + + assertThat(authorities).hasSize(3); + assertThat(authorities) + .extracting(GrantedAuthority::getAuthority) + .containsExactlyInAnyOrder("ROLE_MANAGE", "ROLE_CREATE", "ROLE_VIEW"); + } + + @Test + void that7MappedAuthoritiesAreReturnedForDefaultMappingWithoutMappingScopes() throws Exception { + Set<String> scopes = Set.of( + "dataflow.manage", + "dataflow.view", + "dataflow.create" + ); + + DefaultAuthoritiesMapper defaultAuthoritiesExtractor = new DefaultAuthoritiesMapper("uaa", false); + Collection<? extends GrantedAuthority> authorities = defaultAuthoritiesExtractor.mapScopesToAuthorities("uaa", + scopes, null); + + assertThat(authorities).hasSize(7); + assertThat(authorities) + .extracting(GrantedAuthority::getAuthority) + .containsExactlyInAnyOrder("ROLE_CREATE", "ROLE_DEPLOY", "ROLE_DESTROY", "ROLE_MANAGE", "ROLE_MODIFY", + "ROLE_SCHEDULE", "ROLE_VIEW"); + } + + @Test + void that2MappedAuthoritiesAreReturnedForDefaultMapping() throws Exception { + Set<String> scopes = Set.of( + "dataflow.view", + "dataflow.create" + ); + + DefaultAuthoritiesMapper defaultAuthoritiesExtractor = new DefaultAuthoritiesMapper("uaa", true); + Collection<? extends GrantedAuthority> authorities = defaultAuthoritiesExtractor.mapScopesToAuthorities("uaa", + scopes, null); + + assertThat(authorities).hasSize(2); + assertThat(authorities.stream().map(authority -> authority.getAuthority()).collect(Collectors.toList())) + 
.containsExactlyInAnyOrder("ROLE_CREATE", "ROLE_VIEW"); + } + + @Test + void that7AuthoritiesAreReturnedAndOneOAuthScopeCoversMultipleServerRoles() throws Exception { + Map roleMappings = Map.of( + "ROLE_MANAGE", "foo-manage", + "ROLE_VIEW", "foo-manage", + "ROLE_DEPLOY", "foo-manage", + "ROLE_DESTROY", "foo-manage", + "ROLE_MODIFY", "foo-manage", + "ROLE_SCHEDULE", "foo-manage", + "ROLE_CREATE", "blubba-create" + ); + + Set scopes = Set.of("foo-manage", "blubba-create"); + + DefaultAuthoritiesMapper defaultAuthoritiesExtractor = new DefaultAuthoritiesMapper("uaa", true, roleMappings); + Collection authorities = defaultAuthoritiesExtractor.mapScopesToAuthorities("uaa", + scopes, null); + + assertThat(authorities).hasSize(7); + assertThat(authorities.stream().map(authority -> authority.getAuthority()).collect(Collectors.toList())) + .containsExactlyInAnyOrder("ROLE_CREATE", "ROLE_DEPLOY", "ROLE_DESTROY", "ROLE_MANAGE", "ROLE_MODIFY", + "ROLE_SCHEDULE", "ROLE_VIEW"); + } + + @Test + void thatUriStyleScopeRemovesLeadingPart() throws Exception { + Map roleMappings = Map.of( + "ROLE_MANAGE", "foo-manage", + "ROLE_VIEW", "foo-manage", + "ROLE_DEPLOY", "foo-manage", + "ROLE_DESTROY", "foo-manage", + "ROLE_MODIFY", "foo-manage", + "ROLE_SCHEDULE", "foo-manage", + "ROLE_CREATE", "blubba-create" + ); + + Set scopes = Set.of("api://foobar/foo-manage", "blubba-create"); + + DefaultAuthoritiesMapper defaultAuthoritiesExtractor = new DefaultAuthoritiesMapper("uaa", true, roleMappings); + Collection authorities = defaultAuthoritiesExtractor.mapScopesToAuthorities("uaa", + scopes, null); + + assertThat(authorities).hasSize(7); + assertThat(authorities) + .extracting(GrantedAuthority::getAuthority) + .containsExactlyInAnyOrder("ROLE_CREATE", "ROLE_DEPLOY", "ROLE_DESTROY", "ROLE_MANAGE", "ROLE_MODIFY", + "ROLE_SCHEDULE", "ROLE_VIEW"); + } + + @Test + void thatUriStyleScopeParsingCanBeDisabled() throws Exception { + Map roleMappings = Map.of( + "ROLE_MANAGE", "/ROLE/2000803042", + "ROLE_VIEW", "/ROLE/2000803036", + "ROLE_DEPLOY", "/ROLE/2000803039", + "ROLE_DESTROY", "/ROLE/20008030340", + "ROLE_MODIFY", "/ROLE/2000803037", + "ROLE_SCHEDULE", "/ROLE/2000803038", + "ROLE_CREATE", "/ROLE/2000803041" + ); + + ProviderRoleMapping providerRoleMapping = new ProviderRoleMapping(); + providerRoleMapping.setMapOauthScopes(true); + providerRoleMapping.setParseOauthScopePathParts(false); + providerRoleMapping.getRoleMappings().putAll(roleMappings); + + Set scopes = Set.of( + "/ROLE/2000803042", + "/ROLE/2000803036", + "/ROLE/2000803039", + "/ROLE/20008030340", + "/ROLE/2000803037", + "/ROLE/2000803038", + "/ROLE/2000803041" + ); + + DefaultAuthoritiesMapper defaultAuthoritiesMapper = new DefaultAuthoritiesMapper("uaa", providerRoleMapping); + Collection authorities = defaultAuthoritiesMapper.mapScopesToAuthorities("uaa", + scopes, null); + + assertThat(authorities).hasSize(7); + assertThat(authorities) + .extracting(GrantedAuthority::getAuthority) + .containsExactlyInAnyOrder("ROLE_CREATE", "ROLE_DEPLOY", "ROLE_DESTROY", "ROLE_MANAGE", "ROLE_MODIFY", + "ROLE_SCHEDULE", "ROLE_VIEW"); + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/ExternalOauth2ResourceAuthoritiesMapperTests.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/ExternalOauth2ResourceAuthoritiesMapperTests.java new file mode 100644 index 
0000000000..cdd9600a4a --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/ExternalOauth2ResourceAuthoritiesMapperTests.java @@ -0,0 +1,80 @@ +/* + * Copyright 2018-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import java.io.IOException; +import java.net.URI; +import java.util.HashSet; +import java.util.Set; + +import com.fasterxml.jackson.databind.ObjectMapper; +import okhttp3.mockwebserver.MockResponse; +import okhttp3.mockwebserver.MockWebServer; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.core.authority.SimpleGrantedAuthority; + +import static org.assertj.core.api.Assertions.assertThat; + + +/** + * @author Mike Heath + * @author Gunnar Hillert + * @author Corneil du Plessis + */ +public class ExternalOauth2ResourceAuthoritiesMapperTests { + + public MockWebServer mockBackEnd; + + + @Before + public void setUp() throws IOException { + mockBackEnd = new MockWebServer(); + mockBackEnd.start(); + } + @After + public void tearDown() throws IOException { + mockBackEnd.shutdown(); + } + + + @Test + public void testExtractAuthorities() throws Exception { + assertAuthorities2(mockBackEnd.url("/authorities").uri(), "VIEW"); + assertAuthorities2(mockBackEnd.url("/authorities").uri(), "VIEW", "CREATE", "MANAGE"); + assertAuthorities2(mockBackEnd.url("/").uri(), "MANAGE"); + assertAuthorities2(mockBackEnd.url("/").uri(), "DEPLOY", "DESTROY", "MODIFY", "SCHEDULE"); + assertThat(mockBackEnd.getRequestCount()).isEqualTo(4); + } + + private void assertAuthorities2(URI uri, String... 
roles) throws Exception { + ObjectMapper objectMapper = new ObjectMapper(); + mockBackEnd.enqueue(new MockResponse() + .setBody(objectMapper.writeValueAsString(roles)) + .addHeader("Content-Type", "application/json")); + + final ExternalOauth2ResourceAuthoritiesMapper authoritiesExtractor = + new ExternalOauth2ResourceAuthoritiesMapper(uri); + final Set<GrantedAuthority> grantedAuthorities = authoritiesExtractor.mapScopesToAuthorities(null, new HashSet<>(), "1234567"); + for (String role : roles) { + assertThat(grantedAuthorities).containsAnyOf(new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + role)); + } + assertThat(mockBackEnd.takeRequest().getHeaders().get("Authorization")).isEqualTo("Bearer 1234567"); + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/MappingJwtGrantedAuthoritiesConverterTests.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/MappingJwtGrantedAuthoritiesConverterTests.java new file mode 100644 index 0000000000..ac5fb55274 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/MappingJwtGrantedAuthoritiesConverterTests.java @@ -0,0 +1,271 @@ +/* + * Copyright 2020-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.springframework.cloud.common.security.support; + +import java.time.Instant; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.Test; + +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.core.authority.SimpleGrantedAuthority; +import org.springframework.security.oauth2.jwt.Jwt; + +import static org.assertj.core.api.Assertions.assertThat; + +/** + * Tests for {@link MappingJwtGrantedAuthoritiesConverter} + * + */ +public class MappingJwtGrantedAuthoritiesConverterTests { + + public static Jwt.Builder jwt() { + return Jwt.withTokenValue("token") + .header("alg", "none") + .audience(Arrays.asList("https://audience.example.org")) + .expiresAt(Instant.MAX) + .issuedAt(Instant.MIN) + .issuer("https://issuer.example.org") + .jti("jti") + .notBefore(Instant.MIN) + .subject("mock-test-subject"); + } + + public static Jwt user() { + return jwt() + .claim("sub", "mock-test-subject") + .build(); + } + + @Test + public void convertWhenTokenHasScopeAttributeThenTranslatedToAuthorities() { + Jwt jwt = jwt().claim("scope", "message:read message:write").build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).containsExactlyInAnyOrder( + new SimpleGrantedAuthority("SCOPE_message:read"), + new SimpleGrantedAuthority("SCOPE_message:write")); + } + + @Test + public void convertWithCustomAuthorityPrefixWhenTokenHasScopeAttributeThenTranslatedToAuthoritiesViaMapping() { + Jwt jwt = jwt().claim("scope", "message:read message:write").build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + jwtGrantedAuthoritiesConverter.setAuthorityPrefix("ROLE_"); + Map<String, String> authoritiesMapping = new HashMap<>(); + authoritiesMapping.put("READ", "message:read"); + authoritiesMapping.put("WRITE", "message:write"); + jwtGrantedAuthoritiesConverter.setAuthoritiesMapping(authoritiesMapping); + Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).containsExactly( + new SimpleGrantedAuthority("ROLE_READ"), + new SimpleGrantedAuthority("ROLE_WRITE")); + } + + @Test + public void convertWithCustomAuthorityWhenTokenHasScopeAttributeThenTranslatedToAuthoritiesViaMapping() { + Jwt jwt = jwt().claim("scope", "message:read message:write").build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + jwtGrantedAuthoritiesConverter.setAuthorityPrefix(""); + Map<String, String> authoritiesMapping = new HashMap<>(); + authoritiesMapping.put("ROLE_READ", "message:read"); + authoritiesMapping.put("ROLE_WRITE", "message:write"); + jwtGrantedAuthoritiesConverter.setAuthoritiesMapping(authoritiesMapping); + Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).containsExactly( + new SimpleGrantedAuthority("ROLE_READ"), + new SimpleGrantedAuthority("ROLE_WRITE")); + } + + @Test + public void convertWithCustomAuthorityPrefixWhenTokenHasScopeAttributeThenTranslatedToAuthorities() { + Jwt jwt = jwt().claim("scope", "message:read message:write").build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + 
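// switch the authority prefix from the converter's default "SCOPE_" to "ROLE_" +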
jwtGrantedAuthoritiesConverter.setAuthorityPrefix("ROLE_"); + Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).containsExactlyInAnyOrder( + new SimpleGrantedAuthority("ROLE_message:read"), + new SimpleGrantedAuthority("ROLE_message:write")); + } + + @Test + public void convertWhenTokenHasEmptyScopeAttributeThenTranslatedToNoAuthorities() { + Jwt jwt = jwt().claim("scope", "").build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).isEmpty(); + } + + @Test + public void convertWhenTokenHasScpAttributeThenTranslatedToAuthorities() { + Jwt jwt = jwt().claim("scp", Arrays.asList("message:read", "message:write")).build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).containsExactlyInAnyOrder( + new SimpleGrantedAuthority("SCOPE_message:read"), + new SimpleGrantedAuthority("SCOPE_message:write")); + } + + @Test + public void convertWithCustomAuthorityPrefixWhenTokenHasScpAttributeThenTranslatedToAuthorities() { + Jwt jwt = jwt().claim("scp", Arrays.asList("message:read", "message:write")).build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + jwtGrantedAuthoritiesConverter.setAuthorityPrefix("ROLE_"); + Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).containsExactlyInAnyOrder( + new SimpleGrantedAuthority("ROLE_message:read"), + new SimpleGrantedAuthority("ROLE_message:write")); + } + + @Test + public void convertWhenTokenHasEmptyScpAttributeThenTranslatedToNoAuthorities() { + Jwt jwt = jwt().claim("scp", Collections.emptyList()).build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).isEmpty(); + } + + @Test + public void convertWhenTokenHasBothScopeAndScpThenScopeAttributeIsTranslatedToAuthorities() { + Jwt jwt = jwt() + .claim("scp", Arrays.asList("message:read", "message:write")) + .claim("scope", "missive:read missive:write") + .build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).containsExactly( + new SimpleGrantedAuthority("SCOPE_missive:read"), + new SimpleGrantedAuthority("SCOPE_missive:write")); + } + + @Test + public void convertWhenTokenHasEmptyScopeAndNonEmptyScpThenScopeAttributeIsTranslatedToNoAuthorities() { + Jwt jwt = jwt() + .claim("scp", Arrays.asList("message:read", "message:write")) + .claim("scope", "") + .build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).isEmpty(); + } + + @Test + public void convertWhenTokenHasEmptyScopeAndEmptyScpAttributeThenTranslatesToNoAuthorities() { + Jwt jwt = jwt() + .claim("scp", Collections.emptyList()) + .claim("scope", Collections.emptyList()) + .build(); + + MappingJwtGrantedAuthoritiesConverter 
jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).isEmpty(); + } + + @Test + public void convertWhenTokenHasNoScopeAndNoScpAttributeThenTranslatesToNoAuthorities() { + Jwt jwt = jwt().claim("xxx", Arrays.asList("message:read", "message:write")).build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).isEmpty(); + } + + @Test + public void convertWhenTokenHasUnsupportedTypeForScopeThenTranslatesToNoAuthorities() { + Jwt jwt = jwt().claim("scope", new String[] {"message:read", "message:write"}).build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).isEmpty(); + } + + @Test + public void convertWhenTokenHasCustomClaimNameThenCustomClaimNameAttributeIsTranslatedToAuthorities() { + Jwt jwt = jwt() + .claim("xxx", Arrays.asList("message:read", "message:write")) + .claim("scope", "missive:read missive:write") + .build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + jwtGrantedAuthoritiesConverter.setAuthoritiesClaimName("xxx"); + Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).containsExactlyInAnyOrder( + new SimpleGrantedAuthority("SCOPE_message:read"), + new SimpleGrantedAuthority("SCOPE_message:write")); + } + + @Test + public void convertWhenTokenHasEmptyCustomClaimNameThenCustomClaimNameAttributeIsTranslatedToNoAuthorities() { + Jwt jwt = jwt() + .claim("roles", Collections.emptyList()) + .claim("scope", "missive:read missive:write") + .build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + jwtGrantedAuthoritiesConverter.setAuthoritiesClaimName("roles"); + Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).isEmpty(); + } + + @Test + public void convertWhenTokenHasNoCustomClaimNameThenCustomClaimNameAttributeIsTranslatedToNoAuthorities() { + Jwt jwt = jwt().claim("scope", "missive:read missive:write").build(); + + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + jwtGrantedAuthoritiesConverter.setAuthoritiesClaimName("roles"); + Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + + assertThat(authorities).isEmpty(); + } + + @Test + public void convertWhenTokenHasGroupClaims() { + Jwt jwt = jwt().claim("groups", Arrays.asList("role1")).build(); + MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter(); + Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt); + assertThat(authorities).containsExactlyInAnyOrder(new SimpleGrantedAuthority("SCOPE_role1")); + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/OAuth2TokenUtilsServiceTests.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/OAuth2TokenUtilsServiceTests.java new file 
mode 100644 index 0000000000..d0aa68a8c2 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/OAuth2TokenUtilsServiceTests.java @@ -0,0 +1,154 @@ +/* + * Copyright 2019-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.common.security.support; + +import java.time.Instant; + +import org.junit.jupiter.api.Test; + +import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.security.oauth2.client.OAuth2AuthorizedClient; +import org.springframework.security.oauth2.client.OAuth2AuthorizedClientService; +import org.springframework.security.oauth2.client.authentication.OAuth2AuthenticationToken; +import org.springframework.security.oauth2.client.registration.ClientRegistration; +import org.springframework.security.oauth2.core.AuthorizationGrantType; +import org.springframework.security.oauth2.core.OAuth2AccessToken; +import org.springframework.security.oauth2.core.OAuth2AccessToken.TokenType; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +/** + * + * @author Gunnar Hillert + * + */ +public class OAuth2TokenUtilsServiceTests { + + @Test + public void testGetAccessTokenOfAuthenticatedUserWithNoAuthentication() { + SecurityContextHolder.getContext().setAuthentication(null); + + final OAuth2AuthorizedClientService oauth2AuthorizedClientService = mock(OAuth2AuthorizedClientService.class); + OAuth2TokenUtilsService oAuth2TokenUtilsService = new DefaultOAuth2TokenUtilsService(oauth2AuthorizedClientService); + + assertThatThrownBy(() -> { + oAuth2TokenUtilsService.getAccessTokenOfAuthenticatedUser(); + }).isInstanceOf(IllegalStateException.class).hasMessageContaining( + "Cannot retrieve the authentication object from the SecurityContext. 
Are you authenticated?"); + } + + @Test + public void testGetAccessTokenOfAuthenticatedUserWithWrongAuthentication() { + final Authentication authentication = mock(Authentication.class); + SecurityContextHolder.getContext().setAuthentication(authentication); + + final OAuth2AuthorizedClientService oauth2AuthorizedClientService = mock(OAuth2AuthorizedClientService.class); + OAuth2TokenUtilsService oAuth2TokenUtilsService = new DefaultOAuth2TokenUtilsService(oauth2AuthorizedClientService); + + assertThatThrownBy(() -> { + oAuth2TokenUtilsService.getAccessTokenOfAuthenticatedUser(); + }).isInstanceOf(IllegalStateException.class).hasMessageContaining("Unsupported authentication object type"); + SecurityContextHolder.getContext().setAuthentication(null); + } + + @Test + public void testGetAccessTokenOfAuthenticatedUserWithEmptyPrincipalName() { + final OAuth2AuthenticationToken authentication = mock(OAuth2AuthenticationToken.class); + when(authentication.getName()).thenReturn(""); + when(authentication.getAuthorizedClientRegistrationId()).thenReturn("uaa"); + SecurityContextHolder.getContext().setAuthentication(authentication); + + final OAuth2AuthorizedClientService oauth2AuthorizedClientService = mock(OAuth2AuthorizedClientService.class); + OAuth2TokenUtilsService oAuth2TokenUtilsService = new DefaultOAuth2TokenUtilsService(oauth2AuthorizedClientService); + + assertThatThrownBy(() -> { + oAuth2TokenUtilsService.getAccessTokenOfAuthenticatedUser(); + }).isInstanceOf(IllegalStateException.class) + .hasMessageContaining("The retrieved principalName must not be null or empty."); + SecurityContextHolder.getContext().setAuthentication(null); + } + + @Test + public void testGetAccessTokenOfAuthenticatedUserWithEmptyClientRegistrationId() { + final OAuth2AuthenticationToken authentication = mock(OAuth2AuthenticationToken.class); + when(authentication.getName()).thenReturn("FOO"); + when(authentication.getAuthorizedClientRegistrationId()).thenReturn(""); + SecurityContextHolder.getContext().setAuthentication(authentication); + + final OAuth2AuthorizedClientService oauth2AuthorizedClientService = mock(OAuth2AuthorizedClientService.class); + OAuth2TokenUtilsService oAuth2TokenUtilsService = new DefaultOAuth2TokenUtilsService(oauth2AuthorizedClientService); + + assertThatThrownBy(() -> { + oAuth2TokenUtilsService.getAccessTokenOfAuthenticatedUser(); + }).isInstanceOf(IllegalStateException.class) + .hasMessageContaining("The retrieved clientRegistrationId must not be null or empty."); + SecurityContextHolder.getContext().setAuthentication(null); + } + + @Test + public void testGetAccessTokenOfAuthenticatedUserWithWrongClientRegistrationId() { + final OAuth2AuthenticationToken authentication = mock(OAuth2AuthenticationToken.class); + when(authentication.getName()).thenReturn("my-username"); + when(authentication.getAuthorizedClientRegistrationId()).thenReturn("CID"); + SecurityContextHolder.getContext().setAuthentication(authentication); + + final OAuth2AuthorizedClientService oauth2AuthorizedClientService = mock(OAuth2AuthorizedClientService.class); + when(oauth2AuthorizedClientService.loadAuthorizedClient("uaa", "my-username")).thenReturn(getOAuth2AuthorizedClient()); + final OAuth2TokenUtilsService oauth2TokenUtilsService = new DefaultOAuth2TokenUtilsService(oauth2AuthorizedClientService); + + assertThatThrownBy(() -> { + oauth2TokenUtilsService.getAccessTokenOfAuthenticatedUser(); + }).isInstanceOf(IllegalStateException.class).hasMessageContaining( + "No oauth2AuthorizedClient returned for 
clientRegistrationId 'CID' and principalName 'my-username'."); + SecurityContextHolder.getContext().setAuthentication(null); + } + + @Test + public void testGetAccessTokenOfAuthenticatedUserWithAuthentication() { + final OAuth2AuthenticationToken authentication = mock(OAuth2AuthenticationToken.class); + when(authentication.getName()).thenReturn("my-username"); + when(authentication.getAuthorizedClientRegistrationId()).thenReturn("uaa"); + SecurityContextHolder.getContext().setAuthentication(authentication); + + final OAuth2AuthorizedClientService oauth2AuthorizedClientService = mock(OAuth2AuthorizedClientService.class); + when(oauth2AuthorizedClientService.loadAuthorizedClient("uaa", "my-username")).thenReturn(getOAuth2AuthorizedClient()); + final OAuth2TokenUtilsService oauth2TokenUtilsService = new DefaultOAuth2TokenUtilsService(oauth2AuthorizedClientService); + + assertThat(oauth2TokenUtilsService.getAccessTokenOfAuthenticatedUser()).isEqualTo("foo-bar-123-token"); + SecurityContextHolder.getContext().setAuthentication(null); + } + + private OAuth2AuthorizedClient getOAuth2AuthorizedClient() { + final ClientRegistration clientRegistration = ClientRegistration + .withRegistrationId("uaa") + .clientId("clientId") + .clientSecret("clientSecret") + .redirectUri("blubba") + .authorizationUri("blubba") + .tokenUri("blubba") + .authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE) + .build(); + final OAuth2AccessToken accessToken = new OAuth2AccessToken(TokenType.BEARER, "foo-bar-123-token", Instant.now(), Instant.now().plusMillis(100000)); + final OAuth2AuthorizedClient authorizedClient = new OAuth2AuthorizedClient(clientRegistration, "my-username", accessToken); + return authorizedClient; + } + +} diff --git a/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/pom.xml b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/pom.xml new file mode 100644 index 0000000000..b0148eeee2 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/pom.xml @@ -0,0 +1,49 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + <parent> + <groupId>org.springframework.cloud</groupId> + <artifactId>spring-cloud-common-security-config</artifactId> + <version>3.0.0-SNAPSHOT</version> + </parent> + <artifactId>spring-cloud-starter-common-security-config-web</artifactId> + <name>spring-cloud-starter-common-security-config-web</name> + <description>Spring Cloud Starter Common Security Config Web</description> + <packaging>pom</packaging> + <properties> + true + </properties> + <dependencies> + <dependency> + <groupId>org.springframework.cloud</groupId> + <artifactId>spring-cloud-common-security-config-web</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-web</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-test</artifactId> + <scope>test</scope> + </dependency> + </dependencies> + <build> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-jar-plugin</artifactId> + <version>3.3.0</version> + <executions> + <execution> + <goals> + <goal>test-jar</goal> + </goals> + </execution> + </executions> + </plugin> + </plugins> + </build> +</project> diff --git a/spring-cloud-dataflow-completion/src/test/support/common/src/main/java/com/acme/common/SomeEnum.java b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/SpringCloudCommonSecurityApplicationTests.java similarity index 61% rename from spring-cloud-dataflow-completion/src/test/support/common/src/main/java/com/acme/common/SomeEnum.java rename to spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/SpringCloudCommonSecurityApplicationTests.java index 71c096cc1d..df2f761d8e 100644 --- a/spring-cloud-dataflow-completion/src/test/support/common/src/main/java/com/acme/common/SomeEnum.java +++ 
b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/SpringCloudCommonSecurityApplicationTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2016 the original author or authors. + * Copyright 2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -13,17 +13,22 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +package org.springframework.cloud.common.security; -package com.acme.common; +import org.junit.jupiter.api.Test; + +import org.springframework.boot.test.context.SpringBootTest; /** - * An enum class used in {@link ConfigProperties}. Useful to test, because this class has - * to be accessible to the ClassLoader used to retrieve metadata. + * Testing startup and configuration * - * @author Eric Bottard + * @author Corneil du Plessis */ -public enum SomeEnum { - one, - two, - three; +@SpringBootTest +class SpringCloudCommonSecurityApplicationTests { + + @Test + void contextLoads() { + } + } diff --git a/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/SpringCloudCommonSecurityTestApplication.java b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/SpringCloudCommonSecurityTestApplication.java new file mode 100644 index 0000000000..08c8855d75 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/SpringCloudCommonSecurityTestApplication.java @@ -0,0 +1,58 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.common.security; + +import java.security.Principal; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.actuate.autoconfigure.metrics.MetricsAutoConfiguration; +import org.springframework.boot.actuate.autoconfigure.security.servlet.ManagementWebSecurityAutoConfiguration; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration; +import org.springframework.boot.autoconfigure.security.servlet.UserDetailsServiceAutoConfiguration; +import org.springframework.boot.autoconfigure.session.SessionAutoConfiguration; +import org.springframework.context.annotation.Import; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RestController; + +/** + * Minimal application to verify configuration + * + * @author Corneil du Plessis + */ +@SpringBootApplication(exclude = { + MetricsAutoConfiguration.class, + ManagementWebSecurityAutoConfiguration.class, + SecurityAutoConfiguration.class, + UserDetailsServiceAutoConfiguration.class, + SessionAutoConfiguration.class +}) + +@Import({CommonSecurityAutoConfiguration.class, TestOAuthSecurityConfiguration.class}) +public class SpringCloudCommonSecurityTestApplication { + + public static void main(String[] args) { + SpringApplication.run(SpringCloudCommonSecurityTestApplication.class, args); + } + + @RestController + public static class SimpleController { + @GetMapping("/user") + public String getUser(Principal principal) { + return principal.getName(); + } + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/TestOAuthSecurityConfiguration.java b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/TestOAuthSecurityConfiguration.java new file mode 100644 index 0000000000..0b1b2ea2e8 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/TestOAuthSecurityConfiguration.java @@ -0,0 +1,49 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.common.security; + +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.cloud.common.security.support.OnOAuth2SecurityEnabled; +import org.springframework.cloud.common.security.support.SecurityStateBean; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; + +/** + * We need to mimic the configuration of Dataflow and Skipper + * + * @author Corneil du Plessis + */ +@Configuration(proxyBeanMethods = false) +@Conditional(OnOAuth2SecurityEnabled.class) +@Import(TestOAuthSecurityConfiguration.SecurityStateBeanConfig.class) +public class TestOAuthSecurityConfiguration extends OAuthSecurityConfiguration { + + @Configuration(proxyBeanMethods = false) + public static class SecurityStateBeanConfig { + @Bean + public SecurityStateBean securityStateBean() { + return new SecurityStateBean(); + } + + @Bean + @ConfigurationProperties(prefix = "spring.cloud.common.security.test.authorization") + public AuthorizationProperties authorizationProperties() { + return new AuthorizationProperties(); + } + } +} diff --git a/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/resources/application.yml b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/resources/application.yml new file mode 100644 index 0000000000..e5de703119 --- /dev/null +++ b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/resources/application.yml @@ -0,0 +1,40 @@ +logging: +# file: +# name: sccsc-test.log + level: + org.springframework: DEBUG +spring: + security: + oauth2: + client: + registration: + uaa: + redirect-uri: '{baseUrl}/login/oauth2/code/{registrationId}' + authorization-grant-type: authorization_code + client-id: myclient + client-secret: mysecret + access-token-uri: http://127.0.0.1:8888/oauth/token + user-authorization-uri: http://127.0.0.1:8888/oauth/authorize + provider: + uaa: + authorization-uri: http://127.0.0.1:8888/oauth/authorize + user-info-uri: http://127.0.0.1:8888/me + token-uri: http://127.0.0.1:8888/oauth/token + resourceserver: + opaquetoken: + introspection-uri: http://127.0.0.1:8888/oauth/check_token + client-id: myclient + client-secret: mysecret + cloud: + common: + security: + test: + authorization: + check-token-access: isAuthenticated() + authorization: + enabled: true + permit-all-paths: "/user,./assets/**,/dashboard/logout-success-oauth.html" + authenticated-paths: "/user" + rules: + # User + - GET /user => hasRole('ROLE_VIEW') diff --git a/spring-cloud-dataflow-audit/pom.xml b/spring-cloud-dataflow-audit/pom.xml index 1a4f7ddff6..2adea4114f 100644 --- a/spring-cloud-dataflow-audit/pom.xml +++ b/spring-cloud-dataflow-audit/pom.xml @@ -1,21 +1,36 @@ - + <modelVersion>4.0.0</modelVersion> <parent> <artifactId>spring-cloud-dataflow-parent</artifactId> <groupId>org.springframework.cloud</groupId> - <version>2.8.0-SNAPSHOT</version> + <version>3.0.0-SNAPSHOT</version> + <relativePath>../spring-cloud-dataflow-parent</relativePath> </parent> <artifactId>spring-cloud-dataflow-audit</artifactId> + <name>spring-cloud-dataflow-audit</name> + <description>Spring Cloud Data Flow Audit</description> + <packaging>jar</packaging> + + <properties> + true + <maven-javadoc-plugin.version>3.4.1</maven-javadoc-plugin.version> + </properties> + <dependencies> + <dependency> + <groupId>jakarta.persistence</groupId> + <artifactId>jakarta.persistence-api</artifactId> + </dependency> <dependency> <groupId>org.springframework.cloud</groupId> <artifactId>spring-cloud-dataflow-core</artifactId> + <version>${project.version}</version> </dependency> <dependency> <groupId>org.springframework.cloud</groupId> <artifactId>spring-cloud-dataflow-rest-resource</artifactId> + <version>${project.version}</version> </dependency> <dependency> <groupId>org.springframework.boot</groupId> @@ -23,4 +38,45 @@ <scope>test</scope> </dependency> </dependencies> + + <build> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-compiler-plugin</artifactId> + <version>3.11.0</version> + <configuration> + 
true + <release>${java.version}</release> + </configuration> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-javadoc-plugin</artifactId> + <version>${maven-javadoc-plugin.version}</version> + <executions> + <execution> + <id>javadoc</id> + <goals> + <goal>jar</goal> + </goals> + <phase>package</phase> + </execution> + </executions> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-source-plugin</artifactId> + <version>3.3.0</version> + <executions> + <execution> + <id>source</id> + <goals> + <goal>jar</goal> + </goals> + <phase>package</phase> + </execution> + </executions> + </plugin> + </plugins> + </build> </project> diff --git a/spring-cloud-dataflow-audit/src/main/java/org/springframework/cloud/dataflow/audit/repository/jpa/AuditRecordRepositoryImpl.java b/spring-cloud-dataflow-audit/src/main/java/org/springframework/cloud/dataflow/audit/repository/jpa/AuditRecordRepositoryImpl.java index 70f4d2ea12..350b2b15dc 100644 --- a/spring-cloud-dataflow-audit/src/main/java/org/springframework/cloud/dataflow/audit/repository/jpa/AuditRecordRepositoryImpl.java +++ b/spring-cloud-dataflow-audit/src/main/java/org/springframework/cloud/dataflow/audit/repository/jpa/AuditRecordRepositoryImpl.java @@ -19,14 +19,15 @@ import java.util.ArrayList; import java.util.List; -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.TypedQuery; -import javax.persistence.criteria.CriteriaBuilder; -import javax.persistence.criteria.CriteriaQuery; -import javax.persistence.criteria.Path; -import javax.persistence.criteria.Predicate; -import javax.persistence.criteria.Root; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.TypedQuery; +import jakarta.persistence.criteria.CriteriaBuilder; +import jakarta.persistence.criteria.CriteriaQuery; +import jakarta.persistence.criteria.Path; +import jakarta.persistence.criteria.Predicate; +import jakarta.persistence.criteria.Root; +import org.hibernate.query.sqm.tree.select.SqmSelectStatement; import org.springframework.cloud.dataflow.audit.repository.AuditRecordRepositoryCustom; import org.springframework.cloud.dataflow.core.AuditActionType; @@ -121,14 +122,7 @@ else if (fromDate != null && toDate != null) { final List<AuditRecord> resultList = typedQuery.getResultList(); - final CriteriaQuery<Long> countQuery = cb.createQuery(Long.class); - countQuery.select(cb.count(countQuery.from(AuditRecord.class))); - - if (!finalQueryPredicates.isEmpty()) { - countQuery.where(finalQueryPredicates.toArray(new Predicate[0])); - } - - final Long totalCount = entityManager.createQuery(countQuery) + final Long totalCount = (Long)entityManager.createQuery(((SqmSelectStatement<?>)select).createCountQuery()) .getSingleResult(); return new PageImpl<>(resultList, pageable, totalCount); diff --git a/spring-cloud-dataflow-audit/src/main/java/org/springframework/cloud/dataflow/audit/service/DefaultAuditRecordService.java b/spring-cloud-dataflow-audit/src/main/java/org/springframework/cloud/dataflow/audit/service/DefaultAuditRecordService.java index 7eff50695e..84eccdc2df 100644 --- a/spring-cloud-dataflow-audit/src/main/java/org/springframework/cloud/dataflow/audit/service/DefaultAuditRecordService.java +++ b/spring-cloud-dataflow-audit/src/main/java/org/springframework/cloud/dataflow/audit/service/DefaultAuditRecordService.java @@ -16,6 +16,7 @@ package org.springframework.cloud.dataflow.audit.service; import java.time.Instant; +import java.util.HashMap; import java.util.Map; import java.util.Optional; @@ -26,6 +27,7 @@ import org.slf4j.LoggerFactory; import org.springframework.cloud.dataflow.audit.repository.AuditRecordRepository; +import org.springframework.cloud.dataflow.core.ArgumentSanitizer; import org.springframework.cloud.dataflow.core.AuditActionType; import org.springframework.cloud.dataflow.core.AuditOperationType; import 
org.springframework.cloud.dataflow.core.AuditRecord; @@ -38,74 +40,107 @@ * * @author Gunnar Hillert * @author Daniel Serleg + * @author Corneil du Plessis */ public class DefaultAuditRecordService implements AuditRecordService { - private static final Logger logger = LoggerFactory.getLogger(DefaultAuditRecordService.class); - - private final AuditRecordRepository auditRecordRepository; - - private final ObjectMapper objectMapper; - - public DefaultAuditRecordService(AuditRecordRepository auditRecordRepository) { - Assert.notNull(auditRecordRepository, "auditRecordRepository must not be null."); - this.auditRecordRepository = auditRecordRepository; - this.objectMapper = new ObjectMapper(); - this.objectMapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); - } - - public DefaultAuditRecordService(AuditRecordRepository auditRecordRepository, ObjectMapper objectMapper) { - Assert.notNull(auditRecordRepository, "auditRecordRepository must not be null."); - Assert.notNull(objectMapper, "objectMapper must not be null."); - this.auditRecordRepository = auditRecordRepository; - this.objectMapper = objectMapper; - } - - @Override - public AuditRecord populateAndSaveAuditRecord(AuditOperationType auditOperationType, - AuditActionType auditActionType, - String correlationId, String data, String platformName) { - Assert.notNull(auditActionType, "auditActionType must not be null."); - Assert.notNull(auditOperationType, "auditOperationType must not be null."); - - final AuditRecord auditRecord = new AuditRecord(); - auditRecord.setAuditAction(auditActionType); - auditRecord.setAuditOperation(auditOperationType); - auditRecord.setCorrelationId(correlationId); - auditRecord.setAuditData(data); - auditRecord.setPlatformName(platformName); - return this.auditRecordRepository.save(auditRecord); - } - - @Override - public AuditRecord populateAndSaveAuditRecordUsingMapData(AuditOperationType auditOperationType, - AuditActionType auditActionType, - String correlationId, Map<String, Object> data, String platformName) { - String dataAsString; - try { - dataAsString = objectMapper.writeValueAsString(data); - } - catch (JsonProcessingException e) { - logger.error("Error serializing audit record data. Data = " + data); - dataAsString = "Error serializing audit record data. 
Data = " + data; - } - return this.populateAndSaveAuditRecord(auditOperationType, auditActionType, correlationId, dataAsString, platformName); - } - - @Override - public Page findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate( - Pageable pageable, - AuditActionType[] actions, - AuditOperationType[] operations, - Instant fromDate, - Instant toDate) { - return this.auditRecordRepository.findByActionTypeAndOperationTypeAndDate(operations, actions, fromDate, toDate, - pageable); - } - - @Override - public Optional findById(Long id) { - return this.auditRecordRepository.findById(id); - } + private static final Logger logger = LoggerFactory.getLogger(DefaultAuditRecordService.class); + + private final AuditRecordRepository auditRecordRepository; + + private final ObjectMapper objectMapper; + + private final ArgumentSanitizer sanitizer; + + public DefaultAuditRecordService(AuditRecordRepository auditRecordRepository) { + + this(auditRecordRepository, null); + } + + public DefaultAuditRecordService(AuditRecordRepository auditRecordRepository, ObjectMapper objectMapper) { + + Assert.notNull(auditRecordRepository, "auditRecordRepository must not be null."); + this.auditRecordRepository = auditRecordRepository; + if (objectMapper == null) { + objectMapper = new ObjectMapper(); + objectMapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); + } + this.objectMapper = objectMapper; + this.sanitizer = new ArgumentSanitizer(); + } + + @Override + public AuditRecord populateAndSaveAuditRecord(AuditOperationType auditOperationType, + AuditActionType auditActionType, + String correlationId, String data, String platformName) { + + Assert.notNull(auditActionType, "auditActionType must not be null."); + Assert.notNull(auditOperationType, "auditOperationType must not be null."); + + final AuditRecord auditRecord = new AuditRecord(); + auditRecord.setAuditAction(auditActionType); + auditRecord.setAuditOperation(auditOperationType); + auditRecord.setCorrelationId(correlationId); + auditRecord.setAuditData(data); + auditRecord.setPlatformName(platformName); + return this.auditRecordRepository.save(auditRecord); + } + + @Override + public AuditRecord populateAndSaveAuditRecordUsingMapData( + AuditOperationType auditOperationType, + AuditActionType auditActionType, + String correlationId, Map data, + String platformName + ) { + + String dataAsString; + try { + Map sanitizedData = sanitizeMap(data); + dataAsString = objectMapper.writeValueAsString(sanitizedData); + } catch (JsonProcessingException e) { + logger.error("Error serializing audit record data. Data = " + data); + dataAsString = "Error serializing audit record data. 
Data = " + data; + } + return this.populateAndSaveAuditRecord(auditOperationType, auditActionType, correlationId, dataAsString, platformName); + } + + private Map sanitizeMap(Map data) { + + final Map result = new HashMap<>(); + data.forEach((k, v) -> result.put(k, sanitize(k, v))); + return result; + } + + private Object sanitize(String key, Object value) { + + if (value instanceof String) { + return sanitizer.sanitize(key, (String) value); + } else if (value instanceof Map) { + Map input = (Map) value; + return sanitizeMap(input); + } else { + return value; + } + } + + + @Override + public Page findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate( + Pageable pageable, + AuditActionType[] actions, + AuditOperationType[] operations, + Instant fromDate, + Instant toDate) { + + return this.auditRecordRepository.findByActionTypeAndOperationTypeAndDate(operations, actions, fromDate, toDate, + pageable); + } + + @Override + public Optional findById(Long id) { + + return this.auditRecordRepository.findById(id); + } } diff --git a/spring-cloud-dataflow-audit/src/test/java/org/springframework/cloud/dataflow/server/audit/service/DefaultAuditRecordServiceTests.java b/spring-cloud-dataflow-audit/src/test/java/org/springframework/cloud/dataflow/server/audit/service/DefaultAuditRecordServiceTests.java index 5b12a0bf03..2d084d881e 100644 --- a/spring-cloud-dataflow-audit/src/test/java/org/springframework/cloud/dataflow/server/audit/service/DefaultAuditRecordServiceTests.java +++ b/spring-cloud-dataflow-audit/src/test/java/org/springframework/cloud/dataflow/server/audit/service/DefaultAuditRecordServiceTests.java @@ -15,13 +15,15 @@ */ package org.springframework.cloud.dataflow.server.audit.service; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; import org.springframework.cloud.dataflow.audit.repository.AuditRecordRepository; @@ -32,8 +34,8 @@ import org.springframework.cloud.dataflow.core.AuditRecord; import org.springframework.data.domain.PageRequest; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.isNull; @@ -45,218 +47,255 @@ /** * @author Gunnar Hillert + * @author Corneil du Plessis */ -public class DefaultAuditRecordServiceTests { - - private AuditRecordRepository auditRecordRepository; - - @Before - public void setupMock() { - this.auditRecordRepository = mock(AuditRecordRepository.class); - } - - @Test - public void testInitializationWithNullParameters() { - try { - new DefaultAuditRecordService(null); - } - catch (IllegalArgumentException e) { - assertEquals("auditRecordRepository must not be null.", e.getMessage()); - return; - } - fail("Expected an Exception to be thrown."); - } - - @Test - public void testPopulateAndSaveAuditRecord() { - final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository); - auditRecordService.populateAndSaveAuditRecord(AuditOperationType.SCHEDULE, AuditActionType.CREATE, "1234", - "my 
data", "test-platform"); - - final ArgumentCaptor argument = ArgumentCaptor.forClass(AuditRecord.class); - verify(this.auditRecordRepository, times(1)).save(argument.capture()); - verifyNoMoreInteractions(this.auditRecordRepository); - - AuditRecord auditRecord = argument.getValue(); - - assertEquals(AuditActionType.CREATE, auditRecord.getAuditAction()); - assertEquals(AuditOperationType.SCHEDULE, auditRecord.getAuditOperation()); - assertEquals("1234", auditRecord.getCorrelationId()); - assertEquals("my data", auditRecord.getAuditData()); - assertEquals("test-platform", auditRecord.getPlatformName()); - } - - @Test - public void testPopulateAndSaveAuditRecordWithNullAuditActionType() { - final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository); - - try { - auditRecordService.populateAndSaveAuditRecord(AuditOperationType.SCHEDULE, null, "1234", "my audit data", "test-platform"); - } - catch (IllegalArgumentException e) { - assertEquals("auditActionType must not be null.", e.getMessage()); - return; - } - fail("Expected an Exception to be thrown."); - } - - @Test - public void testPopulateAndSaveAuditRecordWithNullAuditOperationType() { - final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository); - - try { - auditRecordService.populateAndSaveAuditRecord(null, AuditActionType.CREATE, "1234", "my audit data", "test-platform"); - } - catch (IllegalArgumentException e) { - assertEquals("auditOperationType must not be null.", e.getMessage()); - return; - } - fail("Expected an Exception to be thrown."); - } - - @Test - public void testPopulateAndSaveAuditRecordWithMapData() { - final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository); - - final Map mapAuditData = new HashMap<>(2); - mapAuditData.put("foo1", "bar1"); - mapAuditData.put("foofoo", "barbar"); - - auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, AuditActionType.CREATE, - "1234", mapAuditData, "test-platform"); - - final ArgumentCaptor argument = ArgumentCaptor.forClass(AuditRecord.class); - verify(this.auditRecordRepository, times(1)).save(argument.capture()); - verifyNoMoreInteractions(this.auditRecordRepository); - - final AuditRecord auditRecord = argument.getValue(); - - assertEquals(AuditActionType.CREATE, auditRecord.getAuditAction()); - assertEquals(AuditOperationType.SCHEDULE, auditRecord.getAuditOperation()); - assertEquals("1234", auditRecord.getCorrelationId()); - assertEquals("{\"foofoo\":\"barbar\",\"foo1\":\"bar1\"}", auditRecord.getAuditData()); - assertEquals("test-platform", auditRecord.getPlatformName()); - } - - @Test - public void testPopulateAndSaveAuditRecordUsingMapDataWithNullAuditActionType() { - final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository); - - final Map mapAuditData = new HashMap<>(2); - mapAuditData.put("foo", "bar"); - - try { - auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, null, "1234", - mapAuditData, null); - } - catch (IllegalArgumentException e) { - assertEquals("auditActionType must not be null.", e.getMessage()); - return; - } - fail("Expected an Exception to be thrown."); - } - - @Test - public void testPopulateAndSaveAuditRecordUsingMapDataWithNullAuditOperationType() { - final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository); - - final Map mapAuditData = new HashMap<>(2); - 
mapAuditData.put("foo", "bar"); - - try { - auditRecordService.populateAndSaveAuditRecordUsingMapData(null, AuditActionType.CREATE, "1234", - mapAuditData, null); - } - catch (IllegalArgumentException e) { - assertEquals("auditOperationType must not be null.", e.getMessage()); - return; - } - fail("Expected an Exception to be thrown."); - } - - @Test - public void testPopulateAndSaveAuditRecordUsingMapDataThrowingJsonProcessingException() - throws JsonProcessingException { - final ObjectMapper objectMapper = mock(ObjectMapper.class); - when(objectMapper.writeValueAsString(any(Object.class))).thenThrow(new JsonProcessingException("Error") { - private static final long serialVersionUID = 1L; - }); - - final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository, - objectMapper); - - final Map mapAuditData = new HashMap<>(2); - mapAuditData.put("foo", "bar"); - - auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, AuditActionType.CREATE, - "1234", mapAuditData, "test-platform"); - - final ArgumentCaptor argument = ArgumentCaptor.forClass(AuditRecord.class); - verify(this.auditRecordRepository, times(1)).save(argument.capture()); - verifyNoMoreInteractions(this.auditRecordRepository); - - AuditRecord auditRecord = argument.getValue(); - - assertEquals(AuditActionType.CREATE, auditRecord.getAuditAction()); - assertEquals(AuditOperationType.SCHEDULE, auditRecord.getAuditOperation()); - assertEquals("1234", auditRecord.getCorrelationId()); - assertEquals("Error serializing audit record data. Data = {foo=bar}", auditRecord.getAuditData()); - assertEquals("test-platform", auditRecord.getPlatformName()); - } - - @Test - public void testFindAuditRecordByAuditOperationTypeAndAuditActionType() { - AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository); - - AuditActionType[] auditActionTypes = { AuditActionType.CREATE }; - AuditOperationType[] auditOperationTypes = { AuditOperationType.STREAM }; - PageRequest pageRequest = PageRequest.of(0, 1); - auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, auditActionTypes, - auditOperationTypes, null, null); - - verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(eq(auditOperationTypes), - eq(auditActionTypes), isNull(), isNull(), eq(pageRequest)); - verifyNoMoreInteractions(this.auditRecordRepository); - } - - @Test - public void testFindAuditRecordByAuditOperationTypeAndAuditActionTypeWithNullAuditActionType() { - AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository); - - AuditOperationType[] auditOperationTypes = { AuditOperationType.STREAM }; - PageRequest pageRequest = PageRequest.of(0, 1); - auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, null, - auditOperationTypes, null, null); - - verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(eq(auditOperationTypes), - isNull(), isNull(), isNull(), eq(pageRequest)); - verifyNoMoreInteractions(this.auditRecordRepository); - } - - @Test - public void testFindAuditRecordByAuditOperationTypeAndAuditActionTypeWithNullOperationType() { - AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository); - - AuditActionType[] auditActionTypes = { AuditActionType.CREATE }; - PageRequest pageRequest = PageRequest.of(0, 1); - 
auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, auditActionTypes, - null, null, null); - - verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(isNull(), - eq(auditActionTypes), isNull(), isNull(), eq(pageRequest)); - verifyNoMoreInteractions(this.auditRecordRepository); - } - - @Test - public void testFindAuditRecordByAuditOperationTypeAndAuditActionTypeWithNullActionAndOperationType() { - AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository); - - PageRequest pageRequest = PageRequest.of(0, 1); - auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, null, null, null, - null); - - verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(isNull(), isNull(), - isNull(), isNull(), eq(pageRequest)); - verifyNoMoreInteractions(this.auditRecordRepository); - } +class DefaultAuditRecordServiceTests { + + private AuditRecordRepository auditRecordRepository; + + @BeforeEach + void setupMock() { + this.auditRecordRepository = mock(AuditRecordRepository.class); + } + + @Test + void initializationWithNullParameters() { + try { + new DefaultAuditRecordService(null); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage()).isEqualTo("auditRecordRepository must not be null."); + return; + } + fail("Expected an Exception to be thrown."); + } + + @Test + void populateAndSaveAuditRecord() { + final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository); + auditRecordService.populateAndSaveAuditRecord(AuditOperationType.SCHEDULE, AuditActionType.CREATE, "1234", + "my data", "test-platform"); + + final ArgumentCaptor<AuditRecord> argument = ArgumentCaptor.forClass(AuditRecord.class); + verify(this.auditRecordRepository, times(1)).save(argument.capture()); + verifyNoMoreInteractions(this.auditRecordRepository); + + AuditRecord auditRecord = argument.getValue(); + + assertThat(auditRecord.getAuditAction()).isEqualTo(AuditActionType.CREATE); + assertThat(auditRecord.getAuditOperation()).isEqualTo(AuditOperationType.SCHEDULE); + assertThat(auditRecord.getCorrelationId()).isEqualTo("1234"); + assertThat(auditRecord.getAuditData()).isEqualTo("my data"); + assertThat(auditRecord.getPlatformName()).isEqualTo("test-platform"); + } + + @Test + void populateAndSaveAuditRecordWithNullAuditActionType() { + final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository); + + try { + auditRecordService.populateAndSaveAuditRecord(AuditOperationType.SCHEDULE, null, "1234", "my audit data", "test-platform"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage()).isEqualTo("auditActionType must not be null."); + return; + } + fail("Expected an Exception to be thrown."); + } + + @Test + void populateAndSaveAuditRecordWithNullAuditOperationType() { + final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository); + + try { + auditRecordService.populateAndSaveAuditRecord(null, AuditActionType.CREATE, "1234", "my audit data", "test-platform"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage()).isEqualTo("auditOperationType must not be null."); + return; + } + fail("Expected an Exception to be thrown."); + } + + @Test + void populateAndSaveAuditRecordWithMapData() throws JsonProcessingException { + final ObjectMapper mapper = new ObjectMapper(); + final AuditRecordService auditRecordService = new 
DefaultAuditRecordService(this.auditRecordRepository, mapper); + + final Map<String, Object> mapAuditData = new HashMap<>(2); + mapAuditData.put("foo1", "bar1"); + mapAuditData.put("foofoo", "barbar"); + + auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, AuditActionType.CREATE, + "1234", mapAuditData, "test-platform"); + + final ArgumentCaptor<AuditRecord> argument = ArgumentCaptor.forClass(AuditRecord.class); + verify(this.auditRecordRepository, times(1)).save(argument.capture()); + verifyNoMoreInteractions(this.auditRecordRepository); + + final AuditRecord auditRecord = argument.getValue(); + + assertThat(auditRecord.getAuditAction()).isEqualTo(AuditActionType.CREATE); + assertThat(auditRecord.getAuditOperation()).isEqualTo(AuditOperationType.SCHEDULE); + assertThat(auditRecord.getCorrelationId()).isEqualTo("1234"); + assertThat(mapper.readTree(auditRecord.getAuditData())).isEqualTo(mapper.convertValue(mapAuditData, JsonNode.class)); + assertThat(auditRecord.getPlatformName()).isEqualTo("test-platform"); + } + + @Test + void populateAndSaveAuditRecordUsingMapDataWithNullAuditActionType() { + final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository); + + final Map<String, Object> mapAuditData = new HashMap<>(2); + mapAuditData.put("foo", "bar"); + + try { + auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, null, "1234", + mapAuditData, null); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage()).isEqualTo("auditActionType must not be null."); + return; + } + fail("Expected an Exception to be thrown."); + } + + @Test + void populateAndSaveAuditRecordUsingMapDataWithNullAuditOperationType() { + final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository); + + final Map<String, Object> mapAuditData = new HashMap<>(2); + mapAuditData.put("foo", "bar"); + + try { + auditRecordService.populateAndSaveAuditRecordUsingMapData(null, AuditActionType.CREATE, "1234", + mapAuditData, null); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage()).isEqualTo("auditOperationType must not be null."); + return; + } + fail("Expected an Exception to be thrown."); + } + + @Test + void populateAndSaveAuditRecordUsingMapDataThrowingJsonProcessingException() + throws JsonProcessingException { + final ObjectMapper objectMapper = mock(ObjectMapper.class); + when(objectMapper.writeValueAsString(any(Object.class))).thenThrow(new JsonProcessingException("Error") { + private static final long serialVersionUID = 1L; + }); + + final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository, + objectMapper); + + final Map<String, Object> mapAuditData = new HashMap<>(2); + mapAuditData.put("foo", "bar"); + + auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, AuditActionType.CREATE, + "1234", mapAuditData, "test-platform"); + + final ArgumentCaptor<AuditRecord> argument = ArgumentCaptor.forClass(AuditRecord.class); + verify(this.auditRecordRepository, times(1)).save(argument.capture()); + verifyNoMoreInteractions(this.auditRecordRepository); + + AuditRecord auditRecord = argument.getValue(); + + assertThat(auditRecord.getAuditAction()).isEqualTo(AuditActionType.CREATE); + assertThat(auditRecord.getAuditOperation()).isEqualTo(AuditOperationType.SCHEDULE); + assertThat(auditRecord.getCorrelationId()).isEqualTo("1234"); + assertThat(auditRecord.getPlatformName()).isEqualTo("test-platform"); + assertThat(auditRecord.getAuditData()).isEqualTo("Error 
serializing audit record data. Data = {foo=bar}"); + + + } + + @Test + void populateAndSaveAuditRecordUsingSensitiveMapData() { + final ObjectMapper objectMapper = new ObjectMapper(); + final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository, objectMapper); + + final Map mapAuditData = new HashMap<>(2); + mapAuditData.put("foo", "bar"); + mapAuditData.put("spring.cloud.config.password", "12345"); + final Map child = new HashMap<>(); + child.put("password", "54321"); + child.put("bar1", "foo2"); + mapAuditData.put("spring.child", child); + mapAuditData.put("spring.empty", Collections.emptyMap()); + + auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, AuditActionType.CREATE, + "1234", mapAuditData, "test-platform"); + + final ArgumentCaptor argument = ArgumentCaptor.forClass(AuditRecord.class); + verify(this.auditRecordRepository, times(1)).save(argument.capture()); + verifyNoMoreInteractions(this.auditRecordRepository); + + AuditRecord auditRecord = argument.getValue(); + + assertThat(auditRecord.getAuditAction()).isEqualTo(AuditActionType.CREATE); + assertThat(auditRecord.getAuditOperation()).isEqualTo(AuditOperationType.SCHEDULE); + assertThat(auditRecord.getCorrelationId()).isEqualTo("1234"); + + assertThat(auditRecord.getPlatformName()).isEqualTo("test-platform"); + System.out.println("auditData=" + auditRecord.getAuditData()); + assertThat(auditRecord.getAuditData()).contains("\"******\""); + assertThat(auditRecord.getAuditData()).contains("\"bar\""); + assertThat(auditRecord.getAuditData()).contains("\"foo\""); + assertThat(auditRecord.getAuditData()).contains("\"spring.cloud.config.password\""); + assertThat(auditRecord.getAuditData()).contains("\"password\""); + assertThat(auditRecord.getAuditData()).doesNotContain("54321"); + assertThat(auditRecord.getAuditData()).doesNotContain("12345"); + } + + @Test + void findAuditRecordByAuditOperationTypeAndAuditActionType() { + AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository); + + AuditActionType[] auditActionTypes = {AuditActionType.CREATE}; + AuditOperationType[] auditOperationTypes = {AuditOperationType.STREAM}; + PageRequest pageRequest = PageRequest.of(0, 1); + auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, auditActionTypes, + auditOperationTypes, null, null); + + verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(eq(auditOperationTypes), + eq(auditActionTypes), isNull(), isNull(), eq(pageRequest)); + verifyNoMoreInteractions(this.auditRecordRepository); + } + + @Test + void findAuditRecordByAuditOperationTypeAndAuditActionTypeWithNullAuditActionType() { + AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository); + + AuditOperationType[] auditOperationTypes = {AuditOperationType.STREAM}; + PageRequest pageRequest = PageRequest.of(0, 1); + auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, null, + auditOperationTypes, null, null); + + verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(eq(auditOperationTypes), + isNull(), isNull(), isNull(), eq(pageRequest)); + verifyNoMoreInteractions(this.auditRecordRepository); + } + + @Test + void findAuditRecordByAuditOperationTypeAndAuditActionTypeWithNullOperationType() { + AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository); + + 
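The null-argument tests above keep the try/catch-and-fail idiom carried over from the JUnit 4 version of this class; with the AssertJ assertions the migration already imports, the same expectation can be stated in a single call. A sketch of the equivalent check under the same mocked-repository setup (the renamed test method is illustrative, not part of this patch):

import static org.assertj.core.api.Assertions.assertThatThrownBy;

@Test
void populateAndSaveAuditRecordWithNullAuditActionTypeThrows() {
    final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository);

    // One call replaces try/catch, the early return, and the trailing fail(...).
    assertThatThrownBy(() -> auditRecordService.populateAndSaveAuditRecord(
            AuditOperationType.SCHEDULE, null, "1234", "my audit data", "test-platform"))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("auditActionType must not be null.");
}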
diff --git a/spring-cloud-dataflow-autoconfigure/pom.xml b/spring-cloud-dataflow-autoconfigure/pom.xml index 6d84340829..4ae4ac0b74 100644 --- a/spring-cloud-dataflow-autoconfigure/pom.xml +++ b/spring-cloud-dataflow-autoconfigure/pom.xml @@ -1,15 +1,21 @@ - + 4.0.0 org.springframework.cloud spring-cloud-dataflow-parent - 2.8.0-SNAPSHOT + 3.0.0-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-autoconfigure jar spring-cloud-dataflow-autoconfigure Data Flow Autoconfig + + true + 3.4.1 + org.springframework.boot @@ -18,10 +24,12 @@ org.springframework.cloud spring-cloud-dataflow-server-core + ${project.version} org.springframework.cloud spring-cloud-dataflow-platform-kubernetes + ${project.version} io.fabric8 @@ -30,6 +38,7 @@ org.springframework.cloud spring-cloud-dataflow-platform-cloudfoundry + ${project.version} org.springframework.cloud @@ -52,6 +61,11 @@ spring-boot-starter-test test + + com.h2database + h2 + test + @@ -60,6 +74,7 @@ true META-INF/spring.factories + META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports @@ -67,7 +82,46 @@ org.apache.maven.plugins maven-resources-plugin + 3.3.1 + + + org.apache.maven.plugins + maven-surefire-plugin + 3.1.2 + + 1 + 1 + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + diff --git a/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/StreamDefinitionServiceAutoConfiguration.java b/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/StreamDefinitionServiceAutoConfiguration.java index 362d5ffc89..ebd54aa978 100644 --- a/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/StreamDefinitionServiceAutoConfiguration.java +++ b/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/StreamDefinitionServiceAutoConfiguration.java @@ -16,6 +16,7 @@ package org.springframework.cloud.dataflow.autoconfigure; +import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.cloud.dataflow.core.DefaultStreamDefinitionService; import org.springframework.cloud.dataflow.core.StreamDefinitionService; @@ -26,6 +27,7 @@ * * 
@author Ilayaperumal Gopinathan */ +@AutoConfiguration public class StreamDefinitionServiceAutoConfiguration { diff --git a/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/local/LocalDataFlowServerAutoConfiguration.java b/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/local/LocalDataFlowServerAutoConfiguration.java index 6bc1134e82..cdbe191c3c 100644 --- a/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/local/LocalDataFlowServerAutoConfiguration.java +++ b/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/local/LocalDataFlowServerAutoConfiguration.java @@ -19,6 +19,7 @@ import java.util.HashMap; import java.util.Map; +import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.AutoConfigureBefore; import org.springframework.cloud.dataflow.server.config.DataFlowControllerAutoConfiguration; import org.springframework.cloud.deployer.resource.docker.DockerResourceLoader; @@ -26,14 +27,14 @@ import org.springframework.cloud.deployer.resource.maven.MavenResourceLoader; import org.springframework.cloud.deployer.resource.support.DelegatingResourceLoader; import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; import org.springframework.core.io.ResourceLoader; + /** * Auto-configuration for local dataflow server. * * @author Janne Valkealahti */ -@Configuration +@AutoConfiguration @AutoConfigureBefore(DataFlowControllerAutoConfiguration.class) public class LocalDataFlowServerAutoConfiguration { diff --git a/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/local/LocalSchedulerAutoConfiguration.java b/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/local/LocalSchedulerAutoConfiguration.java index 2a461d9493..112e4b9460 100644 --- a/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/local/LocalSchedulerAutoConfiguration.java +++ b/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/local/LocalSchedulerAutoConfiguration.java @@ -18,6 +18,7 @@ import java.util.Collections; import java.util.List; +import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.cloud.dataflow.server.config.OnLocalPlatform; import org.springframework.cloud.dataflow.server.config.features.SchedulerConfiguration; @@ -26,13 +27,12 @@ import org.springframework.cloud.deployer.spi.scheduler.Scheduler; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Conditional; -import org.springframework.context.annotation.Configuration; /** * @author Mark Pollack */ -@Configuration -@Conditional({ OnLocalPlatform.class, SchedulerConfiguration.SchedulerConfigurationPropertyChecker.class }) +@AutoConfiguration +@Conditional({OnLocalPlatform.class, SchedulerConfiguration.SchedulerConfigurationPropertyChecker.class}) public class LocalSchedulerAutoConfiguration { @Bean diff --git a/spring-cloud-dataflow-autoconfigure/src/main/resources/META-INF/spring.factories b/spring-cloud-dataflow-autoconfigure/src/main/resources/META-INF/spring.factories index eb58ce1aee..66237ea2d7 100644 --- 
a/spring-cloud-dataflow-autoconfigure/src/main/resources/META-INF/spring.factories +++ b/spring-cloud-dataflow-autoconfigure/src/main/resources/META-INF/spring.factories @@ -1,7 +1,3 @@ -org.springframework.boot.autoconfigure.EnableAutoConfiguration=\ - org.springframework.cloud.dataflow.autoconfigure.local.LocalDataFlowServerAutoConfiguration, \ - org.springframework.cloud.dataflow.autoconfigure.local.LocalSchedulerAutoConfiguration, \ - org.springframework.cloud.dataflow.autoconfigure.StreamDefinitionServiceAutoConfiguration org.springframework.context.ApplicationListener=\ org.springframework.cloud.dataflow.autoconfigure.local.ProfileApplicationListener diff --git a/spring-cloud-dataflow-autoconfigure/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports b/spring-cloud-dataflow-autoconfigure/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports new file mode 100644 index 0000000000..c5d9f32d79 --- /dev/null +++ b/spring-cloud-dataflow-autoconfigure/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports @@ -0,0 +1,3 @@ +org.springframework.cloud.dataflow.autoconfigure.StreamDefinitionServiceAutoConfiguration +org.springframework.cloud.dataflow.autoconfigure.local.LocalDataFlowServerAutoConfiguration +org.springframework.cloud.dataflow.autoconfigure.local.LocalSchedulerAutoConfiguration \ No newline at end of file diff --git a/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/AbstractSchedulerPerPlatformTest.java b/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/AbstractSchedulerPerPlatformTest.java index 8b2a4decef..63f830647a 100644 --- a/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/AbstractSchedulerPerPlatformTest.java +++ b/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/AbstractSchedulerPerPlatformTest.java @@ -1,5 +1,5 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
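The spring.factories and AutoConfiguration.imports changes above follow Spring Boot's newer auto-configuration registration model: auto-configuration classes are annotated with @AutoConfiguration and listed one per line in META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports, while spring.factories retains only non-auto-configuration entries such as the ApplicationListener. A minimal sketch of the pattern (ExampleAutoConfiguration and its nested ExampleService are hypothetical names, not part of this patch):

// src/main/java/com/example/autoconfigure/ExampleAutoConfiguration.java
package com.example.autoconfigure;

import org.springframework.boot.autoconfigure.AutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.annotation.Bean;

@AutoConfiguration
public class ExampleAutoConfiguration {

    public static class ExampleService {
    }

    @Bean
    @ConditionalOnMissingBean
    public ExampleService exampleService() {
        // Only contributed when the application has not defined its own ExampleService.
        return new ExampleService();
    }
}

// src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports
// would then contain the single line:
// com.example.autoconfigure.ExampleAutoConfiguration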
@@ -18,8 +18,6 @@ import io.pivotal.reactor.scheduler.ReactorSchedulerClient; import org.cloudfoundry.operations.CloudFoundryOperations; -import org.junit.runner.RunWith; -import org.mockito.Mockito; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.actuate.autoconfigure.security.servlet.ManagementWebSecurityAutoConfiguration; @@ -30,21 +28,27 @@ import org.springframework.boot.cloud.CloudPlatform; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.cloud.dataflow.registry.service.AppRegistryService; +import org.springframework.cloud.dataflow.server.task.DataflowTaskExecutionQueryDao; +import org.springframework.cloud.dataflow.server.task.TaskDefinitionReader; +import org.springframework.cloud.dataflow.server.task.TaskDeploymentReader; import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryConnectionProperties; import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryDeployerAutoConfiguration; +import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryDeploymentProperties; import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryTaskLauncher; -import org.springframework.cloud.deployer.spi.scheduler.cloudfoundry.CloudFoundrySchedulerProperties; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Primary; import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; + +import static org.mockito.Mockito.mock; /** * @author Christian Tzolov + * @author Corneil du Plessis */ -@RunWith(SpringJUnit4ClassRunner.class) + @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, classes = AbstractSchedulerPerPlatformTest.AutoConfigurationApplication.class) @DirtiesContext @@ -54,39 +58,58 @@ public abstract class AbstractSchedulerPerPlatformTest { protected ApplicationContext context; @Configuration - @EnableAutoConfiguration(exclude = { LocalDataFlowServerAutoConfiguration.class, + @EnableAutoConfiguration(exclude = {LocalDataFlowServerAutoConfiguration.class, CloudFoundryDeployerAutoConfiguration.class, SecurityAutoConfiguration.class, - SecurityFilterAutoConfiguration.class, ManagementWebSecurityAutoConfiguration.class }) + SecurityFilterAutoConfiguration.class, ManagementWebSecurityAutoConfiguration.class}) public static class AutoConfigurationApplication { + @Bean + public AppRegistryService appRegistryService() { + return mock(AppRegistryService.class); + } + + @Bean + public TaskDefinitionReader taskDefinitionReader() { + return mock(TaskDefinitionReader.class); + } + + @Bean + public TaskDeploymentReader taskDeploymentReader() { + return mock(TaskDeploymentReader.class); + } + + @Bean + DataflowTaskExecutionQueryDao dataflowTaskExecutionQueryDao() { + return mock(DataflowTaskExecutionQueryDao.class); + } @Configuration @ConditionalOnCloudPlatform(CloudPlatform.CLOUD_FOUNDRY) public static class CloudFoundryMockConfig { @MockBean - protected CloudFoundrySchedulerProperties cloudFoundrySchedulerProperties; + protected CloudFoundryDeploymentProperties cloudFoundryDeploymentProperties; @Bean @Primary public ReactorSchedulerClient reactorSchedulerClient() { - return Mockito.mock(ReactorSchedulerClient.class); + return mock(ReactorSchedulerClient.class); } 
@Bean @Primary public CloudFoundryOperations cloudFoundryOperations() { - return Mockito.mock(CloudFoundryOperations.class); + return mock(CloudFoundryOperations.class); } @Bean @Primary public CloudFoundryConnectionProperties cloudFoundryConnectionProperties() { - return Mockito.mock(CloudFoundryConnectionProperties.class); + return mock(CloudFoundryConnectionProperties.class); } @Bean @Primary public CloudFoundryTaskLauncher CloudFoundryTaskLauncher() { - return Mockito.mock(CloudFoundryTaskLauncher.class); + return mock(CloudFoundryTaskLauncher.class); } } } diff --git a/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/ProfileApplicationListenerTest.java b/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/ProfileApplicationListenerTest.java index 31eb81ea97..6587ae8f62 100644 --- a/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/ProfileApplicationListenerTest.java +++ b/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/ProfileApplicationListenerTest.java @@ -16,11 +16,11 @@ package org.springframework.cloud.dataflow.autoconfigure.local; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; import org.springframework.boot.context.event.ApplicationEnvironmentPreparedEvent; import org.springframework.cloud.dataflow.server.config.cloudfoundry.CloudFoundryCloudProfileProvider; @@ -35,9 +35,10 @@ * {@link ProfileApplicationListener} test cases * * @author Chris Schaefer + * @author Corneil du Plessis */ -@RunWith(MockitoJUnitRunner.class) -public class ProfileApplicationListenerTest { +@ExtendWith(MockitoExtension.class) +class ProfileApplicationListenerTest { private MockEnvironment environment; @@ -46,21 +47,21 @@ public class ProfileApplicationListenerTest { private ProfileApplicationListener profileApplicationListener; - @Before - public void before() { + @BeforeEach + void before() { environment = new MockEnvironment(); when(event.getEnvironment()).thenReturn(environment); profileApplicationListener = new ProfileApplicationListener(); } @Test - public void shouldEnableLocalProfile() { + void shouldEnableLocalProfile() { profileApplicationListener.onApplicationEvent(event); assertThat(environment.getActiveProfiles()).contains("local"); } @Test - public void shouldNotEnableLocalProfileRunningOnKubernetes() { + void shouldNotEnableLocalProfileRunningOnKubernetes() { environment.setProperty("kubernetes_service_host", "true"); profileApplicationListener.onApplicationEvent(event); assertThat(environment.getActiveProfiles()).doesNotContain("local"); @@ -68,7 +69,7 @@ public void shouldNotEnableLocalProfileRunningOnKubernetes() { } @Test - public void shouldNotEnableLocalProfileRunningOnCloudFoundry() { + void shouldNotEnableLocalProfileRunningOnCloudFoundry() { environment.setProperty("VCAP_APPLICATION", "true"); profileApplicationListener.onApplicationEvent(event); assertThat(environment.getActiveProfiles()).doesNotContain("local"); @@ -76,7 +77,7 @@ public void shouldNotEnableLocalProfileRunningOnCloudFoundry() { } @Test - public void testAddedSpringCloudKubernetesConfigEnabledIsFalse() { + void 
addedSpringCloudKubernetesConfigEnabledIsFalse() { profileApplicationListener.onApplicationEvent(event); PropertySource propertySource = environment.getPropertySources().get("skipperProfileApplicationListener"); assertThat(propertySource.containsProperty("spring.cloud.kubernetes.enabled")).isTrue(); @@ -84,7 +85,7 @@ public void testAddedSpringCloudKubernetesConfigEnabledIsFalse() { } @Test - public void backOffIfCloudProfileAlreadySet() { + void backOffIfCloudProfileAlreadySet() { // kubernetes profile set by user environment.setActiveProfiles("kubernetes"); // environment says we are on cloud foundry, the profile is 'cloud' @@ -96,7 +97,7 @@ public void backOffIfCloudProfileAlreadySet() { } @Test - public void doNotSetLocalIfKubernetesProfileIsSet() { + void doNotSetLocalIfKubernetesProfileIsSet() { // kubernetes profile set by user environment.setActiveProfiles("kubernetes"); profileApplicationListener.onApplicationEvent(event); @@ -106,7 +107,7 @@ public void doNotSetLocalIfKubernetesProfileIsSet() { } @Test - public void disableProfileApplicationListener() { + void disableProfileApplicationListener() { try { System.setProperty(ProfileApplicationListener.IGNORE_PROFILEAPPLICATIONLISTENER_PROPERTY_NAME, "true"); environment.setProperty("VCAP_APPLICATION", "true"); diff --git a/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/SchedulerPerPlatformTest.java b/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/SchedulerPerPlatformTest.java index 03b38f1ea5..b2ae94a741 100644 --- a/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/SchedulerPerPlatformTest.java +++ b/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/SchedulerPerPlatformTest.java @@ -1,5 +1,5 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
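The ProfileApplicationListenerTest migration above swaps JUnit 4's runner model for the JUnit 5 extension model: @RunWith(MockitoJUnitRunner.class) becomes @ExtendWith(MockitoExtension.class), @Before becomes @BeforeEach, and test classes and methods drop their public modifiers. A minimal sketch of the same pattern (ExampleRepository and the test class are hypothetical, not from this patch):

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.when;

@ExtendWith(MockitoExtension.class)
class ExampleRepositoryTests {

    interface ExampleRepository {
        String findValue(String key);
    }

    // Initialized by MockitoExtension before each test, replacing MockitoJUnitRunner.
    @Mock
    ExampleRepository repository;

    @Test
    void returnsStubbedValue() {
        when(repository.findValue("key")).thenReturn("value");
        assertThat(repository.findValue("key")).isEqualTo("value");
    }
}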
@@ -16,80 +16,82 @@ package org.springframework.cloud.dataflow.autoconfigure.local; -import org.junit.Test; -import org.junit.experimental.runners.Enclosed; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.boot.cloud.CloudPlatform; import org.springframework.cloud.deployer.spi.kubernetes.KubernetesSchedulerProperties; import org.springframework.cloud.deployer.spi.scheduler.Scheduler; -import org.springframework.cloud.deployer.spi.scheduler.cloudfoundry.CloudFoundrySchedulerProperties; import org.springframework.test.context.TestPropertySource; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType; /** * @author Christian Tzolov + * @author Corneil du Plessis */ -@RunWith(Enclosed.class) public class SchedulerPerPlatformTest { + @Nested @TestPropertySource(properties = { "spring.cloud.dataflow.features.schedules-enabled=false" }) - public static class AllSchedulerDisabledTests extends AbstractSchedulerPerPlatformTest { + class AllSchedulerDisabledTests extends AbstractSchedulerPerPlatformTest { - @Test(expected = NoSuchBeanDefinitionException.class) - public void testLocalSchedulerEnabled() { - assertFalse(context.getEnvironment().containsProperty("kubernetes_service_host")); - assertFalse(CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment())); - context.getBean(Scheduler.class); + @Test + void localSchedulerEnabled() { + assertThat(context.getEnvironment().containsProperty("kubernetes_service_host")).isFalse(); + assertThat(CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment())).isFalse(); + assertThatExceptionOfType(NoSuchBeanDefinitionException.class).isThrownBy(() -> { + context.getBean(Scheduler.class); + }); } } + @Nested @TestPropertySource(properties = { "spring.cloud.dataflow.features.schedules-enabled=true" }) - public static class LocalSchedulerTests extends AbstractSchedulerPerPlatformTest { + class LocalSchedulerTests extends AbstractSchedulerPerPlatformTest { @Test - public void testLocalSchedulerEnabled() { - assertFalse("K8s should be disabled", context.getEnvironment().containsProperty("kubernetes_service_host")); - assertFalse("CF should be disabled", CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment())); + void localSchedulerEnabled() { + assertThat(context.getEnvironment().containsProperty("kubernetes_service_host")).as("K8s should be disabled").isFalse(); + assertThat(CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment())).as("CF should be disabled").isFalse(); Scheduler scheduler = context.getBean(Scheduler.class); - assertNotNull(scheduler); - assertTrue(scheduler.getClass().getName().contains("LocalSchedulerAutoConfiguration")); + assertThat(scheduler).isNotNull(); + assertThat(scheduler.getClass().getName()).contains("LocalSchedulerAutoConfiguration"); } } + @Nested @TestPropertySource(properties = { "spring.cloud.dataflow.features.schedules-enabled=true", - "kubernetes_service_host=dummy" }) - public static class KubernetesSchedulerActivatedTests extends AbstractSchedulerPerPlatformTest { + "kubernetes_service_host=dummy", "spring.cloud.kubernetes.client.namespace=default" }) + class KubernetesSchedulerActivatedTests extends 
AbstractSchedulerPerPlatformTest { @Test - public void testKubernetesSchedulerEnabled() { - assertTrue("K8s should be enabled", context.getEnvironment().containsProperty("kubernetes_service_host")); - assertFalse("CF should be disabled", CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment())); + void kubernetesSchedulerEnabled() { + assertThat(context.getEnvironment().containsProperty("kubernetes_service_host")).as("K8s should be enabled").isTrue(); + assertThat(CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment())).as("CF should be disabled").isFalse(); KubernetesSchedulerProperties props = context.getBean(KubernetesSchedulerProperties.class); - assertNotNull(props); + assertThat(props).isNotNull(); } } + @Nested @TestPropertySource(properties = { "spring.cloud.dataflow.features.schedules-enabled=true", "VCAP_APPLICATION=\"{\"instance_id\":\"123\"}\"" }) - public static class CloudFoundrySchedulerActivatedTests extends AbstractSchedulerPerPlatformTest { + class CloudFoundrySchedulerActivatedTests extends AbstractSchedulerPerPlatformTest { @Test - public void testCloudFoundryScheudlerEnabled() { - assertFalse("K8s should be disabled", context.getEnvironment().containsProperty("kubernetes_service_host")); - assertTrue("CF should be enabled", CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment())); + void cloudFoundrySchedulerEnabled() { + assertThat(context.getEnvironment() + .containsProperty("kubernetes_service_host")).as("K8s should be disabled").isFalse(); + assertThat(CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment())).as("CF should be enabled").isTrue(); - CloudFoundrySchedulerProperties props = context.getBean(CloudFoundrySchedulerProperties.class); - assertNotNull(props); } } } diff --git a/spring-cloud-dataflow-build/README.md b/spring-cloud-dataflow-build/README.md new file mode 100644 index 0000000000..7c459aa9a3 --- /dev/null +++ b/spring-cloud-dataflow-build/README.md @@ -0,0 +1 @@ +# spring-cloud-dataflow-build diff --git a/spring-cloud-dataflow-build/pom.xml b/spring-cloud-dataflow-build/pom.xml new file mode 100644 index 0000000000..90a5df4fc2 --- /dev/null +++ b/spring-cloud-dataflow-build/pom.xml @@ -0,0 +1,739 @@ + + + 4.0.0 + org.springframework.cloud + spring-cloud-dataflow-build + 3.0.0-SNAPSHOT + pom + Spring Cloud Dataflow Build + Spring Cloud Dataflow Build, managing plugins and dependencies + + spring-cloud-dataflow-build-dependencies + spring-cloud-dataflow-dependencies-parent + spring-cloud-dataflow-build-tools + + https://spring.io/projects/spring-cloud-dataflow + + 17 + @ + UTF-8 + UTF-8 + ${basedir} + ${project.artifactId} + + 3.3.7 + + 3.0.0-SNAPSHOT + ${project.build.directory}/build-docs + ${project.build.directory}/refdocs/ + 0.1.3.RELEASE + 2.3.7 + 2.2.9 + ${project.version} + deploy + ${project.version} + + jacoco + reuseReports + ${project.basedir}/../target/jacoco.exec + java + + 3.13.0 + 3.5.0 + 2.10 + 2.5.7 + + ${maven-checkstyle-plugin.version} + 8.29 + 0.0.9 + 3.5.0 + 3.4.1 + 3.0.1 + 3.3.0 + 3.1.0 + 3.1.2 + 1.6.0 + 3.3.1 + 4.9.9 + 3.1.1 + 3.0.0-M2 + 1.6 + 3.1.1 + 2.2.4 + 3.0.0 + 2.2.1 + 1.20 + 0.0.7 + 3.5.0 + false + true + true + true + main + + https://raw.githubusercontent.com/spring-cloud/spring-cloud-dataflow-build/${spring-cloud-build-checkstyle.branch}/spring-cloud-dataflow-build-tools/src/checkstyle/checkstyle-suppressions.xml + + + 
https://raw.githubusercontent.com/spring-cloud/spring-cloud-dataflow-build/${spring-cloud-build-checkstyle.branch}/spring-cloud-dataflow-build-tools/src/checkstyle/nohttp-checkstyle.xml + + + ${project.basedir}/../src/checkstyle/checkstyle-suppressions.xml + + 0.0.2.RELEASE + true + 3.2.10 + 1.8.1 + ${project.basedir}/src/main/asciidoc/_configprops.adoc + .* + generate-resources + generate-resources + + slow,docker + + + + + org.springframework.cloud + spring-cloud-dataflow-build-dependencies + 3.0.0-SNAPSHOT + pom + import + + + + + + + + Pivotal Software, Inc. + https://www.spring.io + + + + Apache License, Version 2.0 + https://www.apache.org/licenses/LICENSE-2.0 + + + + https://github.com/spring-cloud/spring-cloud-dataflow-build + scm:git:git://github.com/spring-cloud/spring-cloud-dataflow-build.git + + + scm:git:ssh://git@github.com/spring-cloud/spring-cloud-dataflow-build.git + + HEAD + + + + scdf-team + Data Flow Team + https://github.com/spring-cloud/spring-cloud-dataflow/graphs/contributors + + + + + + + ${basedir}/src/main/resources + true + + **/application*.yml + **/application*.properties + + + + ${basedir}/src/main/resources + + **/application*.yml + **/application*.properties + + + + + + ${basedir}/src/test/resources + true + + **/application*.yml + **/application*.properties + + + + ${basedir}/src/test/resources + + **/application*.yml + **/application*.properties + + + + + + + + org.apache.maven.plugins + maven-eclipse-plugin + ${maven-eclipse-plugin.version} + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + + org.apache.maven.plugins + maven-failsafe-plugin + ${maven-failsafe-plugin.version} + + --add-opens java.base/java.util=ALL-UNNAMED + ${groups} + ${excludedGroups} + + + + + integration-test + verify + + + + + + org.apache.maven.plugins + maven-jar-plugin + ${maven-jar-plugin.version} + + + + ${start-class} + true + + + + + + org.apache.maven.plugins + maven-antrun-plugin + ${maven-antrun-plugin.version} + + + org.apache.maven.plugins + maven-surefire-plugin + ${maven-surefire-plugin.version} + + 1 + 1 + + **/*Tests.java + **/*Test.java + + + **/Abstract*.java + + ${groups} + ${excludedGroups} + + + + org.codehaus.mojo + exec-maven-plugin + ${exec-maven-plugin.version} + + ${start-class} + + + + org.apache.maven.plugins + maven-resources-plugin + ${maven-resources-plugin.version} + + + ${resource.delimiter} + + false + + + + io.spring.javaformat + spring-javaformat-maven-plugin + ${spring-javaformat.version} + + + validate + + ${disable.checks} + + + apply + validate + + + + + + org.apache.maven.plugins + maven-checkstyle-plugin + ${maven-checkstyle-plugin.version} + + + com.puppycrawl.tools + checkstyle + ${puppycrawl-tools-checkstyle.version} + + + io.spring.javaformat + spring-javaformat-checkstyle + ${spring-javaformat-checkstyle.version} + + + org.springframework.cloud + spring-cloud-dataflow-build-tools + ${project.version} + + + io.spring.nohttp + nohttp-checkstyle + ${nohttp-checkstyle.version} + + + + + checkstyle-validation + validate + true + + ${disable.checks} + checkstyle.xml + checkstyle-header.txt + + checkstyle.build.directory=${project.build.directory} + checkstyle.suppressions.file=${checkstyle.suppressions.file} + checkstyle.additional.suppressions.file=${checkstyle.additional.suppressions.file} + + true + + + ${maven-checkstyle-plugin.includeTestSourceDirectory} + + ${maven-checkstyle-plugin.failsOnError} + + + ${maven-checkstyle-plugin.failOnViolation} + + + + check + + + + 
no-http-checkstyle-validation + validate + true + + ${disable.nohttp.checks} + ${checkstyle.nohttp.file} + **/* + **/.idea/**/*,**/.git/**/*,**/target/**/*,**/*.log + ./ + + + check + + + + + + io.github.git-commit-id + git-commit-id-maven-plugin + ${git-commit-id-plugin.version} + + + + revision + + + + + true + + ${project.build.outputDirectory}/git.properties + + full + + + + + org.springframework.boot + spring-boot-maven-plugin + ${spring-boot.version} + + + + repackage + + + + + CLASSIC + ${start-class} + + + + org.apache.maven.plugins + maven-enforcer-plugin + ${maven-enforcer-plugin.version} + + + enforce-versions + + enforce + + + + + false + + + + + + + + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + + org.apache.maven.plugins + + + maven-checkstyle-plugin + + + [2.17,) + + + check + + + + + + + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + org.apache.maven.plugins + maven-surefire-report-plugin + ${maven-surefire-report-plugin.version} + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + true + ${java.version} + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + false + true + + + + org.apache.maven.plugins + maven-source-plugin + ${maven-source-plugin.version} + + + attach-sources + + jar + + package + + + + + + + + + org.apache.maven.plugins + maven-surefire-report-plugin + ${maven-surefire-report-plugin.version} + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + non-aggregate + + + + javadoc + + + + aggregate + + + + aggregate + + + + + + + + + spring + + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + true + + + + + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + false + + + + + + milestone + + + repo.spring.io + Spring Milestone Repository + https://repo.spring.io/libs-milestone-local + + + + + java8 + + [1.8,) + + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + none + + + + + + + sonar + + false + + + + + org.jacoco + jacoco-maven-plugin + 0.8.12 + + ${sonar.jacoco.reportPath} + true + + + + agent + + prepare-agent + + + + + + org.apache.maven.plugins + maven-surefire-plugin + ${maven-surefire-plugin.version} + + --add-opens java.base/java.util=ALL-UNNAMED + 1 + 1 + + + listener + org.sonar.java.jacoco.JUnitListener + + + + + + + + + + + + + + + + + license + + true + + + + + org.codehaus.mojo + license-maven-plugin + ${license-maven-plugin.version} + + + aggregate-licenses + + license:aggregate-add-third-party + + + + + + + + + fast + + true + + + + failsafe + + + + org.apache.maven.plugins + maven-failsafe-plugin + ${maven-failsafe-plugin.version} + + + + + + checkstyle + + + + org.apache.maven.plugins + maven-checkstyle-plugin + + + + + + diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-dependencies/pom.xml 
b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-dependencies/pom.xml new file mode 100644 index 0000000000..c871cfb30b --- /dev/null +++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-dependencies/pom.xml @@ -0,0 +1,195 @@ + + + 4.0.0 + org.springframework.cloud + spring-cloud-dataflow-build-dependencies + 3.0.0-SNAPSHOT + Spring Cloud Dataflow Build Dependencies + pom + Spring Cloud Dataflow Build Dependencies: an internal BOM for use with Spring + Cloud Dataflow projects. Use as a BOM or by inheriting from the spring-cloud-dataflow-build. + + + org.springframework.boot + spring-boot-dependencies + + 3.3.5 + + + + 17 + UTF-8 + 2023.0.3 + 3.2.5 + 2.16.1 + 1.12.0 + 1.26.2 + + 9.39.3 + 2.0.0-M4 + 2.3.0 + 3.5.4 + 5.12.4 + 4.13.1 + 32.1.3-jre + + + + + + com.nimbusds + nimbus-jose-jwt + ${nimbus-jose-jwt.version} + + + io.fabric8 + kubernetes-client-bom + ${kubernetes-fabric8-client.version} + pom + import + + + org.springframework.cloud + spring-cloud-dependencies + ${spring-cloud.version} + pom + import + + + org.springframework.shell + spring-shell-dependencies + ${spring-shell.version} + pom + import + + + com.google.guava + guava + ${guava.version} + + + org.apache.commons + commons-compress + ${commons-compress.version} + + + commons-io + commons-io + ${commons-io.version} + + + org.apache.commons + commons-text + ${commons-text.version} + + + io.micrometer.prometheus + prometheus-rsocket-spring + ${prometheus-rsocket.version} + + + io.micrometer.prometheus + prometheus-rsocket-client + ${prometheus-rsocket.version} + + + io.pivotal.cfenv + java-cfenv + ${java-cfenv.version} + + + io.pivotal.cfenv + java-cfenv-boot + ${java-cfenv.version} + + + io.pivotal.cfenv + java-cfenv-boot-pivotal-scs + ${java-cfenv.version} + + + io.pivotal.cfenv + java-cfenv-boot-pivotal-sso + ${java-cfenv.version} + + + io.pivotal.cfenv + java-cfenv-jdbc + ${java-cfenv.version} + + + io.pivotal.spring.cloud + spring-cloud-services-starter-config-client + ${spring-cloud-services-starter-config-client.version} + + + junit + junit + ${junit.version} + + + + + + spring + + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + true + + + + + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + false + + + + + + diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/pom.xml b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/pom.xml new file mode 100644 index 0000000000..786abc0b73 --- /dev/null +++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/pom.xml @@ -0,0 +1,32 @@ + + + 4.0.0 + spring-cloud-dataflow-build-tools + spring-cloud-dataflow-build-tools + jar + Spring Cloud Dataflow Build Tools + + org.springframework.cloud + spring-cloud-dataflow-build + 3.0.0-SNAPSHOT + + + + com.puppycrawl.tools + checkstyle + ${puppycrawl-tools-checkstyle.version} + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.13.0 + + true + + + + + diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/checkstyle-suppressions-empty.xml 
b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/checkstyle-suppressions-empty.xml new file mode 100644 index 0000000000..6cb6ad2669 --- /dev/null +++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/checkstyle-suppressions-empty.xml @@ -0,0 +1,6 @@ + + + + \ No newline at end of file diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/checkstyle-suppressions.xml b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/checkstyle-suppressions.xml new file mode 100644 index 0000000000..f5f6705862 --- /dev/null +++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/checkstyle-suppressions.xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/nohttp-checkstyle.xml b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/nohttp-checkstyle.xml new file mode 100644 index 0000000000..4e21a0bdf5 --- /dev/null +++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/nohttp-checkstyle.xml @@ -0,0 +1,29 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/main/resources/checkstyle-header.txt b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/main/resources/checkstyle-header.txt new file mode 100644 index 0000000000..ff707f0f9e --- /dev/null +++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/main/resources/checkstyle-header.txt @@ -0,0 +1,15 @@ +^\Q/*\E$ +^\Q * Copyright \E(20\d\d\-)?20\d\d\Q the original author or authors.\E$ +^\Q *\E$ +^\Q * Licensed under the Apache License, Version 2.0 (the "License");\E$ +^\Q * you may not use this file except in compliance with the License.\E$ +^\Q * You may obtain a copy of the License at\E$ +^\Q *\E$ +^\Q * https://www.apache.org/licenses/LICENSE-2.0\E$ +^\Q *\E$ +^\Q * Unless required by applicable law or agreed to in writing, software\E$ +^\Q * distributed under the License is distributed on an "AS IS" BASIS,\E$ +^\Q * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\E$ +^\Q * See the License for the specific language governing permissions and\E$ +^\Q * limitations under the License.\E$ +^\Q */\E$ diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/main/resources/checkstyle.xml b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/main/resources/checkstyle.xml new file mode 100644 index 0000000000..ff46fb9e86 --- /dev/null +++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/main/resources/checkstyle.xml @@ -0,0 +1,203 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-dependencies-parent/pom.xml b/spring-cloud-dataflow-build/spring-cloud-dataflow-dependencies-parent/pom.xml new file mode 100644 index 0000000000..29bac750a8 --- /dev/null +++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-dependencies-parent/pom.xml @@ -0,0 +1,130 @@ + + + 4.0.0 + org.springframework.cloud + 3.0.0-SNAPSHOT + spring-cloud-dataflow-dependencies-parent + pom + Spring Cloud Dataflow Dependencies Parent + Spring Cloud Data Flow Build 
Dependencies</description>
+	<url>https://projects.spring.io/spring-cloud/</url>
+	<organization>
+		<name>Pivotal Software, Inc.</name>
+		<url>https://www.spring.io</url>
+	</organization>
+	<licenses>
+		<license>
+			<name>Apache License, Version 2.0</name>
+			<url>https://www.apache.org/licenses/LICENSE-2.0</url>
+		</license>
+	</licenses>
+	<scm>
+		<url>https://github.com/spring-cloud/spring-cloud-dataflow-build</url>
+		<connection>scm:git:git://github.com/spring-cloud/spring-cloud-dataflow-build.git</connection>
+		<developerConnection>scm:git:ssh://git@github.com/spring-cloud/spring-cloud-dataflow-build.git</developerConnection>
+		<tag>HEAD</tag>
+	</scm>
+	<developers>
+		<developer>
+			<id>scdf-team</id>
+			<name>Data Flow Team</name>
+			<url>https://github.com/spring-cloud/spring-cloud-dataflow/graphs/contributors</url>
+		</developer>
+	</developers>
+	<properties>
+		<java.version>17</java.version>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+		<spring-cloud-dataflow-build-dependencies.version>3.0.0-SNAPSHOT</spring-cloud-dataflow-build-dependencies.version>
+	</properties>
+	<profiles>
+		<profile>
+			<id>spring</id>
+			<repositories>
+				<repository>
+					<id>maven-central</id>
+					<name>Maven Central</name>
+					<url>https://repo.maven.apache.org/maven2</url>
+					<snapshots>
+						<enabled>false</enabled>
+					</snapshots>
+				</repository>
+				<repository>
+					<id>spring-snapshots</id>
+					<name>Spring Snapshots</name>
+					<url>https://repo.spring.io/snapshot</url>
+					<snapshots>
+						<enabled>true</enabled>
+					</snapshots>
+				</repository>
+				<repository>
+					<id>spring-milestones</id>
+					<name>Spring Milestones</name>
+					<url>https://repo.spring.io/milestone</url>
+					<snapshots>
+						<enabled>true</enabled>
+					</snapshots>
+				</repository>
+			</repositories>
+			<pluginRepositories>
+				<pluginRepository>
+					<id>maven-central</id>
+					<name>Maven Central</name>
+					<url>https://repo.maven.apache.org/maven2</url>
+					<snapshots>
+						<enabled>false</enabled>
+					</snapshots>
+				</pluginRepository>
+				<pluginRepository>
+					<id>spring-snapshots</id>
+					<name>Spring Snapshots</name>
+					<url>https://repo.spring.io/snapshot</url>
+					<snapshots>
+						<enabled>true</enabled>
+					</snapshots>
+				</pluginRepository>
+				<pluginRepository>
+					<id>spring-milestones</id>
+					<name>Spring Milestones</name>
+					<url>https://repo.spring.io/milestone</url>
+					<snapshots>
+						<enabled>false</enabled>
+					</snapshots>
+				</pluginRepository>
+			</pluginRepositories>
+		</profile>
+	</profiles>
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-enforcer-plugin</artifactId>
+				<version>1.4.1</version>
+				<executions>
+					<execution>
+						<id>enforce-versions</id>
+						<goals>
+							<goal>enforce</goal>
+						</goals>
+					</execution>
+				</executions>
+				<configuration>
+					<fail>false</fail>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+</project>
diff --git a/spring-cloud-dataflow-classic-docs/pom.xml b/spring-cloud-dataflow-classic-docs/pom.xml
index bcb8389960..2e63126b78 100644
--- a/spring-cloud-dataflow-classic-docs/pom.xml
+++ b/spring-cloud-dataflow-classic-docs/pom.xml
@@ -4,7 +4,8 @@
 	<parent>
 		<groupId>org.springframework.cloud</groupId>
 		<artifactId>spring-cloud-dataflow-parent</artifactId>
-		<version>2.8.0-SNAPSHOT</version>
+		<version>3.0.0-SNAPSHOT</version>
+		<relativePath>../spring-cloud-dataflow-parent</relativePath>
 	</parent>
 	<artifactId>spring-cloud-dataflow-classic-docs</artifactId>
 	<name>Spring Cloud Data Flow Docs for Classic mode</name>
@@ -13,38 +14,42 @@
 		<dependency>
 			<groupId>org.springframework.cloud</groupId>
 			<artifactId>spring-cloud-dataflow-configuration-metadata</artifactId>
+			<version>${project.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>org.springframework.cloud</groupId>
 			<artifactId>spring-cloud-dataflow-core</artifactId>
+			<version>${project.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>org.springframework.cloud</groupId>
 			<artifactId>spring-cloud-dataflow-registry</artifactId>
+			<version>${project.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>org.springframework.cloud</groupId>
 			<artifactId>spring-cloud-dataflow-rest-resource</artifactId>
+			<version>${project.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>org.springframework.cloud</groupId>
 			<artifactId>spring-cloud-dataflow-server-core</artifactId>
+			<version>${project.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>org.springframework.cloud</groupId>
 			<artifactId>spring-cloud-dataflow-rest-client</artifactId>
-		</dependency>
-		<dependency>
-			<groupId>org.springframework.cloud</groupId>
-			<artifactId>spring-cloud-dataflow-shell-core</artifactId>
+			<version>${project.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>org.springframework.cloud</groupId>
 			<artifactId>spring-cloud-dataflow-completion</artifactId>
+			<version>${project.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>org.springframework.cloud</groupId>
 			<artifactId>spring-cloud-starter-dataflow-server</artifactId>
+			<version>${project.version}</version>
 			<type>test-jar</type>
 			<scope>test</scope>
 		</dependency>
@@ -73,20 +78,46 @@
 			<artifactId>spring-boot-starter-test</artifactId>
 			<scope>test</scope>
 		</dependency>
-	</dependencies>
+		<dependency>
+			<groupId>com.h2database</groupId>
+			<artifactId>h2</artifactId>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.awaitility</groupId>
+			<artifactId>awaitility</artifactId>
+			<scope>test</scope>
+		</dependency>
+	</dependencies>
 	<build>
 		<plugins>
 			<plugin>
 				<groupId>org.apache.maven.plugins</groupId>
-				<artifactId>maven-surefire-plugin</artifactId>
-				<configuration>
-					<skip>false</skip>
-					<includes>
-						<include>**/*Documentation.java</include>
-						<include>**/*Tests.java</include>
-					</includes>
-				</configuration>
+				<artifactId>maven-jar-plugin</artifactId>
+				<version>3.3.0</version>
+				<configuration>
+					<classifier>docs</classifier>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<configuration>
+					<forkCount>1</forkCount>
+					<threadCount>1</threadCount>
+					<reuseForks>true</reuseForks>
+					<skip>false</skip>
+					<includes>
+						<include>**/*Documentation.java</include>
+					</includes>
+				</configuration>
+			</plugin>
 		</plugins>
 	</build>
 </project>
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AboutDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AboutDocumentation.java
index 4fb34a5ec9..cf65bf77a2 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AboutDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AboutDocumentation.java
@@ -16,7 +16,7 @@ package org.springframework.cloud.dataflow.server.rest.documentation;
-import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.http.MediaType; import org.springframework.restdocs.payload.JsonFieldType; @@ -29,12 +29,18 @@ /** * @author Gunnar Hillert * @author Ilayaperumal Gopinathan + * @author Chris Bono + * @author Corneil du Plessis */ -public class AboutDocumentation extends BaseDocumentation { +@SuppressWarnings("NewClassNamingConvention") +class AboutDocumentation extends BaseDocumentation { @Test - public void getMetaInformation() throws Exception { - this.mockMvc.perform(get("/about").accept(MediaType.APPLICATION_JSON)).andExpect(status().isOk()) + void getMetaInformation() throws Exception { + this.mockMvc.perform( + get("/about") + .accept(MediaType.APPLICATION_JSON)) + .andExpect(status().isOk()) .andDo(this.documentationHandler.document(responseFields( fieldWithPath("_links.self.href").description("Link to the runtime environment resource"), @@ -156,8 +162,36 @@ public void getMetaInformation() throws Exception { fieldWithPath("monitoringDashboardInfo.source").type(JsonFieldType.STRING).description( "Unique DataFlow identifier within the monitoring system."), fieldWithPath("monitoringDashboardInfo.refreshInterval").type(JsonFieldType.NUMBER).description( - "Provides the time interval (in seconds) for updating the monitoring dashboards.") + "Provides the time interval (in seconds) for updating the monitoring dashboards."), + fieldWithPath("gitAndBuildInfo").type(JsonFieldType.OBJECT).description( + "Provides the git and build info for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.git").type(JsonFieldType.OBJECT).description( + "Provides the git details for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.git.branch").type(JsonFieldType.STRING).description( + "Provides the git branch for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.git.commit").type(JsonFieldType.OBJECT).description( + "Provides the git commit info for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.git.commit.id").type(JsonFieldType.OBJECT).description( + "Provides the git commit id for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.git.commit.id.abbrev").type(JsonFieldType.STRING).description( + "Provides the short git commit id for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.git.commit.id.full").type(JsonFieldType.STRING).description( + "Provides the full git commit id for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.git.commit.time").type(JsonFieldType.STRING).description( + "Provides the git commit time for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.build").type(JsonFieldType.OBJECT).description( + "Provides the build details for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.build.artifact").type(JsonFieldType.STRING).description( + "Provides the build artifact for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.build.name").type(JsonFieldType.STRING).description( + "Provides the build name for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.build.time").type(JsonFieldType.STRING).description( + "Provides the build time for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.build.version").type(JsonFieldType.STRING).description( + "Provides the build version for the Dataflow server"), + fieldWithPath("gitAndBuildInfo.build.group").type(JsonFieldType.STRING).description( + "Provides the build group for the Dataflow server") ))); } } diff --git 
a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/ApiDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/ApiDocumentation.java index b3f875e032..17628b7566 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/ApiDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/ApiDocumentation.java @@ -16,9 +16,8 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import javax.servlet.RequestDispatcher; - -import org.junit.Test; +import jakarta.servlet.RequestDispatcher; +import org.junit.jupiter.api.Test; import org.springframework.cloud.dataflow.rest.Version; import org.springframework.restdocs.payload.JsonFieldType; @@ -32,7 +31,6 @@ import static org.springframework.restdocs.payload.PayloadDocumentation.fieldWithPath; import static org.springframework.restdocs.payload.PayloadDocumentation.responseFields; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; @@ -40,18 +38,20 @@ * @author Gunnar Hillert * @author Christian Tzolov * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ -public class ApiDocumentation extends BaseDocumentation { +@SuppressWarnings("NewClassNamingConvention") +class ApiDocumentation extends BaseDocumentation { @Test - public void headers() throws Exception { + void headers() throws Exception { this.mockMvc.perform(get("/")).andExpect(status().isOk()) .andDo(this.documentationHandler.document(responseHeaders(headerWithName("Content-Type") .description("The Content-Type of the payload, e.g. 
" + "`application/hal+json`")))); } @Test - public void errors() throws Exception { + void errors() throws Exception { this.mockMvc .perform(get("/error").requestAttr(RequestDispatcher.ERROR_STATUS_CODE, 400) .requestAttr(RequestDispatcher.ERROR_REQUEST_URI, "/apps").requestAttr( @@ -71,8 +71,10 @@ public void errors() throws Exception { } @Test - public void index() throws Exception { - this.mockMvc.perform(get("/")).andDo(print()).andExpect(status().isOk()).andDo(this.documentationHandler.document(links( + void index() throws Exception { + this.mockMvc.perform(get("/")) + .andExpect(status().isOk()) + .andDo(this.documentationHandler.document(links( linkWithRel("about").description( "Access meta information, including enabled " + "features, security info, version information"), @@ -102,12 +104,18 @@ public void index() throws Exception { linkWithRel("runtime/apps/{appId}").description("Exposes the runtime status for a specific app"), linkWithRel("runtime/apps/{appId}/instances").description("Provides the status for app instances"), linkWithRel("runtime/apps/{appId}/instances/{instanceId}").description("Provides the status for specific app instance"), + linkWithRel("runtime/apps/{appId}/instances/{instanceId}/actuator").description("EXPERIMENTAL: Allows invoking Actuator endpoint on specific app instance"), + linkWithRel("runtime/apps/{appId}/instances/{instanceId}/post").description("EXPERIMENTAL: Allows POST on http sink"), linkWithRel("tasks/definitions").description("Provides the task definition resource"), linkWithRel("tasks/definitions/definition").description("Provides details for a specific task definition"), linkWithRel("tasks/validation").description("Provides the validation for a task definition"), - linkWithRel("tasks/executions").description("Returns Task executions and allows launching of tasks"), + linkWithRel("tasks/executions").description("Returns Task executions"), + linkWithRel("tasks/executions/launch").description("Provides for launching a Task execution"), + linkWithRel("tasks/executions/external").description("Returns Task execution by external id"), linkWithRel("tasks/executions/current").description("Provides the current count of running tasks"), + linkWithRel("tasks/thinexecutions").description("Returns thin Task executions"), + linkWithRel("tasks/thinexecutions/name").description("Returns thin Task executions for a given task name"), linkWithRel("tasks/info/executions").description("Provides the task executions info"), linkWithRel("tasks/schedules").description("Provides schedule information of tasks"), linkWithRel("tasks/schedules/instances").description("Provides schedule information of a specific task "), @@ -141,6 +149,7 @@ public void index() throws Exception { fieldWithPath("['" + Version.REVISION_KEY + "']").description("Incremented each time a change is implemented in this REST API"), fieldWithPath("_links.audit-records.href").description("Link to the audit records"), fieldWithPath("_links.dashboard.href").description("Link to the dashboard"), + fieldWithPath("_links.streams/definitions.href").description("Link to the streams/definitions"), fieldWithPath("_links.streams/definitions/definition.href").description("Link to the streams/definitions/definition"), fieldWithPath("_links.streams/definitions/definition.templated").type(JsonFieldType.BOOLEAN).optional().description("Link streams/definitions/definition is templated"), @@ -152,6 +161,11 @@ public void index() throws Exception { 
fieldWithPath("_links.runtime/apps/{appId}/instances.templated").type(JsonFieldType.BOOLEAN).optional().description("Link runtime/apps/{appId}/instances is templated"), fieldWithPath("_links.runtime/apps/{appId}/instances/{instanceId}.href").description("Link to the runtime/apps/{appId}/instances/{instanceId}"), fieldWithPath("_links.runtime/apps/{appId}/instances/{instanceId}.templated").type(JsonFieldType.BOOLEAN).optional().description("Link runtime/apps/{appId}/instances/{instanceId} is templated"), + fieldWithPath("_links.runtime/apps/{appId}/instances/{instanceId}/post.href").description("Link to the runtime/apps/{appId}/instances/{instanceId}/post"), + fieldWithPath("_links.runtime/apps/{appId}/instances/{instanceId}/post.templated").type(JsonFieldType.BOOLEAN).optional().description("Link runtime/apps/{appId}/instances/{instanceId}/post is templated"), + + fieldWithPath("_links.runtime/apps/{appId}/instances/{instanceId}/actuator[].href").description("Link to the runtime/apps/{appId}/instances/{instanceId}/actuator"), + fieldWithPath("_links.runtime/apps/{appId}/instances/{instanceId}/actuator[].templated").type(JsonFieldType.BOOLEAN).optional().description("Link runtime/apps/{appId}/instances/{instanceId}/actuator is templated"), fieldWithPath("_links.runtime/streams.href").description("Link to the runtime/streams"), fieldWithPath("_links.runtime/streams.templated").type(JsonFieldType.BOOLEAN).optional().description("Link runtime/streams is templated"), @@ -195,11 +209,21 @@ public void index() throws Exception { fieldWithPath("_links.tasks/definitions/definition.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/definitions/definition is templated"), fieldWithPath("_links.tasks/executions.href").description("Link to the tasks/executions"), + fieldWithPath("_links.tasks/executions/launch.href").description("Link to tasks/executions/launch"), + fieldWithPath("_links.tasks/executions/launch.templated").type(JsonFieldType.BOOLEAN).optional().description("Indicates that Link tasks/executions/launch is templated"), fieldWithPath("_links.tasks/executions/name.href").description("Link to the tasks/executions/name"), fieldWithPath("_links.tasks/executions/name.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/executions/name is templated"), fieldWithPath("_links.tasks/executions/current.href").description("Link to the tasks/executions/current"), fieldWithPath("_links.tasks/executions/execution.href").description("Link to the tasks/executions/execution"), fieldWithPath("_links.tasks/executions/execution.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/executions/execution is templated"), + fieldWithPath("_links.tasks/executions/external.href").description("Link to the tasks/executions/external"), + fieldWithPath("_links.tasks/executions/external.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/executions/external is templated"), + + fieldWithPath("_links.tasks/thinexecutions.href").description("Link to the tasks/thinexecutions"), + + fieldWithPath("_links.tasks/thinexecutions/name.href").description("Link to the tasks/thinexecutions/name"), + fieldWithPath("_links.tasks/thinexecutions/name.templated").type(JsonFieldType.BOOLEAN).optional().description("Link to the tasks/thinexecutions/name is templated"), + fieldWithPath("_links.tasks/info/executions.href").description("Link to the tasks/info/executions"), 
fieldWithPath("_links.tasks/info/executions.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/info is templated"), diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AppRegistryDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AppRegistryDocumentation.java index 1f1a3b3366..22ada75374 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AppRegistryDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AppRegistryDocumentation.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2020 the original author or authors. + * Copyright 2016-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,7 +18,7 @@ import java.util.Arrays; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.http.MediaType; @@ -32,7 +32,7 @@ import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath; import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName; import static org.springframework.restdocs.request.RequestDocumentation.pathParameters; -import static org.springframework.restdocs.request.RequestDocumentation.requestParameters; +import static org.springframework.restdocs.request.RequestDocumentation.queryParameters; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; /** @@ -42,197 +42,205 @@ * @author Gunnar Hillert * @author Christian Tzolov * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ -public class AppRegistryDocumentation extends BaseDocumentation { - - @Test - public void appDefault() throws Exception { - registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); - registerApp(ApplicationType.source, "http", "1.3.0.RELEASE"); - - this.mockMvc.perform(RestDocumentationRequestBuilders - .put("/apps/{type}/{name}/{version:.+}", ApplicationType.source, "http", "1.2.0.RELEASE").accept(MediaType.APPLICATION_JSON)) - .andExpect(status().isAccepted()) - .andDo( - this.documentationHandler.document( - pathParameters( - parameterWithName("type").description("The type of application. One of " + Arrays.asList(ApplicationType.values())), - parameterWithName("name").description("The name of the application"), - parameterWithName("version").description("The version of the application") - ) - ) - ); - unregisterApp(ApplicationType.source, "http", "1.2.0.RELEASE"); - unregisterApp(ApplicationType.source, "http", "1.3.0.RELEASE"); - } - - @Test - public void registeringAnApplicationVersion() throws Exception { - this.mockMvc.perform( - post("/apps/{type}/{name}/{version:.+}", ApplicationType.source, "http", "1.1.0.RELEASE") - .param("uri", "maven://org.springframework.cloud.stream.app:http-source-rabbit:1.1.0.RELEASE")) - .andExpect(status().isCreated()) - .andDo( - this.documentationHandler.document( - pathParameters( - parameterWithName("type") - .description("The type of application to register. 
One of " + Arrays.asList(ApplicationType.values()) + " (optional)"), - parameterWithName("name").description("The name of the application to register"), - parameterWithName("version").description("The version of the application to register") - ), - requestParameters( - parameterWithName("uri").description("URI where the application bits reside"), - parameterWithName("metadata-uri").optional() - .description("URI where the application metadata jar can be found"), - parameterWithName("force").optional() - .description("Must be true if a registration with the same name and type already exists, otherwise an error will occur") - ) - ) - ); - - unregisterApp(ApplicationType.source, "http", "1.1.0.RELEASE"); - } +@SuppressWarnings("NewClassNamingConvention") +class AppRegistryDocumentation extends BaseDocumentation { + @Test + void appDefault() throws Exception { + registerApp(ApplicationType.source, "http", "4.0.0"); + registerApp(ApplicationType.source, "http", "5.0.0"); + + this.mockMvc.perform(RestDocumentationRequestBuilders + .put("/apps/{type}/{name}/{version:.+}", ApplicationType.source, "http", "4.0.0") + .accept(MediaType.APPLICATION_JSON)) + .andExpect(status().isAccepted()) + .andDo( + this.documentationHandler.document( + pathParameters( + parameterWithName("type").description("The type of application. One of " + Arrays.asList(ApplicationType.values())), + parameterWithName("name").description("The name of the application"), + parameterWithName("version").description("The version of the application") + ) + ) + ); + unregisterApp(ApplicationType.source, "http", "4.0.0"); + unregisterApp(ApplicationType.source, "http", "5.0.0"); + } @Test - public void bulkRegisteringApps() throws Exception { + void registeringAnApplicationVersion() throws Exception { this.mockMvc.perform( - post("/apps") - .param("apps", "source.http=maven://org.springframework.cloud.stream.app:http-source-rabbit:1.1.0.RELEASE") - .param("force", "false")) - .andExpect(status().isCreated()) - .andDo( - this.documentationHandler.document( - requestParameters( - parameterWithName("uri").optional().description("URI where a properties file containing registrations can be fetched. Exclusive with `apps`."), - parameterWithName("apps").optional().description("Inline set of registrations. Exclusive with `uri`."), - parameterWithName("force").optional().description("Must be true if a registration with the same name and type already exists, otherwise an error will occur") - ) - ) - ); - unregisterApp(ApplicationType.source, "http"); + post("/apps/{type}/{name}/{version:.+}", ApplicationType.source, "http", "4.0.0").queryParam("uri", + "maven://org.springframework.cloud.stream.app:http-source-rabbit:4.0.0") + ).andExpect(status().isCreated()) + .andDo( + this.documentationHandler.document( + pathParameters( + parameterWithName("type").optional() + .description("The type of application to register. 
One of " + Arrays.asList(ApplicationType.values())), + parameterWithName("name").description("The name of the application to register"), + parameterWithName("version").description("The version of the application to register") + ), + queryParameters( + parameterWithName("uri").description("URI where the application bits reside"), + parameterWithName("metadata-uri").optional() + .description("URI where the application metadata jar can be found"), + parameterWithName("force").optional() + .description("Must be true if a registration with the same name and type already exists, otherwise an error will occur") + ) + ) + ); + + unregisterApp(ApplicationType.source, "http", "4.0.0"); } - @Test - public void getApplicationsFiltered() throws Exception { - registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); - registerApp(ApplicationType.source, "time", "1.2.0.RELEASE"); - this.mockMvc.perform( - get("/apps") - .param("search", "") - .param("type", "source").accept(MediaType.APPLICATION_JSON) - .param("defaultVersion", "true") - .param("page", "0") - .param("size", "10") - .param("sort", "name,ASC") - ) - .andExpect(status().isOk()) - .andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("search").description("The search string performed on the name (optional)"), - parameterWithName("type") - .description("Restrict the returned apps to the type of the app. One of " + Arrays.asList(ApplicationType.values())), - parameterWithName("defaultVersion").description("The boolean flag to set to retrieve only the apps of the default versions (optional)"), - parameterWithName("page").description("The zero-based page number (optional)"), - parameterWithName("sort").description("The sort on the list (optional)"), - parameterWithName("size").description("The requested page size (optional)") - ), - responseFields( - subsectionWithPath("_embedded.appRegistrationResourceList") - .description("Contains a collection of application"), - subsectionWithPath("_links.self").description("Link to the applications resource"), - subsectionWithPath("page").description("Pagination properties") - ) - )); - - unregisterApp(ApplicationType.source, "http"); - unregisterApp(ApplicationType.source, "time"); - } @Test - public void getSingleApplication() throws Exception { - registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); + void bulkRegisteringApps() throws Exception { this.mockMvc.perform( - get("/apps/{type}/{name}", ApplicationType.source, "http").accept(MediaType.APPLICATION_JSON) - .param("exhaustive", "false")) - .andExpect(status().isOk()) - .andDo( - this.documentationHandler.document( - pathParameters( - parameterWithName("type").description("The type of application to query. One of " + Arrays.asList(ApplicationType.values())), - parameterWithName("name").description("The name of the application to query") - ), - requestParameters( - parameterWithName("exhaustive").optional() - .description("Return all application properties, including common Spring Boot properties") - ), - responseFields( - fieldWithPath("name").description("The name of the application"), - fieldWithPath("label").description("The label name of the application"), - fieldWithPath("type").description("The type of the application. 
One of " + Arrays.asList(ApplicationType.values())), - fieldWithPath("uri").description("The uri of the application"), - fieldWithPath("version").description("The version of the application"), - fieldWithPath("versions").description("All the registered versions of the application"), - fieldWithPath("defaultVersion").description("If true, the application is the default version"), - subsectionWithPath("options").description("The options of the application (Array)"), - fieldWithPath("shortDescription").description("The description of the application"), - fieldWithPath("inboundPortNames").description("Inbound port names of the application"), - fieldWithPath("outboundPortNames").description("Outbound port names of the application") - ) + post("/apps") + .queryParam("apps", "source.http=maven://org.springframework.cloud.stream.app:http-source-rabbit:4.0.0") + .queryParam("force", "false") ) - ); + .andExpect(status().isCreated()) + .andDo( + this.documentationHandler.document( + queryParameters( + parameterWithName("uri").optional().description("URI where a properties file containing registrations can be fetched. Exclusive with `apps`."), + parameterWithName("apps").optional().description("Inline set of registrations. Exclusive with `uri`."), + parameterWithName("force").optional().description("Must be true if a registration with the same name and type already exists, otherwise an error will occur") + ) + ) + ); unregisterApp(ApplicationType.source, "http"); } - @Test - public void registeringAnApplication() throws Exception { - this.mockMvc.perform( - post("/apps/{type}/{name}", ApplicationType.source, "http") - .param("uri", "maven://org.springframework.cloud.stream.app:http-source-rabbit:1.1.0.RELEASE")) - .andExpect(status().isCreated()) - .andDo( - this.documentationHandler.document( - pathParameters( - parameterWithName("type").description("The type of application to register. One of " + Arrays.asList(ApplicationType.values())), - parameterWithName("name").description("The name of the application to register") - ), - requestParameters( - parameterWithName("uri").description("URI where the application bits reside"), - parameterWithName("metadata-uri").optional().description("URI where the application metadata jar can be found"), - parameterWithName("force").optional().description("Must be true if a registration with the same name and type already exists, otherwise an error will occur") - ) + @Test + void getApplicationsFiltered() throws Exception { + registerApp(ApplicationType.source, "http", "5.0.0"); + registerApp(ApplicationType.source, "time", "5.0.0"); + this.mockMvc.perform( + get("/apps") + .param("search", "") + .param("type", "source").accept(MediaType.APPLICATION_JSON) + .param("defaultVersion", "true") + .param("page", "0") + .param("size", "10") + .param("sort", "name,ASC") ) - ); - - unregisterApp(ApplicationType.source, "http"); - } - - @Test - public void unregisteringAnApplication() throws Exception { - registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); - - this.mockMvc.perform( - delete("/apps/{type}/{name}/{version}", ApplicationType.source, "http", "1.2.0.RELEASE")) - .andExpect(status().isOk()) - .andDo( - this.documentationHandler.document( - pathParameters( - parameterWithName("type").description("The type of application to unregister. 
One of " + Arrays.asList(ApplicationType.values())), - parameterWithName("name").description("The name of the application to unregister"), - parameterWithName("version").description("The version of the application to unregister (optional)") - ) + .andExpect(status().isOk()) + .andDo(this.documentationHandler.document( + queryParameters( + parameterWithName("search").optional() + .description("The search string performed on the name"), + parameterWithName("type") + .description("Restrict the returned apps to the type of the app. One of " + Arrays.asList(ApplicationType.values())), + parameterWithName("defaultVersion").optional().description("The boolean flag to set to retrieve only the apps of the default versions"), + parameterWithName("page").optional().description("The zero-based page number"), + parameterWithName("sort").optional().description("The sort on the list"), + parameterWithName("size").optional().description("The requested page size") + ), + responseFields( + subsectionWithPath("_embedded.appRegistrationResourceList") + .description("Contains a collection of application"), + subsectionWithPath("_links.self").description("Link to the applications resource"), + subsectionWithPath("page").description("Pagination properties") + ) + )); + + unregisterApp(ApplicationType.source, "http"); + unregisterApp(ApplicationType.source, "time"); + } + + @Test + void getSingleApplication() throws Exception { + registerApp(ApplicationType.source, "http", "5.0.0"); + this.mockMvc.perform( + get("/apps/{type}/{name}", ApplicationType.source, "http").accept(MediaType.APPLICATION_JSON) + .param("exhaustive", "false")) + .andExpect(status().isOk()) + .andDo( + this.documentationHandler.document( + pathParameters( + parameterWithName("type").description("The type of application to query. One of " + Arrays.asList(ApplicationType.values())), + parameterWithName("name").description("The name of the application to query") + ), + queryParameters( + parameterWithName("exhaustive").optional() + .description("Return all application properties, including common Spring Boot properties") + ), + responseFields( + fieldWithPath("name").description("The name of the application"), + fieldWithPath("label").description("The label name of the application"), + fieldWithPath("type").description("The type of the application. 
One of " + Arrays.asList(ApplicationType.values())), + fieldWithPath("uri").description("The uri of the application"), + fieldWithPath("metaDataUri").description("The uri of the application metadata").optional(), + fieldWithPath("version").description("The version of the application"), + fieldWithPath("versions").description("All the registered versions of the application"), + fieldWithPath("defaultVersion").description("If true, the application is the default version"), + subsectionWithPath("options").description("The options of the application (Array)"), + fieldWithPath("shortDescription").description("The description of the application"), + fieldWithPath("inboundPortNames").description("Inbound port names of the application"), + fieldWithPath("outboundPortNames").description("Outbound port names of the application"), + fieldWithPath("optionGroups").description("Option groups of the application") + ) + ) + ); + unregisterApp(ApplicationType.source, "http"); + } + + @Test + void registeringAnApplication() throws Exception { + this.mockMvc.perform( + post("/apps/{type}/{name}", ApplicationType.source, "http") + .queryParam("uri", "maven://org.springframework.cloud.stream.app:http-source-rabbit:5.0.0") ) - ); - } - - @Test - public void unregisteringAllApplications() throws Exception { - registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); - registerApp(ApplicationType.source, "http", "1.3.0.RELEASE"); - this.mockMvc.perform( - delete("/apps")) - .andExpect(status().isOk() - ); - } + .andExpect(status().isCreated()) + .andDo( + this.documentationHandler.document( + pathParameters( + parameterWithName("type").description("The type of application to register. One of " + Arrays.asList(ApplicationType.values())), + parameterWithName("name").description("The name of the application to register") + ), + queryParameters( + parameterWithName("uri").description("URI where the application bits reside"), + parameterWithName("metadata-uri").optional().description("URI where the application metadata jar can be found"), + parameterWithName("force").optional().description("Must be true if a registration with the same name and type already exists, otherwise an error will occur") + ) + ) + ); + + unregisterApp(ApplicationType.source, "http"); + } + + @Test + void unregisteringAnApplication() throws Exception { + registerApp(ApplicationType.source, "http", "5.0.0"); + + this.mockMvc.perform( + delete("/apps/{type}/{name}/{version}", ApplicationType.source, "http", "5.0.0")) + .andExpect(status().isOk()) + .andDo( + this.documentationHandler.document( + pathParameters( + parameterWithName("type").description("The type of application to unregister. 
One of " + Arrays.asList(ApplicationType.values())), + parameterWithName("name").description("The name of the application to unregister"), + parameterWithName("version").optional().description("The version of the application to unregister") + ) + ) + ); + } + + @Test + void unregisteringAllApplications() throws Exception { + registerApp(ApplicationType.source, "http", "4.0.0"); + registerApp(ApplicationType.source, "http", "5.0.0"); + this.mockMvc.perform( + delete("/apps")) + .andExpect(status().isOk() + ); + } } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AuditRecordsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AuditRecordsDocumentation.java index a9f2c3cbc7..3c54a3cc1e 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AuditRecordsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AuditRecordsDocumentation.java @@ -16,10 +16,11 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import org.junit.Before; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.MethodOrderer; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post; @@ -28,26 +29,21 @@ import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath; import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName; import static org.springframework.restdocs.request.RequestDocumentation.pathParameters; -import static org.springframework.restdocs.request.RequestDocumentation.requestParameters; -import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; +import static org.springframework.restdocs.request.RequestDocumentation.queryParameters; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; /** * Documentation for the {@code /audit-records} endpoint. 
* * @author Gunnar Hillert + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) -public class AuditRecordsDocumentation extends BaseDocumentation { - - private static boolean setUpIsDone = false; - - @Before - public void setup() throws Exception { - if (setUpIsDone) { - return; - } +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodOrderer.MethodName.class) +class AuditRecordsDocumentation extends BaseDocumentation { + @BeforeEach + void setup() throws Exception { this.mockMvc.perform( post("/apps/{type}/time", "source") .param("uri", "maven://org.springframework.cloud.stream.app:time-source-rabbit:1.2.0.RELEASE")) @@ -62,11 +58,10 @@ public void setup() throws Exception { .param("definition", "time --format='YYYY MM DD' | log") .param("deploy", "false")) .andExpect(status().isCreated()); - setUpIsDone = true; } @Test - public void listAllAuditRecords() throws Exception { + void listAllAuditRecords() throws Exception { this.mockMvc.perform( get("/audit-records") .param("page", "0") @@ -76,18 +71,17 @@ public void listAllAuditRecords() throws Exception { .param("fromDate", "2000-01-01T00:00:00") .param("toDate", "2099-01-01T00:00:00") ) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page").description("The zero-based page number (optional)"), - parameterWithName("size").description("The requested page size (optional)"), - parameterWithName("operations").description("Comma-separated list of Audit Operations (optional)"), - parameterWithName("actions").description("Comma-separated list of Audit Actions (optional)"), - parameterWithName("fromDate") - .description("From date filter (ex.: 2019-02-03T00:00:30) (optional)"), - parameterWithName("toDate") - .description("To date filter (ex.: 2019-02-03T00:00:30) (optional)") + queryParameters( + parameterWithName("page").optional().description("The zero-based page number"), + parameterWithName("size").optional().description("The requested page size"), + parameterWithName("operations").optional().description("Comma-separated list of Audit Operations"), + parameterWithName("actions").optional().description("Comma-separated list of Audit Actions"), + parameterWithName("fromDate").optional() + .description("From date filter (ex.: 2019-02-03T00:00:30)"), + parameterWithName("toDate").optional() + .description("To date filter (ex.: 2019-02-03T00:00:30)") ), responseFields( subsectionWithPath("_embedded.auditRecordResourceList") @@ -97,18 +91,17 @@ public void listAllAuditRecords() throws Exception { } @Test - public void getAuditRecord() throws Exception { + void getAuditRecord() throws Exception { this.mockMvc.perform( get("/audit-records/{id}", "5")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters( - parameterWithName("id").description("The id of the audit record to query (required)") + parameterWithName("id").description("The id of the audit record to query") ), responseFields( fieldWithPath("auditRecordId").description("The id of the audit record"), - fieldWithPath("createdBy").description("The author of the audit record (optional)"), + fieldWithPath("createdBy").optional().description("The author of the audit record"), fieldWithPath("correlationId").description("The correlation ID of the audit record"), fieldWithPath("auditData").description("The data of the audit record"), fieldWithPath("createdOn").description("The creation date of the audit record"), @@ -121,19 
+114,17 @@ public void getAuditRecord() throws Exception { } @Test - public void getAuditActionTypes() throws Exception { + void getAuditActionTypes() throws Exception { this.mockMvc.perform( get("/audit-records/audit-action-types")) - .andDo(print()) .andExpect(status().isOk() ); } @Test - public void getAuditOperationTypes() throws Exception { + void getAuditOperationTypes() throws Exception { this.mockMvc.perform( get("/audit-records/audit-operation-types")) - .andDo(print()) .andExpect(status().isOk() ); } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/BaseDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/BaseDocumentation.java index 76318492e5..acaffed639 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/BaseDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/BaseDocumentation.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,7 +17,7 @@ package org.springframework.cloud.dataflow.server.rest.documentation; import java.util.ArrayList; -import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -25,9 +25,9 @@ import javax.sql.DataSource; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.extension.RegisterExtension; import org.mockito.ArgumentMatchers; import org.springframework.cloud.dataflow.core.ApplicationType; @@ -36,6 +36,7 @@ import org.springframework.cloud.dataflow.server.controller.TaskSchedulerController; import org.springframework.cloud.dataflow.server.service.SchedulerService; import org.springframework.cloud.dataflow.server.single.LocalDataflowResource; +import org.springframework.cloud.deployer.spi.app.ActuatorOperations; import org.springframework.cloud.deployer.spi.app.AppDeployer; import org.springframework.cloud.deployer.spi.scheduler.ScheduleInfo; import org.springframework.cloud.deployer.spi.scheduler.ScheduleRequest; @@ -47,9 +48,11 @@ import org.springframework.cloud.skipper.domain.Status; import org.springframework.cloud.skipper.domain.StatusCode; import org.springframework.cloud.skipper.domain.VersionInfo; +import org.springframework.context.ApplicationContext; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; -import org.springframework.restdocs.JUnitRestDocumentation; +import org.springframework.restdocs.RestDocumentationContextProvider; +import org.springframework.restdocs.RestDocumentationExtension; import org.springframework.restdocs.mockmvc.RestDocumentationResultHandler; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.test.web.servlet.MockMvc; @@ -73,16 +76,19 @@ * @author Gunnar Hillert * @author Ilayaperumal Gopinathan * @author Glenn Renfro + * @author Corneil du Plessis */ +@ExtendWith(RestDocumentationExtension.class) public abstract class BaseDocumentation { private static String skipperServerPort; - @ClassRule + 
@RegisterExtension public final static LocalDataflowResource springDataflowServer = new LocalDataflowResource( - "classpath:rest-docs-config.yml", true, true, true, true, skipperServerPort); - @Before - public void setupMocks() throws Exception{ + "classpath:rest-docs-config.yml", true, true, true, true, skipperServerPort); + + @BeforeEach + public void setupMocks(RestDocumentationContextProvider restDocumentationContextProvider) throws Exception { reset(springDataflowServer.getSkipperClient()); AboutResource about = new AboutResource(); @@ -98,19 +104,17 @@ public void setupMocks() throws Exception{ info.getStatus().setStatusCode(StatusCode.UNKNOWN); when(springDataflowServer.getSkipperClient().status(ArgumentMatchers.anyString())).thenReturn(info); - Deployer deployer = new Deployer("default", "local", mock(AppDeployer.class)); - when(springDataflowServer.getSkipperClient().listDeployers()).thenReturn(Arrays.asList(deployer)); + Deployer deployer = new Deployer("default", "local", mock(AppDeployer.class), mock(ActuatorOperations.class)); + when(springDataflowServer.getSkipperClient().listDeployers()).thenReturn(Collections.singletonList(deployer)); when(springDataflowServer.getSkipperClient().search(ArgumentMatchers.anyString(), ArgumentMatchers.anyBoolean())).thenReturn(new ArrayList<>()); - this.prepareDocumentationTests(springDataflowServer.getWebApplicationContext()); + this.prepareDocumentationTests(springDataflowServer.getWebApplicationContext(), + restDocumentationContextProvider); } public static final String TARGET_DIRECTORY = "target/generated-snippets"; - @Rule - public JUnitRestDocumentation restDocumentation = new JUnitRestDocumentation(TARGET_DIRECTORY); - protected MockMvc mockMvc; protected RestDocumentationResultHandler documentationHandler; @@ -119,74 +123,80 @@ public void setupMocks() throws Exception{ protected DataSource dataSource; - protected void prepareDocumentationTests(WebApplicationContext context) throws Exception{ + protected ApplicationContext context; + + protected void prepareDocumentationTests(WebApplicationContext context, + RestDocumentationContextProvider restDocumentationContextProvider) { + this.context = context; this.documentationHandler = document("{class-name}/{method-name}", preprocessResponse(prettyPrint())); this.documentation = new ToggleableResultHandler(documentationHandler); this.mockMvc = MockMvcBuilders.webAppContextSetup(context) - .apply(documentationConfiguration(this.restDocumentation).uris().withPort(9393)) - .alwaysDo((ToggleableResultHandler)this.documentation).build(); + .apply(documentationConfiguration(restDocumentationContextProvider).uris().withPort(9393)) + .alwaysDo((ToggleableResultHandler) this.documentation).build(); this.dataSource = springDataflowServer.getWebApplicationContext().getBean(DataSource.class); - TaskSchedulerController controller = this.springDataflowServer.getWebApplicationContext().getBean(TaskSchedulerController.class); + TaskSchedulerController controller = springDataflowServer.getWebApplicationContext() + .getBean(TaskSchedulerController.class); ReflectionTestUtils.setField(controller, "schedulerService", schedulerService()); - TaskPlatform taskPlatform = this.springDataflowServer.getWebApplicationContext().getBean(TaskPlatform.class); + TaskPlatform taskPlatform = springDataflowServer.getWebApplicationContext().getBean(TaskPlatform.class); Launcher launcher = taskPlatform.getLaunchers().stream().filter(launcherToFilter -> launcherToFilter.getName().equals("default")).findFirst().get(); 
ReflectionTestUtils.setField(launcher, "scheduler", localTestScheduler()); } /** * Can be used by subclasses to easily register dummy apps, as most endpoints require apps to be effective - * @param type the type of app to register - * @param name the name of the app to register + * + * @param type the type of app to register + * @param name the name of the app to register * @param version the version to register */ void registerApp(ApplicationType type, String name, String version) throws Exception { - String group = type == ApplicationType.task ? "org.springframework.cloud.task.app" : "org.springframework.cloud.stream.app"; + String group = type == ApplicationType.task ? "io.spring" : "org.springframework.cloud.stream.app"; String binder = type == ApplicationType.task ? "" : "-rabbit"; documentation.dontDocument( () -> this.mockMvc.perform( - post(String.format("/apps/%s/%s/%s", type, name, version)) - .param("uri", String.format("maven://%s:%s-%s%s:%s", group, name, type, binder, version))) + post(String.format("/apps/%s/%s/%s", type, name, version)) + .param("uri", String.format("maven://%s:%s-%s%s:%s", group, name, type, binder, version))) .andExpect(status().isCreated()) ); } void unregisterApp(ApplicationType type, String name) throws Exception { documentation.dontDocument( - () -> this.mockMvc.perform( - delete(String.format("/apps/%s/%s", type, name)) - ) - .andExpect(status().isOk()) + () -> this.mockMvc.perform( + delete(String.format("/apps/%s/%s", type, name)) + ) + .andExpect(status().isOk()) ); } void unregisterApp(ApplicationType type, String name, String version) throws Exception { documentation.dontDocument( - () -> this.mockMvc.perform( - delete(String.format("/apps/%s/%s/%s", type, name, version)) - ) - .andExpect(status().isOk()) + () -> this.mockMvc.perform( + delete(String.format("/apps/%s/%s/%s", type, name, version)) + ) + .andExpect(status().isOk()) ); } - void createStream(String name, String definition, boolean deploy) throws Exception{ + void createStream(String name, String definition, boolean deploy) throws Exception { documentation.dontDocument( - () -> this.mockMvc.perform( - post("/streams/definitions") - .param("name", name) - .param("definition", definition) - .param("deploy", String.valueOf(deploy))) - .andExpect(status().isCreated()) + () -> this.mockMvc.perform( + post("/streams/definitions") + .param("name", name) + .param("definition", definition) + .param("deploy", String.valueOf(deploy))) + .andExpect(status().isCreated()) ); } - void destroyStream(String name) throws Exception{ + void destroyStream(String name) throws Exception { documentation.dontDocument( - () -> this.mockMvc.perform( - delete("/streams/definitions/{name}", name)) - .andExpect(status().isOk()) + () -> this.mockMvc.perform( + delete("/streams/definitions/{name}", name)) + .andExpect(status().isOk()) ); } @@ -194,6 +204,7 @@ void destroyStream(String name) throws Exception{ * A {@link ResultHandler} that can be turned off and on. * * @author Eric Bottard + * @author Corneil du Plessis */ private static class ToggleableResultHandler implements ResultHandler, RestDocs { private final ResultHandler delegate; @@ -230,6 +241,7 @@ public void dontDocument(Callable<?> action) throws Exception { * are not documented.
* * @author Eric Bottard + * @author Corneil du Plessis */ @FunctionalInterface public interface RestDocs { @@ -240,8 +252,8 @@ public SchedulerService schedulerService() { return new SchedulerService() { @Override public void schedule(String scheduleName, String taskDefinitionName, - Map<String, String> taskProperties, List<String> commandLineArgs, - String platformName) { + Map<String, String> taskProperties, List<String> commandLineArgs, + String platformName) { } @Override @@ -264,7 +276,7 @@ public void unscheduleForTaskDefinition(String taskDefinitionName) { @Override public List<ScheduleInfo> list(Pageable pageable, String taskDefinitionName, - String platformName) { + String platformName) { return null; } @@ -316,7 +328,7 @@ private List<ScheduleInfo> getSampleList() { scheduleInfo.setScheduleName("FOO"); scheduleInfo.setTaskDefinitionName("BAR"); Map<String, String> props = new HashMap<>(1); - props.put("scheduler.AAA.spring.cloud.scheduler.cron.expression", "00 41 17 ? * *"); + props.put("deployer.AAA.spring.cloud.scheduler.cron.expression", "00 41 17 ? * *"); scheduleInfo.setScheduleProperties(props); result.add(scheduleInfo); return result; @@ -345,4 +357,4 @@ public List<ScheduleInfo> list() { } }; } - } +} diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobExecutionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobExecutionsDocumentation.java index ee3257a325..67011d046d 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobExecutionsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobExecutionsDocumentation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2020 the original author or authors. + * Copyright 2017-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
@@ -16,40 +16,36 @@ package org.springframework.cloud.dataflow.server.rest.documentation; +import java.time.LocalDateTime; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.Map; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.JobParameter; import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; +import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean; -import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory; -import org.springframework.batch.item.database.support.DefaultDataFieldMaxValueIncrementerFactory; +import org.springframework.batch.core.repository.JobRestartException; import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.cloud.dataflow.core.TaskManifest; import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao; -import org.springframework.cloud.dataflow.server.repository.JdbcDataflowTaskExecutionMetadataDao; import org.springframework.cloud.task.batch.listener.TaskBatchDao; -import org.springframework.cloud.task.batch.listener.support.JdbcTaskBatchDao; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.dao.TaskExecutionDao; -import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean; -import org.springframework.http.MediaType; import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.datasource.DataSourceTransactionManager; +import org.springframework.restdocs.payload.JsonFieldType; import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.junit4.SpringRunner; +import static org.assertj.core.api.Assertions.assertThat; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.put; @@ -58,306 +54,306 @@ import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath; import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName; import static org.springframework.restdocs.request.RequestDocumentation.pathParameters; -import static org.springframework.restdocs.request.RequestDocumentation.requestParameters; +import static org.springframework.restdocs.request.RequestDocumentation.queryParameters; import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + /** * Documentation for the /jobs/executions endpoint. 
* * @author Glenn Renfro + * @author Corneil du Plessis */ -@RunWith(SpringRunner.class) -@SpringBootTest(classes = { EmbeddedDataSourceConfiguration.class }) +@SuppressWarnings("NewClassNamingConvention") +@SpringBootTest(classes = {EmbeddedDataSourceConfiguration.class}) @DirtiesContext -public class JobExecutionsDocumentation extends BaseDocumentation { +class JobExecutionsDocumentation extends BaseDocumentation { private final static String JOB_NAME = "DOCJOB"; - private static boolean initialized; private JobRepository jobRepository; - private TaskExecutionDao dao; + + private TaskExecutionDao taskExecutionDao; + private TaskBatchDao taskBatchDao; + private JdbcTemplate jdbcTemplate; + private DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao; - @Before - public void setup() throws Exception { - if (!initialized) { - registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE"); - initialize(); - createJobExecution(JOB_NAME, BatchStatus.STARTED); - createJobExecution(JOB_NAME + "1", BatchStatus.STOPPED); + @BeforeEach + void setup() throws Exception { + registerApp(ApplicationType.task, "timestamp", "3.0.0"); + initialize(); + createJobExecution(JOB_NAME, BatchStatus.STARTED); + createJobExecution(JOB_NAME + "1", BatchStatus.STOPPED); - jdbcTemplate = new JdbcTemplate(this.dataSource); - jdbcTemplate.afterPropertiesSet(); - jdbcTemplate.update( - "INSERT into task_deployment(id, object_version, task_deployment_id, task_definition_name, platform_name, created_on) " + - "values (?,?,?,?,?,?)", - 1, 1, "2", JOB_NAME + "_1", "default", new Date()); - documentation.dontDocument(() -> this.mockMvc.perform( - post("/tasks/definitions") - .param("name", "DOCJOB1") - .param("definition", "timestamp --format='YYYY MM DD'")) - .andExpect(status().isOk())); + jdbcTemplate = new JdbcTemplate(this.dataSource); + jdbcTemplate.afterPropertiesSet(); + jdbcTemplate.update( + "INSERT into task_deployment(id, object_version, task_deployment_id, task_definition_name, platform_name, created_on) " + + "values (?,?,?,?,?,?)", + 1, 1, "2", JOB_NAME + "_1", "default", new Date()); - initialized = true; - } + documentation.dontDocument( + () -> this.mockMvc + .perform(post("/tasks/definitions").queryParam("name", "DOCJOB1") + .queryParam("definition", "timestamp --format='YYYY MM DD'")) + .andExpect(status().isOk())); } @Test - public void listJobExecutions() throws Exception { + void listJobExecutions() throws Exception { this.mockMvc.perform( - get("/jobs/executions") - .param("page", "0") - .param("size", "10")) - .andDo(print()) + get("/jobs/executions") + .queryParam("page", "0") + .queryParam("size", "10")) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)")), - responseFields( - subsectionWithPath("_embedded.jobExecutionResourceList") - .description("Contains a collection of Job Executions/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - subsectionWithPath("page").description("Pagination properties") + queryParameters( + parameterWithName("page").optional() + .description("The zero-based page number"), + parameterWithName("size").optional() + .description("The requested page size")), + responseFields( + subsectionWithPath("_embedded.jobExecutionResourceList") + .description("Contains a collection of Job Executions/"), + 
subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties") ))); } @Test - public void listThinJobExecutions() throws Exception { + void listThinJobExecutions() throws Exception { this.mockMvc.perform( - get("/jobs/thinexecutions") - .param("page", "0") - .param("size", "10")) - .andDo(print()) + get("/jobs/thinexecutions") + .queryParam("page", "0") + .queryParam("size", "10")) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)")), - responseFields( - subsectionWithPath("_embedded.jobExecutionThinResourceList") - .description("Contains a collection of Job Executions without step executions included/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - subsectionWithPath("page").description("Pagination properties") - ))); + queryParameters( + parameterWithName("page").optional() + .description("The zero-based page number"), + parameterWithName("size").optional() + .description("The requested page size")), + responseFields( + subsectionWithPath("_embedded.jobExecutionThinResourceList") + .description("Contains a collection of Job Executions without step executions included/"), + subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties") + ))); } @Test - public void listThinJobExecutionsByJobInstanceId() throws Exception { + void listThinJobExecutionsByJobInstanceId() throws Exception { this.mockMvc.perform( - get("/jobs/thinexecutions") - .param("page", "0") - .param("size", "10") - .param("jobInstanceId", "1")) - .andDo(print()) + get("/jobs/thinexecutions") + .queryParam("page", "0") + .queryParam("size", "10") + .queryParam("jobInstanceId", "1")) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)"), - parameterWithName("jobInstanceId") - .description("Filter result by the job instance id")), - responseFields( - subsectionWithPath("_embedded.jobExecutionThinResourceList") - .description("Contains a collection of Job Executions without step executions included/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - subsectionWithPath("page").description("Pagination properties") - ))); + queryParameters( + parameterWithName("page").optional() + .description("The zero-based page number"), + parameterWithName("size").optional() + .description("The requested page size"), + parameterWithName("jobInstanceId") + .description("Filter result by the job instance id")), + responseFields( + subsectionWithPath("_embedded.jobExecutionThinResourceList") + .description("Contains a collection of Job Executions without step executions included/"), + subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties") + ))); } @Test - public void listThinJobExecutionsByTaskExecutionId() throws Exception { + void listThinJobExecutionsByTaskExecutionId() throws Exception { this.mockMvc.perform( - get("/jobs/thinexecutions") - .param("page", "0") - .param("size", "10") - 
.param("taskExecutionId", "1")) - .andDo(print()) + get("/jobs/thinexecutions") + .queryParam("page", "0") + .queryParam("size", "10") + .queryParam("taskExecutionId", "1")) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)"), - parameterWithName("taskExecutionId") - .description("Filter result by the task execution id")), - responseFields( - subsectionWithPath("_embedded.jobExecutionThinResourceList") - .description("Contains a collection of Job Executions without step executions included/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - subsectionWithPath("page").description("Pagination properties") - ))); + queryParameters( + parameterWithName("page").optional() + .description("The zero-based page number"), + parameterWithName("size").optional() + .description("The requested page size"), + parameterWithName("taskExecutionId") + .description("Filter result by the task execution id")), + responseFields( + subsectionWithPath("_embedded.jobExecutionThinResourceList") + .description("Contains a collection of Job Executions without step executions included/"), + subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties") + ))); } @Test - public void listThinJobExecutionsByDate() throws Exception { + void listThinJobExecutionsByDate() throws Exception { this.mockMvc.perform( - get("/jobs/thinexecutions") - .param("page", "0") - .param("size", "10") - .param("fromDate", "2000-09-24T17:00:45,000") - .param("toDate", "2050-09-24T18:00:45,000")) - .andDo(print()) + get("/jobs/thinexecutions") + .queryParam("page", "0") + .queryParam("size", "10") + .queryParam("fromDate", "2000-09-24T17:00:45,000") + .queryParam("toDate", "2050-09-24T18:00:45,000")) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)"), - parameterWithName("fromDate") - .description("Filter result from a starting date in the format 'yyyy-MM-dd'T'HH:mm:ss,SSS'"), - parameterWithName("toDate") - .description("Filter result up to the `to` date in the format 'yyyy-MM-dd'T'HH:mm:ss,SSS'")), - responseFields( - subsectionWithPath("_embedded.jobExecutionThinResourceList") - .description("Contains a collection of Job Executions without step executions included/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - subsectionWithPath("page").description("Pagination properties") - ))); + queryParameters( + parameterWithName("page").optional() + .description("The zero-based page number"), + parameterWithName("size").optional() + .description("The requested page size"), + parameterWithName("fromDate") + .description("Filter result from a starting date in the format 'yyyy-MM-dd'T'HH:mm:ss,SSS'"), + parameterWithName("toDate") + .description("Filter result up to the `to` date in the format 'yyyy-MM-dd'T'HH:mm:ss,SSS'")), + responseFields( + subsectionWithPath("_embedded.jobExecutionThinResourceList") + .description("Contains a collection of Job Executions without step executions included/"), + subsectionWithPath("_links.self").description("Link to the job execution 
resource"), + subsectionWithPath("page").description("Pagination properties") + ))); } @Test - public void listJobExecutionsByName() throws Exception { + void listJobExecutionsByName() throws Exception { this.mockMvc.perform( - get("/jobs/executions") - .param("name", JOB_NAME) - .param("page", "0") - .param("size", "10")) - .andDo(print()) + get("/jobs/executions") + .queryParam("name", JOB_NAME) + .queryParam("page", "0") + .queryParam("size", "10")) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)"), - parameterWithName("name") - .description("The name associated with the job execution")), - responseFields( - subsectionWithPath("_embedded.jobExecutionResourceList") - .description("Contains a collection of Job Executions/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - subsectionWithPath("page").description("Pagination properties") + queryParameters( + parameterWithName("page").optional() + .description("The zero-based page number"), + parameterWithName("size").optional() + .description("The requested page size"), + parameterWithName("name") + .description("The name associated with the job execution")), + responseFields( + subsectionWithPath("_embedded.jobExecutionResourceList") + .description("Contains a collection of Job Executions/"), + subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties") ))); } @Test - public void listThinJobExecutionsByName() throws Exception { + void listThinJobExecutionsByName() throws Exception { this.mockMvc.perform( - get("/jobs/thinexecutions") - .param("name", JOB_NAME) - .param("page", "0") - .param("size", "10")) - .andDo(print()) + get("/jobs/thinexecutions") + .queryParam("name", JOB_NAME) + .queryParam("page", "0") + .queryParam("size", "10")) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)"), - parameterWithName("name") - .description("The name associated with the job execution")), - responseFields( - subsectionWithPath("_embedded.jobExecutionThinResourceList") - .description("Contains a collection of Job Executions without step executions included/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - subsectionWithPath("page").description("Pagination properties") - ))); + queryParameters( + parameterWithName("page").optional() + .description("The zero-based page number"), + parameterWithName("size").optional() + .description("The requested page size"), + parameterWithName("name") + .description("The name associated with the job execution")), + responseFields( + subsectionWithPath("_embedded.jobExecutionThinResourceList") + .description("Contains a collection of Job Executions without step executions included/"), + subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties") + ))); } @Test - public void jobDisplayDetail() throws Exception { + void jobDisplayDetail() throws Exception { this.mockMvc.perform( - get("/jobs/executions/{id}", "2")) - .andDo(print()) - 
.andExpect(status().isOk()) - .andDo(this.documentationHandler.document( - pathParameters( - parameterWithName("id").description("The id of an existing job execution (required)") - ), - responseFields( - fieldWithPath("executionId").description("The execution ID of the job execution"), - fieldWithPath("stepExecutionCount").description("the number of step of the job execution"), - fieldWithPath("jobId").description("The job ID of the job execution"), - fieldWithPath("taskExecutionId").description("The task execution ID of the job execution"), - fieldWithPath("name").description("The name of the job execution"), - fieldWithPath("startDate").description("The start date of the job execution"), - fieldWithPath("startTime").description("The start time of the job execution"), - fieldWithPath("duration").description("The duration of the job execution"), - fieldWithPath("jobParameters").description("The parameters of the job execution"), - fieldWithPath("jobParametersString").description("The parameters string of the job execution"), - fieldWithPath("restartable").description("The status restartable of the job execution"), - fieldWithPath("abandonable").description("The status abandonable of the job execution"), - fieldWithPath("stoppable").description("The status stoppable of the job execution"), - fieldWithPath("defined").description("The status defined of the job execution"), - fieldWithPath("timeZone").description("The time zone of the job execution"), - subsectionWithPath("jobExecution").description("The details of the job execution"), - subsectionWithPath("jobParameters").description("The job parameters associated with the job execution"), - subsectionWithPath("_links.self").description("Link to the stream definition resource") + get("/jobs/executions/{id}", "2") ) - )); + .andExpect(status().isOk()) + .andDo(this.documentationHandler.document( + pathParameters( + parameterWithName("id").description("The id of an existing job execution") + ), + responseFields( + fieldWithPath("executionId").description("The execution ID of the job execution"), + fieldWithPath("stepExecutionCount").description("the number of step of the job execution"), + fieldWithPath("jobId").description("The job ID of the job execution"), + fieldWithPath("taskExecutionId").description("The task execution ID of the job execution"), + fieldWithPath("name").description("The name of the job execution"), + fieldWithPath("startDate").description("The start date of the job execution"), + fieldWithPath("startTime").description("The start time of the job execution"), + fieldWithPath("duration").description("The duration of the job execution"), + fieldWithPath("jobParameters").description("The parameters of the job execution"), + fieldWithPath("jobParametersString").description("The parameters string of the job execution"), + fieldWithPath("restartable").description("The status restartable of the job execution"), + fieldWithPath("abandonable").description("The status abandonable of the job execution"), + fieldWithPath("stoppable").description("The status stoppable of the job execution"), + fieldWithPath("defined").description("The status defined of the job execution"), + fieldWithPath("timeZone").description("The time zone of the job execution"), + subsectionWithPath("jobExecution").description("The details of the job execution"), + subsectionWithPath("jobParameters").description("The job parameters associated with the job execution"), + subsectionWithPath("_links.self").description("Link to the stream definition resource"), + 
subsectionWithPath("_links.stop").type(JsonFieldType.OBJECT).description("Link to stopping the job").optional(), + subsectionWithPath("_links.restart").type(JsonFieldType.OBJECT).description("Link to restarting the job").optional() + ) + )); } @Test - public void jobStop() throws Exception { - this.mockMvc.perform(put("/jobs/executions/{id}", "1").accept(MediaType.APPLICATION_JSON).param("stop", "true")) - .andDo(print()) + void jobStop() throws Exception { + this.mockMvc.perform(put("/jobs/executions/{id}", "1") + .queryParam("stop", "true") + ) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters(parameterWithName("id") - .description("The id of an existing job execution (required)")) - , requestParameters( - parameterWithName("stop") + .description("The id of an existing job execution")) + , queryParameters( + parameterWithName("stop").optional() .description("Sends signal to stop the job if set to true")))); } @Test - public void jobRestart() throws Exception { - this.mockMvc.perform(put("/jobs/executions/{id}", "2").accept(MediaType.APPLICATION_JSON).param("restart", "true")) - .andDo(print()) + void jobRestart() throws Exception { + this.mockMvc.perform(put("/jobs/executions/{id}", "2") + .queryParam("restart", "true") + .queryParam("useJsonJobParameters", "true") + ) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( - pathParameters(parameterWithName("id") - .description("The id of an existing job execution (required)")) - , requestParameters( - parameterWithName("restart") - .description("Sends signal to restart the job if set to true")))); + pathParameters(parameterWithName("id") + .description("The id of an existing job execution")) + , queryParameters( + parameterWithName("useJsonJobParameters").description("If true dataflow will " + + "serialize job parameters as JSON. 
Default is null, and the default " + "configuration will be used to determine serialization method.").optional(), + parameterWithName("restart").optional() + .description("Sends signal to restart the job if set to true") + ) + ) + ); } - private void initialize() throws Exception { - JobRepositoryFactoryBean repositoryFactoryBean = new JobRepositoryFactoryBean(); - repositoryFactoryBean.setDataSource(this.dataSource); - repositoryFactoryBean.setTransactionManager(new DataSourceTransactionManager(this.dataSource)); - this.jobRepository = repositoryFactoryBean.getObject(); - this.dao = (new TaskExecutionDaoFactoryBean(this.dataSource)).getObject(); - this.taskBatchDao = new JdbcTaskBatchDao(this.dataSource); + private void initialize() { + this.taskExecutionDao = context.getBean(TaskExecutionDao.class); + this.taskBatchDao = context.getBean(TaskBatchDao.class); + this.jobRepository = context.getBean(JobRepository.class); + this.dataflowTaskExecutionMetadataDao = context.getBean(DataflowTaskExecutionMetadataDao.class); } - private void createJobExecution(String name, BatchStatus status) { - TaskExecution taskExecution = this.dao.createTaskExecution(name, new Date(), Collections.singletonList("--spring.cloud.data.flow.platformname=default"), null); - Map<String, JobParameter> jobParameterMap = new HashMap<>(); + private void createJobExecution(String name, BatchStatus status) throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException { + TaskExecution taskExecution = taskExecutionDao.createTaskExecution(name, LocalDateTime.now(), Collections.singletonList("--spring.cloud.data.flow.platformname=default"), null); + Map<String, JobParameter<?>> jobParameterMap = new HashMap<>(); JobParameters jobParameters = new JobParameters(jobParameterMap); - JobExecution jobExecution = this.jobRepository.createJobExecution(this.jobRepository.createJobInstance(name, new JobParameters()), jobParameters, null); - this.taskBatchDao.saveRelationship(taskExecution, jobExecution); + JobExecution jobExecution = this.jobRepository.createJobExecution(name, jobParameters); + taskBatchDao.saveRelationship(taskExecution, jobExecution); jobExecution.setStatus(status); - jobExecution.setStartTime(new Date()); + jobExecution.setStartTime(LocalDateTime.now()); this.jobRepository.update(jobExecution); - TaskManifest manifest = new TaskManifest(); + final TaskManifest manifest = new TaskManifest(); manifest.setPlatformName("default"); - DataFieldMaxValueIncrementerFactory incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory(dataSource); - - DataflowTaskExecutionMetadataDao metadataDao = new JdbcDataflowTaskExecutionMetadataDao( - dataSource, incrementerFactory.getIncrementer("h2", "task_execution_metadata_seq")); + assertThat(dataflowTaskExecutionMetadataDao).isNotNull(); TaskManifest taskManifest = new TaskManifest(); taskManifest.setPlatformName("default"); - metadataDao.save(taskExecution, taskManifest); + dataflowTaskExecutionMetadataDao.save(taskExecution, taskManifest); } - } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobInstancesDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobInstancesDocumentation.java index c789368bc3..3cc14b2d13 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobInstancesDocumentation.java +++ 
b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobInstancesDocumentation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,29 +16,26 @@ package org.springframework.cloud.dataflow.server.rest.documentation; +import java.time.LocalDateTime; import java.util.ArrayList; -import java.util.Date; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; +import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean; +import org.springframework.batch.core.repository.JobRestartException; import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.cloud.task.batch.listener.TaskBatchDao; -import org.springframework.cloud.task.batch.listener.support.JdbcTaskBatchDao; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.dao.TaskExecutionDao; -import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean; -import org.springframework.jdbc.datasource.DataSourceTransactionManager; import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.junit4.SpringRunner; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; import static org.springframework.restdocs.payload.PayloadDocumentation.fieldWithPath; @@ -46,7 +43,7 @@ import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath; import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName; import static org.springframework.restdocs.request.RequestDocumentation.pathParameters; -import static org.springframework.restdocs.request.RequestDocumentation.requestParameters; +import static org.springframework.restdocs.request.RequestDocumentation.queryParameters; import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; @@ -54,43 +51,40 @@ * Documentation for the /jobs/instances endpoint. 
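Every createJobExecution helper rewritten in this patch follows the same Spring Batch 5 contract: JobRepository.createJobExecution(String, JobParameters) now creates the JobInstance itself and declares three checked exceptions, and execution timestamps use java.time.LocalDateTime instead of java.util.Date. A minimal sketch of that sequence, assuming an injected JobRepository (the helper name is hypothetical):

    private JobExecution startDocJobExecution(JobRepository jobRepository, String name, BatchStatus status)
            throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException {
        // Batch 5: a single call creates both the JobInstance and the JobExecution
        JobExecution jobExecution = jobRepository.createJobExecution(name, new JobParameters());
        jobExecution.setStatus(status);
        jobExecution.setStartTime(LocalDateTime.now()); // Date setters became LocalDateTime in Batch 5
        jobRepository.update(jobExecution);
        return jobExecution;
    }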
* * @author Glenn Renfro + * @author Corneil du Plessis */ -@RunWith(SpringRunner.class) -@SpringBootTest(classes = { EmbeddedDataSourceConfiguration.class }) +@SuppressWarnings({"NewClassNamingConvention","SameParameterValue"}) + +@SpringBootTest(classes = {EmbeddedDataSourceConfiguration.class}) @DirtiesContext -public class JobInstancesDocumentation extends BaseDocumentation { +class JobInstancesDocumentation extends BaseDocumentation { private final static String JOB_NAME = "DOCJOB"; - private static boolean initialized; private JobRepository jobRepository; - private TaskExecutionDao dao; + private TaskExecutionDao taskExecutionDao; private TaskBatchDao taskBatchDao; - @Before - public void setup() throws Exception { - if (!initialized) { - registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE"); - initialize(); - createJobExecution(JOB_NAME, BatchStatus.STARTED); - initialized = true; - } + @BeforeEach + void setup() throws Exception { + registerApp(ApplicationType.task, "timestamp", "3.0.0"); + initialize(); + createJobExecution(JOB_NAME, BatchStatus.STARTED); } @Test - public void listJobInstances() throws Exception { + void listJobInstances() throws Exception { this.mockMvc.perform( get("/jobs/instances") .param("name", JOB_NAME) .param("page", "0") .param("size", "10")) - .andDo(print()) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)"), + queryParameters( + parameterWithName("page").optional() + .description("The zero-based page number"), + parameterWithName("size").optional() + .description("The requested page size"), parameterWithName("name") .description("The name associated with the job instance")), responseFields( @@ -101,14 +95,13 @@ public void listJobInstances() throws Exception { } @Test - public void jobDisplayDetail() throws Exception { + void jobDisplayDetail() throws Exception { this.mockMvc.perform( get("/jobs/instances/{id}", "1")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters( - parameterWithName("id").description("The id of an existing job instance (required)") + parameterWithName("id").description("The id of an existing job instance") ), responseFields( fieldWithPath("jobName").description("The name of the job instance"), @@ -120,21 +113,18 @@ public void jobDisplayDetail() throws Exception { } - private void initialize() throws Exception { - JobRepositoryFactoryBean repositoryFactoryBean = new JobRepositoryFactoryBean(); - repositoryFactoryBean.setDataSource(this.dataSource); - repositoryFactoryBean.setTransactionManager(new DataSourceTransactionManager(this.dataSource)); - this.jobRepository = repositoryFactoryBean.getObject(); - this.dao = (new TaskExecutionDaoFactoryBean(this.dataSource)).getObject(); - this.taskBatchDao = new JdbcTaskBatchDao(this.dataSource); + private void initialize() { + this.jobRepository = context.getBean(JobRepository.class); + this.taskExecutionDao = context.getBean(TaskExecutionDao.class); + this.taskBatchDao = context.getBean(TaskBatchDao.class); } - private void createJobExecution(String name, BatchStatus status) { - TaskExecution taskExecution = this.dao.createTaskExecution(name, new Date(), new ArrayList<>(), null); - JobExecution jobExecution = this.jobRepository.createJobExecution(this.jobRepository.createJobInstance(name, new JobParameters()), new 
JobParameters(), null); - this.taskBatchDao.saveRelationship(taskExecution, jobExecution); + private void createJobExecution(String name, BatchStatus status) throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException { + TaskExecution taskExecution = taskExecutionDao.createTaskExecution(name, LocalDateTime.now(), new ArrayList<>(), null); + JobExecution jobExecution = jobRepository.createJobExecution(name, new JobParameters()); + taskBatchDao.saveRelationship(taskExecution, jobExecution); jobExecution.setStatus(status); - jobExecution.setStartTime(new Date()); - this.jobRepository.update(jobExecution); + jobExecution.setStartTime(LocalDateTime.now()); + jobRepository.update(jobExecution); } } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobStepExecutionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobStepExecutionsDocumentation.java index 54b2a37e15..8c0926cd8e 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobStepExecutionsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobStepExecutionsDocumentation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2020 the original author or authors. + * Copyright 2017-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,30 +16,27 @@ package org.springframework.cloud.dataflow.server.rest.documentation; +import java.time.LocalDateTime; import java.util.ArrayList; -import java.util.Date; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; +import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean; +import org.springframework.batch.core.repository.JobRestartException; import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.cloud.task.batch.listener.TaskBatchDao; -import org.springframework.cloud.task.batch.listener.support.JdbcTaskBatchDao; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.dao.TaskExecutionDao; -import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean; -import org.springframework.jdbc.datasource.DataSourceTransactionManager; import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.junit4.SpringRunner; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post; @@ 
-48,125 +45,124 @@ import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath; import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName; import static org.springframework.restdocs.request.RequestDocumentation.pathParameters; -import static org.springframework.restdocs.request.RequestDocumentation.requestParameters; +import static org.springframework.restdocs.request.RequestDocumentation.queryParameters; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; /** * Documentation for the /jobs/executions/{id}/steps endpoint. * * @author Glenn Renfro + * @author Corneil du Plessis */ -@RunWith(SpringRunner.class) -@SpringBootTest(classes = { EmbeddedDataSourceConfiguration.class }) +@SuppressWarnings({"NewClassNamingConvention","SameParameterValue"}) +@SpringBootTest(classes = {EmbeddedDataSourceConfiguration.class}) @DirtiesContext -public class JobStepExecutionsDocumentation extends BaseDocumentation { +class JobStepExecutionsDocumentation extends BaseDocumentation { private final static String JOB_NAME = "DOCJOB"; - private static boolean initialized; private JobRepository jobRepository; - private TaskExecutionDao dao; + + private TaskExecutionDao taskExecutionDao; + private TaskBatchDao taskBatchDao; - @Before - public void setup() throws Exception { - if (!initialized) { - registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE"); - initialize(); - createJobExecution(JOB_NAME, BatchStatus.STARTED); - - documentation.dontDocument(() -> this.mockMvc.perform( - post("/tasks/definitions") - .param("name", "DOCJOB1") - .param("definition", "timestamp --format='YYYY MM DD'")) - .andExpect(status().isOk())); - initialized = true; - } + @BeforeEach + void setup() throws Exception { + registerApp(ApplicationType.task, "timestamp", "3.0.0"); + initialize(); + createJobExecution(JOB_NAME, BatchStatus.STARTED); + + documentation.dontDocument( + () -> this.mockMvc + .perform(post("/tasks/definitions").param("name", "DOCJOB1") + .param("definition", "timestamp --format='YYYY MM DD'")) + .andExpect(status().isOk())); } @Test - public void listStepExecutionsForJob() throws Exception { + void listStepExecutionsForJob() throws Exception { this.mockMvc.perform( - get("/jobs/executions/{id}/steps", "1") - .param("page", "0") - .param("size", "10")) + get("/jobs/executions/{id}/steps", "1") + .param("page", "0") + .param("size", "10")) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)")), - pathParameters(parameterWithName("id") - .description("The id of an existing job execution (required)")), - responseFields( - subsectionWithPath("_embedded.stepExecutionResourceList") - .description("Contains a collection of Step Executions/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - subsectionWithPath("page").description("Pagination properties")))); + queryParameters( + parameterWithName("page").optional() + .description("The zero-based page number"), + parameterWithName("size").optional() + .description("The requested page size")), + pathParameters(parameterWithName("id") + .description("The id of an existing job execution")), + responseFields( + subsectionWithPath("_embedded.stepExecutionResourceList") + .description("Contains a collection of Step Executions/"), + 
subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties")))); } @Test - public void stepDetail() throws Exception { + void stepDetail() throws Exception { this.mockMvc.perform( - get("/jobs/executions/{id}/steps/{stepid}", "1", "1")) - .andExpect(status().isOk()).andDo(this.documentationHandler.document( - pathParameters( - parameterWithName("id").description("The id of an existing job execution (required)"), - parameterWithName("stepid") - .description("The id of an existing step execution for a specific job execution (required)") - ), - responseFields( - fieldWithPath("jobExecutionId").description("The ID of the job step execution"), - fieldWithPath("stepType").description("The type of the job step execution"), - subsectionWithPath("stepExecution").description("The step details of the job step execution"), - subsectionWithPath("_links.self").description("Link to the job step execution resource") - ) - )); + get("/jobs/executions/{id}/steps/{stepid}", "1", "1")) + .andExpect(status().isOk()).andDo(this.documentationHandler.document( + pathParameters( + parameterWithName("id").description("The id of an existing job execution"), + parameterWithName("stepid") + .description("The id of an existing step execution for a specific job execution") + ), + responseFields( + fieldWithPath("jobExecutionId").description("The ID of the job step execution"), + fieldWithPath("stepType").description("The type of the job step execution"), + subsectionWithPath("stepExecution").description("The step details of the job step execution"), + subsectionWithPath("_links.self").description("Link to the job step execution resource"), + subsectionWithPath("_links.progress").description("Link to retrieve the progress") + ) + )); } @Test - public void stepProgress() throws Exception { + void stepProgress() throws Exception { this.mockMvc.perform( - get("/jobs/executions/{id}/steps/{stepid}/progress", "1", "1")) - .andExpect(status().isOk()).andDo(this.documentationHandler.document( - pathParameters( - parameterWithName("id").description("The id of an existing job execution (required)"), - parameterWithName("stepid") - .description("The id of an existing step execution for a specific job execution (required)") - ), - responseFields( - subsectionWithPath("stepExecution").description("The detailed step details of the job step execution"), - subsectionWithPath("stepExecutionHistory") - .description("The history of the job step execution"), - fieldWithPath("percentageComplete").description("The percentage complete of the job step execution"), - fieldWithPath("finished").description("The status finished of the job step execution"), - fieldWithPath("duration").description("The duration of the job step execution"), - subsectionWithPath("_links.self").description("Link to the job step execution resource") - ) - )); + get("/jobs/executions/{id}/steps/{stepid}/progress", "1", "1")) + .andExpect(status().isOk()).andDo(this.documentationHandler.document( + pathParameters( + parameterWithName("id").description("The id of an existing job execution"), + parameterWithName("stepid") + .description("The id of an existing step execution for a specific job execution") + ), + responseFields( + subsectionWithPath("stepExecution").description("The detailed step details of the job step execution"), + subsectionWithPath("stepExecutionHistory") + .description("The history of the job step execution"), + fieldWithPath("percentageComplete").description("The 
percentage complete of the job step execution"), + fieldWithPath("finished").description("The status finished of the job step execution"), + fieldWithPath("duration").description("The duration of the job step execution"), + subsectionWithPath("_links.self").description("Link to the job step execution resource"), + subsectionWithPath("_links.progress").description("Link to the job step progress") + ) + )); } - private void initialize() throws Exception { - JobRepositoryFactoryBean repositoryFactoryBean = new JobRepositoryFactoryBean(); - repositoryFactoryBean.setDataSource(this.dataSource); - repositoryFactoryBean.setTransactionManager(new DataSourceTransactionManager(this.dataSource)); - this.jobRepository = repositoryFactoryBean.getObject(); - this.dao = (new TaskExecutionDaoFactoryBean(this.dataSource)).getObject(); - this.taskBatchDao = new JdbcTaskBatchDao(this.dataSource); + private void initialize() { + this.jobRepository = context.getBean(JobRepository.class); + this.taskExecutionDao = context.getBean(TaskExecutionDao.class); + this.taskBatchDao = context.getBean(TaskBatchDao.class); } - private void createJobExecution(String name, BatchStatus status) { - TaskExecution taskExecution = this.dao.createTaskExecution(name, new Date(), new ArrayList<>(), null); - JobExecution jobExecution = this.jobRepository.createJobExecution(this.jobRepository.createJobInstance(name, new JobParameters()), new JobParameters(), null); + private void createJobExecution(String name, BatchStatus status) throws JobInstanceAlreadyCompleteException, + JobExecutionAlreadyRunningException, JobRestartException { + TaskExecution taskExecution = taskExecutionDao.createTaskExecution(name, LocalDateTime.now(), new ArrayList<>(), null); + JobExecution jobExecution = jobRepository.createJobExecution(name, new JobParameters()); StepExecution stepExecution = new StepExecution(name + "_STEP", jobExecution, jobExecution.getId()); stepExecution.setId(null); jobRepository.add(stepExecution); - this.taskBatchDao.saveRelationship(taskExecution, jobExecution); + taskBatchDao.saveRelationship(taskExecution, jobExecution); jobExecution.setStatus(status); - jobExecution.setStartTime(new Date()); - this.jobRepository.update(jobExecution); + jobExecution.setStartTime(LocalDateTime.now()); + jobRepository.update(jobExecution); } } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeAppsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeAppsDocumentation.java index 8a40bae482..f5ca367b80 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeAppsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeAppsDocumentation.java @@ -19,9 +19,9 @@ import java.util.ArrayList; import java.util.List; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.cloud.skipper.domain.Info; @@ -41,26 +41,28 @@ * * @author Eric Bottard * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ +@SuppressWarnings("NewClassNamingConvention") @DirtiesContext -public class RuntimeAppsDocumentation 
extends BaseDocumentation { +class RuntimeAppsDocumentation extends BaseDocumentation { - @Before - public void setup() throws Exception { - registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); - registerApp(ApplicationType.sink, "log", "1.2.0.RELEASE"); + @BeforeEach + void setup() throws Exception { + registerApp(ApplicationType.source, "http", "5.0.0"); + registerApp(ApplicationType.sink, "log", "5.0.0"); createStream("mystream", "http | log", true); } - @After - public void cleanup() throws Exception { + @AfterEach + void cleanup() throws Exception { destroyStream("mystream"); unregisterApp(ApplicationType.source, "http"); unregisterApp(ApplicationType.sink, "log"); } @Test - public void listRuntimeStreamStatus() throws Exception { + void listRuntimeStreamStatus() throws Exception { this.mockMvc.perform( get("/runtime/streams") .accept(MediaType.APPLICATION_JSON) @@ -70,7 +72,7 @@ public void listRuntimeStreamStatus() throws Exception { } @Test - public void listRuntimeStreamStatusV2() throws Exception { + void listRuntimeStreamStatusV2() throws Exception { this.mockMvc.perform( get("/runtime/streams/status") .accept(MediaType.APPLICATION_JSON) @@ -80,7 +82,7 @@ public void listRuntimeStreamStatusV2() throws Exception { } @Test - public void listAllApps() throws Exception { + void listAllApps() throws Exception { this.mockMvc.perform( get("/runtime/apps") .accept(MediaType.APPLICATION_JSON) @@ -90,7 +92,7 @@ public void listAllApps() throws Exception { } @Test - public void listSingleAppAllInstances() throws Exception { + void listSingleAppAllInstances() throws Exception { Info info = new Info(); info.setStatus(new Status()); @@ -118,7 +120,7 @@ public void listSingleAppAllInstances() throws Exception { } @Test - public void getSingleAppSingleInstance() throws Exception { + void getSingleAppSingleInstance() throws Exception { Info info = new Info(); info.setStatus(new Status()); diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeStreamStatusForStreamAppsWithoutCollectorDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeStreamStatusForStreamAppsWithoutCollectorDocumentation.java index cd985e1669..7bfbf210be 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeStreamStatusForStreamAppsWithoutCollectorDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeStreamStatusForStreamAppsWithoutCollectorDocumentation.java @@ -16,8 +16,7 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import org.junit.Ignore; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.http.MediaType; @@ -26,12 +25,13 @@ /** * @author Gunnar Hillert + * @author Corneil du Plessis */ -@Ignore -public class RuntimeStreamStatusForStreamAppsWithoutCollectorDocumentation extends BaseDocumentation { +@SuppressWarnings("NewClassNamingConvention") +class RuntimeStreamStatusForStreamAppsWithoutCollectorDocumentation extends BaseDocumentation { @Test - public void getMetricsWithoutCollectorRunning() throws Exception { + void getMetricsWithoutCollectorRunning() throws Exception { this.mockMvc.perform(get("/runtime/streams") .accept(MediaType.APPLICATION_JSON)) .andExpect(status().isOk()); diff --git 
a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDefinitionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDefinitionsDocumentation.java index a90d004051..5288fe3c5b 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDefinitionsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDefinitionsDocumentation.java @@ -18,12 +18,13 @@ import java.util.Arrays; -import org.junit.Before; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.MethodOrderer.MethodName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; import org.springframework.cloud.dataflow.core.ApplicationType; +import org.springframework.test.annotation.DirtiesContext; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.delete; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; @@ -33,7 +34,7 @@ import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath; import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName; import static org.springframework.restdocs.request.RequestDocumentation.pathParameters; -import static org.springframework.restdocs.request.RequestDocumentation.requestParameters; +import static org.springframework.restdocs.request.RequestDocumentation.queryParameters; import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; @@ -42,45 +43,39 @@ * * @author Gunnar Hillert * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) -public class StreamDefinitionsDocumentation extends BaseDocumentation { - - private static boolean setUpIsDone = false; - - @Before - public void setup() throws Exception { - if (setUpIsDone) { - return; - } - - +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodName.class) +@DirtiesContext +class StreamDefinitionsDocumentation extends BaseDocumentation { + @BeforeEach + void setup() throws Exception { this.mockMvc.perform( post("/apps/{type}/time", "source") - .param("uri", "maven://org.springframework.cloud.stream.app:time-source-rabbit:1.2.0.RELEASE")) + .queryParam("uri", "maven://org.springframework.cloud.stream.app:time-source-rabbit:5.0.0")) .andExpect(status().isCreated()); this.mockMvc.perform( post("/apps/{type}/log", "sink") - .param("uri", "maven://org.springframework.cloud.stream.app:log-sink-rabbit:1.2.0.RELEASE")) + .queryParam("uri", "maven://org.springframework.cloud.stream.app:log-sink-rabbit:5.0.0")) .andExpect(status().isCreated()); - setUpIsDone = true; } @Test - public void createDefinition() throws Exception { + void createDefinition() throws Exception { this.mockMvc.perform( post("/streams/definitions") - .param("name", "timelog") - .param("definition", "time --format='YYYY MM DD' | log") - .param("description", "Demo stream for testing") - .param("deploy", "false")) + .queryParam("name", "timelog") + .queryParam("definition", "time --format='YYYY MM DD' | log") + 
.queryParam("description", "Demo stream for testing") + .queryParam("deploy", "false")) .andExpect(status().isCreated()) .andDo(this.documentationHandler.document( - requestParameters( + queryParameters( parameterWithName("name").description("The name for the created task definitions"), parameterWithName("definition").description("The definition for the stream, using Data Flow DSL"), parameterWithName("description").description("The description of the stream definition"), - parameterWithName("deploy") + parameterWithName("deploy").optional() .description("If true, the stream is deployed upon creation (default is false)")), responseFields( fieldWithPath("name").description("The name of the created stream definition"), @@ -96,21 +91,27 @@ public void createDefinition() throws Exception { } @Test - public void listAllStreamDefinitions() throws Exception { + void listAllStreamDefinitions() throws Exception { + this.documentation.dontDocument( + () -> this.mockMvc + .perform(post("/streams/definitions").queryParam("name", "timelog") + .queryParam("definition", "time --format='YYYY MM DD' | log") + .queryParam("description", "Demo stream for testing") + .queryParam("deploy", "false")) + .andExpect(status().isCreated())); this.mockMvc.perform( get("/streams/definitions") - .param("page", "0") - .param("sort", "name,ASC") - .param("search", "") - .param("size", "10")) - .andDo(print()) + .queryParam("page", "0") + .queryParam("sort", "name,ASC") + .queryParam("search", "") + .queryParam("size", "10")) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page").description("The zero-based page number (optional)"), - parameterWithName("search").description("The search string performed on the name (optional)"), - parameterWithName("sort").description("The sort on the list (optional)"), - parameterWithName("size").description("The requested page size (optional)")), + queryParameters( + parameterWithName("page").optional().description("The zero-based page number"), + parameterWithName("search").optional().description("The search string performed on the name"), + parameterWithName("sort").optional().description("The sort on the list"), + parameterWithName("size").optional().description("The requested page size")), responseFields( subsectionWithPath("_embedded.streamDefinitionResourceList") .description("Contains a collection of Stream Definitions"), @@ -119,14 +120,20 @@ public void listAllStreamDefinitions() throws Exception { } @Test - public void getStreamDefinition() throws Exception { + void getStreamDefinition() throws Exception { + this.documentation.dontDocument( + () -> this.mockMvc + .perform(post("/streams/definitions").queryParam("name", "timelog") + .queryParam("definition", "time --format='YYYY MM DD' | log") + .queryParam("description", "Demo stream for testing") + .queryParam("deploy", "false")) + .andExpect(status().isCreated())); this.mockMvc.perform( get("/streams/definitions/{name}", "timelog")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters( - parameterWithName("name").description("The name of the stream definition to query (required)") + parameterWithName("name").description("The name of the stream definition to query") ), responseFields( fieldWithPath("name").description("The name of the stream definition"), @@ -141,15 +148,14 @@ public void getStreamDefinition() throws Exception { } @Test - public void getStreamApplications() throws Exception { + void 
getStreamApplications() throws Exception { createStream("mysamplestream", "time | log", false); this.mockMvc.perform( get("/streams/definitions/{name}/applications", "mysamplestream")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters( - parameterWithName("name").description("The name of the stream definition to query (required)") + parameterWithName("name").description("The name of the stream definition to query") ), responseFields( fieldWithPath("[]").description("An array of applications"), @@ -158,6 +164,7 @@ public void getStreamApplications() throws Exception { fieldWithPath("[].type").description("The type of the application. One of " + Arrays .asList(ApplicationType.values())), fieldWithPath("[].uri").description("The uri of the application"), + fieldWithPath("[].metaDataUri").description("The uri of the application metadata"), fieldWithPath("[].version").description("The version of the application"), fieldWithPath("[].defaultVersion").description("If true, the application is the default version"), fieldWithPath("[].versions").description("All the registered versions of the application"), @@ -166,26 +173,32 @@ public void getStreamApplications() throws Exception { } @Test - public void listRelatedStreamDefinitions() throws Exception { + void listRelatedStreamDefinitions() throws Exception { + this.documentation.dontDocument( + () -> this.mockMvc + .perform(post("/streams/definitions").queryParam("name", "timelog") + .queryParam("definition", "time --format='YYYY MM DD' | log") + .queryParam("description", "Demo stream for testing") + .queryParam("deploy", "false")) + .andExpect(status().isCreated())); this.mockMvc.perform( get("/streams/definitions/{name}/related", "timelog") - .param("page", "0") - .param("sort", "name,ASC") - .param("search", "") - .param("size", "10") - .param("nested", "true")) - .andDo(print()) + .queryParam("page", "0") + .queryParam("sort", "name,ASC") + .queryParam("search", "") + .queryParam("size", "10") + .queryParam("nested", "true")) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("nested") - .description("Should we recursively findByTaskNameContains for related stream definitions (optional)"), - parameterWithName("page").description("The zero-based page number (optional)"), - parameterWithName("search").description("The search string performed on the name (optional)"), - parameterWithName("sort").description("The sort on the list (optional)"), - parameterWithName("size").description("The requested page size (optional)")), + queryParameters( + parameterWithName("nested").optional() + .description("Should we recursively findByTaskNameContains for related stream definitions"), + parameterWithName("page").optional().description("The zero-based page number"), + parameterWithName("search").optional().description("The search string performed on the name"), + parameterWithName("sort").optional().description("The sort on the list"), + parameterWithName("size").optional().description("The requested page size")), pathParameters(parameterWithName("name") - .description("The name of an existing stream definition (required)")), + .description("The name of an existing stream definition")), responseFields( subsectionWithPath("_embedded.streamDefinitionResourceList") .description("Contains a collection of Stream Definitions"), @@ -195,22 +208,34 @@ public void listRelatedStreamDefinitions() throws Exception { } @Test - public void 
streamDefinitionDelete1() throws Exception { + void streamDefinitionDelete1() throws Exception { + this.documentation.dontDocument( + () -> this.mockMvc + .perform(post("/streams/definitions").queryParam("name", "timelog") + .queryParam("definition", "time --format='YYYY MM DD' | log") + .queryParam("description", "Demo stream for testing") + .queryParam("deploy", "false")) + .andExpect(status().isCreated())); this.mockMvc.perform( delete("/streams/definitions/{name}", "timelog")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters(parameterWithName("name") - .description("The name of an existing stream definition (required)")) + .description("The name of an existing stream definition")) )); } @Test - public void streamDefinitionDeleteAll() throws Exception { + void streamDefinitionDeleteAll() throws Exception { + this.documentation.dontDocument( + () -> this.mockMvc + .perform(post("/streams/definitions").queryParam("name", "timelog") + .queryParam("definition", "time --format='YYYY MM DD' | log") + .queryParam("description", "Demo stream for testing") + .queryParam("deploy", "false")) + .andExpect(status().isCreated())); this.mockMvc.perform( delete("/streams/definitions")) - .andDo(print()) .andExpect(status().isOk()); } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDeploymentsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDeploymentsDocumentation.java index e6bdf4afd3..aa65fe2746 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDeploymentsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDeploymentsDocumentation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
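One pattern from the StreamDefinitionsDocumentation changes above deserves a note: with the static setUpIsDone guard removed, every test recreates its own stream definition, and those fixture requests are wrapped in documentation.dontDocument(...) so they execute without generating snippets. A condensed sketch of that fixture helper (the method name is hypothetical; the calls mirror the ones above):

    // Runs the fixture request but suppresses REST Docs snippet generation for it.
    private void createTimelogDefinition() throws Exception {
        this.documentation.dontDocument(() -> this.mockMvc
                .perform(post("/streams/definitions")
                        .queryParam("name", "timelog")
                        .queryParam("definition", "time --format='YYYY MM DD' | log")
                        .queryParam("description", "Demo stream for testing")
                        .queryParam("deploy", "false"))
                .andExpect(status().isCreated()));
    }

Because each test provisions its own definition, @TestMethodOrder(MethodName.class) together with @DirtiesContext keeps the runs deterministic without any cross-test state.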
@@ -17,22 +17,23 @@ package org.springframework.cloud.dataflow.server.rest.documentation; import java.io.IOException; -import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.databind.ObjectMapper; -import org.junit.Before; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.MethodOrderer.MethodName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; import org.springframework.cloud.dataflow.rest.UpdateStreamRequest; import org.springframework.cloud.skipper.domain.PackageIdentifier; import org.springframework.cloud.skipper.domain.Release; import org.springframework.cloud.skipper.domain.RollbackRequest; import org.springframework.http.MediaType; +import org.springframework.test.annotation.DirtiesContext; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.when; @@ -41,26 +42,22 @@ import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post; import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName; import static org.springframework.restdocs.request.RequestDocumentation.pathParameters; -import static org.springframework.restdocs.request.RequestDocumentation.requestParameters; -import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; +import static org.springframework.restdocs.request.RequestDocumentation.queryParameters; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; /** * @author Glenn Renfro * @author Ilayaperumal Gopinathan * @author Christian Tzolov + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodName.class) +@DirtiesContext public class StreamDeploymentsDocumentation extends BaseDocumentation { - private static boolean setUpIsDone = false; - - @Before - public void setup() throws Exception { - if (setUpIsDone) { - return; - } - + @BeforeEach + void setup() throws Exception { this.mockMvc.perform( post("/apps/{type}/time", "source") .param("uri", "maven://org.springframework.cloud.stream.app:time-source-rabbit:1.2.0.RELEASE") @@ -83,41 +80,39 @@ public void setup() throws Exception { .param("definition", "time --format='YYYY MM DD' | log") .param("deploy", "false")) .andExpect(status().isCreated()); - setUpIsDone = true; } @Test - public void scale() throws Exception { + void scale() throws Exception { String json = "{\"app.time.timestamp.format\":\"YYYY\"}"; this.mockMvc.perform( post("/streams/deployments/scale/{streamName}/{appName}/instances/{count}", "timelog", "log", 1) .contentType(MediaType.APPLICATION_JSON) .content(json)) - .andDo(print()) .andExpect(status().isCreated()) .andDo(this.documentationHandler.document(pathParameters( parameterWithName("streamName") - .description("the name of an existing stream definition (required)"), - parameterWithName("appName") + .description("the name of an existing stream definition"), + parameterWithName("appName").optional() .description("in stream application name to scale"), parameterWithName("count") - .description("number of instances for the selected stream application (required)")) + .description("number of instances for the selected stream application")) )); } @Test - public void unDeploy() 
throws Exception { + void unDeploy() throws Exception { this.mockMvc.perform( delete("/streams/deployments/{timelog}", "timelog")) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters(parameterWithName("timelog") - .description("The name of an existing stream definition (required)")) + .description("The name of an existing stream definition")) )); } @Test - public void unDeployAll() throws Exception { + void unDeployAll() throws Exception { this.mockMvc.perform( delete("/streams/deployments")) .andExpect(status().isOk()) @@ -126,7 +121,7 @@ public void unDeployAll() throws Exception { @Test - public void info() throws Exception { + void info() throws Exception { String json = "{\"app.time.timestamp.format\":\"YYYY\"}"; this.mockMvc.perform( get("/streams/deployments/{timelog}?reuse-deployment-properties=true", "timelog") @@ -135,47 +130,44 @@ public void info() throws Exception { .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters(parameterWithName("timelog") - .description("The name of an existing stream definition (required)")), - requestParameters(parameterWithName("reuse-deployment-properties") + .description("The name of an existing stream definition")), + queryParameters(parameterWithName("reuse-deployment-properties").optional() - .description(parameterWithName("The name of the flag to reuse the deployment properties"))) + .description("The flag to reuse the deployment properties")) )); } @Test - public void deploy() throws Exception { + void deploy() throws Exception { String json = "{\"app.time.timestamp.format\":\"YYYY\"}"; this.mockMvc.perform( post("/streams/deployments/{timelog}", "timelog") .contentType(MediaType.APPLICATION_JSON) .content(json)) - .andDo(print()) .andExpect(status().isCreated()) .andDo(this.documentationHandler.document( pathParameters(parameterWithName("timelog") - .description("The name of an existing stream definition (required)")) + .description("The name of an existing stream definition")) )); } @Test - public void streamUpdate() throws Exception { + void streamUpdate() throws Exception { String json = "{\"app.time.timestamp.format\":\"YYYY\"}"; this.mockMvc.perform( post("/streams/deployments/{timelog1}", "timelog1") .contentType(MediaType.APPLICATION_JSON) .content(json)) - .andDo(print()) .andExpect(status().isCreated()) .andDo(this.documentationHandler.document( pathParameters(parameterWithName("timelog1") - .description("The name of an existing stream definition (required)")) + .description("The name of an existing stream definition")) )); - Thread.sleep(30000); UpdateStreamRequest updateStreamRequest = new UpdateStreamRequest(); updateStreamRequest.setReleaseName("timelog1"); Map<String, String> updateProperties = new HashMap<>(); updateProperties.put("app.time.timestamp.format", "YYYYMMDD"); updateStreamRequest.setUpdateProperties(updateProperties); - final String releaseName = "myLogRelease"; + final PackageIdentifier packageIdentifier = new PackageIdentifier(); packageIdentifier.setPackageName("timelog1"); packageIdentifier.setPackageVersion("1.0.0"); @@ -186,73 +178,65 @@ public void streamUpdate() throws Exception { post("/streams/deployments/update/{timelog1}", "timelog1") .contentType(MediaType.APPLICATION_JSON) .content(convertObjectToJson(updateStreamRequest))) - .andDo(print()) .andExpect(status().isCreated()) .andDo(this.documentationHandler.document( pathParameters(parameterWithName("timelog1") - .description("The name of an existing stream definition (required)")) + .description("The name of an existing stream definition")) )); -
Thread.sleep(30000); } @Test - public void rollback() throws Exception { - RollbackRequest rollbackRequest = new RollbackRequest(); + void rollback() throws Exception { + final RollbackRequest rollbackRequest = new RollbackRequest(); rollbackRequest.setReleaseName("timelog1"); this.mockMvc.perform( post("/streams/deployments/rollback/{name}/{version}", "timelog1", 1) .contentType(MediaType.APPLICATION_JSON)) - .andDo(print()) .andExpect(status().isCreated()) .andDo(this.documentationHandler.document( pathParameters(parameterWithName("name") - .description("The name of an existing stream definition (required)"), + .description("The name of an existing stream definition"), parameterWithName("version").description("The version to rollback to")))); - Thread.sleep(30000); } @Test - public void history() throws Exception { - when(this.springDataflowServer.getSkipperClient().history(anyString())) - .thenReturn(Arrays.asList(new Release())); + void history() throws Exception { + when(springDataflowServer.getSkipperClient().history(anyString())) + .thenReturn(Collections.singletonList(new Release())); this.mockMvc.perform( get("/streams/deployments/history/{name}", "timelog1") .contentType(MediaType.APPLICATION_JSON)) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters(parameterWithName("name") - .description("The name of an existing stream definition (required)")))); + .description("The name of an existing stream definition")))); } @Test - public void manifest() throws Exception { + void manifest() throws Exception { this.mockMvc.perform( get("/streams/deployments/manifest/{name}/{version}", "timelog1", 1) .contentType(MediaType.APPLICATION_JSON)) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters(parameterWithName("name") - .description("The name of an existing stream definition (required)"), + .description("The name of an existing stream definition"), parameterWithName("version").description("The version of the stream")))); } @Test - public void platformList() throws Exception { + void platformList() throws Exception { this.mockMvc.perform( get("/streams/deployments/platform/list") .contentType(MediaType.APPLICATION_JSON)) - .andDo(print()) .andExpect(status().isOk()); } public static String convertObjectToJson(Object object) throws IOException { ObjectMapper mapper = new ObjectMapper(); mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); - String json = mapper.writeValueAsString(object); - return json; + return mapper.writeValueAsString(object); } } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamLogsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamLogsDocumentation.java index 5e60efb9ee..eaaa28c129 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamLogsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamLogsDocumentation.java @@ -19,27 +19,28 @@ import java.util.HashMap; import java.util.Map; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; +import org.junit.jupiter.api.MethodOrderer; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; import 
org.springframework.cloud.skipper.domain.LogInfo; import static org.mockito.Mockito.when; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; -import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; /** * Documentation for the {@code /streams/logs} endpoint. * * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) -public class StreamLogsDocumentation extends BaseDocumentation { +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodOrderer.MethodName.class) +class StreamLogsDocumentation extends BaseDocumentation { @Test - public void getLogsByStreamName() throws Exception { + void getLogsByStreamName() throws Exception { LogInfo logInfo = new LogInfo(); Map<String, String> logs = new HashMap<>(); logs.put("ticktock-log-v1", "Logs-log"); @@ -48,13 +49,12 @@ public void getLogsByStreamName() throws Exception { when(springDataflowServer.getSkipperClient().getLog("ticktock")).thenReturn(logInfo); this.mockMvc.perform( get("/streams/logs/ticktock")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document()); } @Test - public void getLogsByAppName() throws Exception { + void getLogsByAppName() throws Exception { LogInfo logInfo = new LogInfo(); Map<String, String> logs = new HashMap<>(); logs.put("ticktock-log-v1", "Logs-log"); @@ -62,7 +62,6 @@ public void getLogsByAppName() throws Exception { when(springDataflowServer.getSkipperClient().getLog("ticktock", "ticktock-log-v1")).thenReturn(logInfo); this.mockMvc.perform( get("/streams/logs/ticktock/ticktock-log-v1")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document()); } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamValidationDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamValidationDocumentation.java index 6945376216..04e0a52b39 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamValidationDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamValidationDocumentation.java @@ -16,11 +16,6 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import org.junit.Before; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; - import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post; import static org.springframework.restdocs.payload.PayloadDocumentation.fieldWithPath; @@ -30,22 +25,23 @@ import static org.springframework.restdocs.request.RequestDocumentation.pathParameters; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.MethodOrderer; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; + /** * Documentation for the /streams/validation endpoint.
* * @author Glenn Renfro + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) -public class StreamValidationDocumentation extends BaseDocumentation { - - private static boolean setUpIsDone = false; - - @Before - public void setup() throws Exception { - if (setUpIsDone) { - return; - } +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodOrderer.MethodName.class) +class StreamValidationDocumentation extends BaseDocumentation { + @BeforeEach + void setup() throws Exception { this.mockMvc.perform( post("/apps/{type}/time", "source") .param("uri", "maven://org.springframework.cloud.stream.app:time-source-rabbit:1.2.0.RELEASE") @@ -56,11 +52,10 @@ public void setup() throws Exception { .param("uri", "maven://org.springframework.cloud.stream.app:log-sink-rabbit:1.2.0.RELEASE") .param("force", "true")) .andExpect(status().isCreated()); - setUpIsDone = true; } @Test - public void validateStream() throws Exception { + void validateStream() throws Exception { this.mockMvc.perform( post("/streams/definitions") .param("name", "timelog") @@ -74,7 +69,7 @@ public void validateStream() throws Exception { .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters( - parameterWithName("name").description("The name of a stream definition to be validated (required)") + parameterWithName("name").description("The name of a stream definition to be validated") ), responseFields( fieldWithPath("appName").description("The name of a stream definition"), diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskDefinitionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskDefinitionsDocumentation.java index 44c11c2566..a67a028a9b 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskDefinitionsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskDefinitionsDocumentation.java @@ -16,11 +16,11 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import org.junit.After; -import org.junit.Before; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.MethodOrderer; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; import org.springframework.cloud.dataflow.core.ApplicationType; @@ -32,7 +32,7 @@ import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath; import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName; import static org.springframework.restdocs.request.RequestDocumentation.pathParameters; -import static org.springframework.restdocs.request.RequestDocumentation.requestParameters; +import static org.springframework.restdocs.request.RequestDocumentation.queryParameters; import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; @@ -41,34 +41,35 @@ * * @author Eric Bottard * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodOrderer.MethodName.class) +class 
TaskDefinitionsDocumentation extends BaseDocumentation { -@FixMethodOrder(MethodSorters.NAME_ASCENDING) -public class TaskDefinitionsDocumentation extends BaseDocumentation { - - @Before - public void setup() throws Exception { - registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE"); + @BeforeEach + void setup() throws Exception { + registerApp(ApplicationType.task, "timestamp", "3.0.0"); } - @After - public void tearDown() throws Exception { + @AfterEach + void tearDown() throws Exception { unregisterApp(ApplicationType.task, "timestamp"); } @Test - public void createDefinition() throws Exception { + void createDefinition() throws Exception { this.mockMvc.perform( post("/tasks/definitions") - .param("name", "my-task") - .param("definition", "timestamp --format='YYYY MM DD'") - .param("description", "Demo task definition for testing")) + .queryParam("name", "my-task") + .queryParam("definition", "timestamp --format='YYYY MM DD'") + .queryParam("description", "Demo task definition for testing")) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( - requestParameters( + queryParameters( parameterWithName("name").description("The name for the created task definition"), parameterWithName("definition").description("The definition for the task, using Data Flow DSL"), - parameterWithName("description").description("The description of the task definition") + parameterWithName("description").optional().description("The description of the task definition") ), responseFields( fieldWithPath("name").description("The name of the created task definition"), @@ -85,24 +86,29 @@ public void createDefinition() throws Exception { } @Test - public void listAllTaskDefinitions() throws Exception { + void listAllTaskDefinitions() throws Exception { + this.documentation.dontDocument( + () -> this.mockMvc + .perform(post("/tasks/definitions").queryParam("name", "my-task") + .queryParam("definition", "timestamp --format='YYYY MM DD'") + .queryParam("description", "Demo task definition for testing")) + .andExpect(status().isOk())); this.mockMvc.perform( get("/tasks/definitions") - .param("page", "0") - .param("size", "10") - .param("sort", "taskName,ASC") - .param("search", "") - .param("manifest", "true") + .queryParam("page", "0") + .queryParam("size", "10") + .queryParam("sort", "taskName,ASC") + .queryParam("taskName", "") + .queryParam("manifest", "true") ) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page").description("The zero-based page number (optional)"), - parameterWithName("size").description("The requested page size (optional)"), - parameterWithName("search").description("The search string performed on the name (optional)"), - parameterWithName("sort").description("The sort on the list (optional)"), - parameterWithName("manifest").description("The flag to include the task manifest into the latest task execution (optional)") + queryParameters( + parameterWithName("page").optional().description("The zero-based page number"), + parameterWithName("size").optional().description("The requested page size"), + parameterWithName("taskName").optional().description("The task name to limit the results"), + parameterWithName("sort").optional().description("The sort on the list"), + parameterWithName("manifest").optional().description("The flag to include the task manifest into the latest task execution") ), responseFields( subsectionWithPath("_embedded.taskDefinitionResourceList") @@ -112,18 +118,23 @@ 
public void listAllTaskDefinitions() throws Exception { } @Test - public void displayDetail() throws Exception { + void displayDetail() throws Exception { + this.documentation.dontDocument( + () -> this.mockMvc + .perform(post("/tasks/definitions").queryParam("name", "my-task") + .queryParam("definition", "timestamp --format='YYYY MM DD'") + .queryParam("description", "Demo task definition for testing")) + .andExpect(status().isOk())); this.mockMvc.perform( get("/tasks/definitions/{my-task}","my-task") - .param("manifest", "true")) - .andDo(print()) + .queryParam("manifest", "true")) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters( - parameterWithName("my-task").description("The name of an existing task definition (required)") + parameterWithName("my-task").description("The name of an existing task definition") ), - requestParameters( - parameterWithName("manifest").description("The flag to include the task manifest into the latest task execution (optional)") + queryParameters( + parameterWithName("manifest").optional().description("The flag to include the task manifest into the latest task execution") ), responseFields( fieldWithPath("name").description("The name of the created task definition"), @@ -140,17 +151,22 @@ public void displayDetail() throws Exception { } @Test - public void taskDefinitionDelete() throws Exception { + void taskDefinitionDelete() throws Exception { + this.documentation.dontDocument( + () -> this.mockMvc + .perform(post("/tasks/definitions").queryParam("name", "my-task") + .queryParam("definition", "timestamp --format='YYYY MM DD'") + .queryParam("description", "Demo task definition for testing")) + .andExpect(status().isOk())); this.mockMvc.perform( delete("/tasks/definitions/{my-task}", "my-task") - .param("cleanup", "true")) - .andDo(print()) + .queryParam("cleanup", "true")) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters( - parameterWithName("my-task").description("The name of an existing task definition (required)")), - requestParameters( - parameterWithName("cleanup").description("The flag to indicate if the associated task executions needed to be cleaned up") + parameterWithName("my-task").description("The name of an existing task definition")), + queryParameters( + parameterWithName("cleanup").optional().description("The flag to indicate if the associated task executions need to be cleaned up") ) )); } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskExecutionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskExecutionsDocumentation.java index 3db635cda1..9211b54e60 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskExecutionsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskExecutionsDocumentation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2019 the original author or authors. + * Copyright 2017-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
@@ -16,13 +16,19 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import org.junit.After; -import org.junit.Before; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; +import java.util.concurrent.atomic.AtomicReference; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.MethodOrderer; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; import org.springframework.cloud.dataflow.core.ApplicationType; +import org.springframework.restdocs.payload.JsonFieldType; +import org.springframework.test.web.servlet.MvcResult; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.delete; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; @@ -32,7 +38,7 @@ import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath; import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName; import static org.springframework.restdocs.request.RequestDocumentation.pathParameters; -import static org.springframework.restdocs.request.RequestDocumentation.requestParameters; +import static org.springframework.restdocs.request.RequestDocumentation.queryParameters; import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; @@ -43,67 +49,101 @@ * @author Glenn Renfro * @author David Turanski * @author Gunnar Hillert + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) -public class TaskExecutionsDocumentation extends BaseDocumentation { - - @Before - public void setup() throws Exception { - registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE"); +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodOrderer.MethodName.class) +class TaskExecutionsDocumentation extends BaseDocumentation { + @BeforeEach + void setup() throws Exception { + registerApp(ApplicationType.task, "timestamp", "3.0.0"); createTaskDefinition("taskA"); createTaskDefinition("taskB"); - + executeTask("taskA"); + executeTask("taskB"); } - @After - public void tearDown() throws Exception { + + @AfterEach + void tearDown() throws Exception { + cleanupTaskExecutions("taskA"); + cleanupTaskExecutions("taskB"); destroyTaskDefinition("taskA"); destroyTaskDefinition("taskB"); unregisterApp(ApplicationType.task, "timestamp"); } @Test - public void launchTask() throws Exception { + void launchTaskBoot3() throws Exception { this.mockMvc.perform( - post("/tasks/executions") - .param("name", "taskA") - .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") - .param("arguments", "--server.port=8080 --foo=bar")) + post("/tasks/executions/launch") + .queryParam("name", "taskA") + .queryParam("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") + .queryParam("arguments", "--server.port=8080 --foo=bar") + ) + .andExpect(status().isCreated()) + .andDo(this.documentationHandler.document( + queryParameters( + parameterWithName("name").description("The name of the task definition to launch"), + parameterWithName("properties").optional() + .description("Application and Deployer properties to use while launching."), + parameterWithName("arguments").optional() + .description("Command line 
arguments to pass to the task.")), + responseFields( + fieldWithPath("executionId").description("The id of the task execution"), + subsectionWithPath("_links.self").description("Link to the task execution resource"), + subsectionWithPath("_links.tasks/logs").type(JsonFieldType.OBJECT).optional().description("Link to the task execution logs") + ) + ) + ); + } + + @Test + void launchTask() throws Exception { + this.mockMvc.perform( + post("/tasks/executions") + .queryParam("name", "taskA") + .queryParam("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") + .queryParam("arguments", "--server.port=8080 --foo=bar") + ) .andExpect(status().isCreated()) .andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("name").description("The name of the task definition to launch"), - parameterWithName("properties").optional() - .description("Application and Deployer properties to use while launching"), - parameterWithName("arguments").optional() - .description("Command line arguments to pass to the task")))); + queryParameters( + parameterWithName("name").description("The name of the task definition to launch"), + parameterWithName("properties").optional() + .description("Application and Deployer properties to use while launching."), + parameterWithName("arguments").optional() + .description("Command line arguments to pass to the task.") + ) + ) + ); } @Test - public void getTaskCurrentCount() throws Exception { + void getTaskCurrentCount() throws Exception { this.mockMvc.perform( - get("/tasks/executions/current")) - .andDo(print()) + get("/tasks/executions/current") + ) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( - responseFields( - fieldWithPath("[].name").description("The name of the platform instance (account)"), - fieldWithPath("[].type").description("The platform type"), - fieldWithPath("[].maximumTaskExecutions").description("The number of maximum task execution"), - fieldWithPath("[].runningExecutionCount").description("The number of running executions") - ) + responseFields( + fieldWithPath("[].name").description("The name of the platform instance (account)"), + fieldWithPath("[].type").description("The platform type"), + fieldWithPath("[].maximumTaskExecutions").description("The maximum number of task executions"), + fieldWithPath("[].runningExecutionCount").description("The number of running executions") + ) )); } @Test - public void launchTaskDisplayDetail() throws Exception { + void getTaskDisplayDetail() throws Exception { this.mockMvc.perform( - get("/tasks/executions/{id}", "1")) - .andDo(print()) + get("/tasks/executions/{id}", "1") + ) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters( - parameterWithName("id").description("The id of an existing task execution (required)") + parameterWithName("id").description("The id of an existing task execution") ), responseFields( fieldWithPath("executionId").description("The id of the task execution"), @@ -121,131 +161,301 @@ public void launchTaskDisplayDetail() throws Exception { "null if task execution does not have parent"), fieldWithPath("resourceUrl").description("The resource URL that defines the task that was executed"), subsectionWithPath("appProperties").description("The application properties of the task execution"), - subsectionWithPath("deploymentProperties").description("The deployment properties of the task exectuion"), subsectionWithPath("deploymentProperties").description("The deployment
properties of the task execution"), subsectionWithPath("platformName").description("The platform selected for the task execution"), - subsectionWithPath("_links.self").description("Link to the task execution resource") + subsectionWithPath("_links.self").description("Link to the task execution resource"), + subsectionWithPath("_links.tasks/logs").description("Link to the task execution logs") ) )); } @Test - public void listTaskExecutions() throws Exception { - documentation.dontDocument( () -> this.mockMvc.perform( - post("/tasks/executions") - .param("name", "taskB") - .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") - .param("arguments", "--server.port=8080 --foo=bar")) + void getTaskDisplayDetailByExternalId() throws Exception { + final AtomicReference<String> externalExecutionId = new AtomicReference<>(null); + documentation.dontDocument(() -> { + MvcResult mvcResult = this.mockMvc.perform( + get("/tasks/executions") + .queryParam("page", "0") + .queryParam("size", "20")) + .andExpect(status().isOk()).andReturn(); + ObjectMapper mapper = new ObjectMapper(); + JsonNode node = mapper.readTree(mvcResult.getResponse().getContentAsString()); + JsonNode list = node.get("_embedded").get("taskExecutionResourceList"); + JsonNode first = list.get(0); + externalExecutionId.set(first.get("externalExecutionId").asText()); + return externalExecutionId.get(); + }); + + this.mockMvc.perform( + get("/tasks/executions/external/{externalExecutionId}", externalExecutionId.get()).queryParam("platform", "default") + ) + .andExpect(status().isOk()) + .andDo(this.documentationHandler.document( + pathParameters( + parameterWithName("externalExecutionId").description("The external execution id of an existing task execution") + ), + queryParameters( + parameterWithName("platform").optional().description("The name of the platform.") + ), + responseFields( + fieldWithPath("executionId").description("The id of the task execution"), + fieldWithPath("exitCode").description("The exit code of the task execution"), + fieldWithPath("taskName").description("The task name related to the task execution"), + fieldWithPath("startTime").description("The start time of the task execution"), + fieldWithPath("endTime").description("The end time of the task execution"), + fieldWithPath("exitMessage").description("The exit message of the task execution"), + fieldWithPath("arguments").description("The arguments of the task execution"), + fieldWithPath("jobExecutionIds").description("The job execution ids of the task execution"), + fieldWithPath("errorMessage").description("The error message of the task execution"), + fieldWithPath("externalExecutionId").description("The external id of the task execution"), + fieldWithPath("taskExecutionStatus").description("The status of the task execution"), + fieldWithPath("parentExecutionId").description("The id of parent task execution, " + + "null if task execution does not have parent"), + fieldWithPath("resourceUrl").description("The resource URL that defines the task that was executed"), + subsectionWithPath("appProperties").description("The application properties of the task execution"), + subsectionWithPath("deploymentProperties").description("The deployment properties of the task execution"), + subsectionWithPath("platformName").description("The platform selected for the task execution"), + subsectionWithPath("_links.self").description("Link to the task execution resource"), + subsectionWithPath("_links.tasks/logs").description("Link to the task execution logs") + )
+ )); + } + + @Test + void listTaskExecutions() throws Exception { + documentation.dontDocument(() -> this.mockMvc.perform( + post("/tasks/executions") + .queryParam("name", "taskB") + .queryParam("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") + .queryParam("arguments", "--server.port=8080 --foo=bar") + ) .andExpect(status().isCreated())); this.mockMvc.perform( - get("/tasks/executions") - .param("page", "0") - .param("size", "10")) - .andDo(print()) + get("/tasks/executions") + .queryParam("page", "0") + .queryParam("size", "10") + .queryParam("sort", "END_TIME,desc")) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)")), + queryParameters( + parameterWithName("page").optional() + .description("The zero-based page number"), + parameterWithName("size").optional() + .description("The requested page size"), + parameterWithName("sort").optional() + .description("The sort criteria: column name and optional sort direction. Example: END_TIME,desc") + ), + responseFields( + subsectionWithPath("_embedded.taskExecutionResourceList") + .description("Contains a collection of Task Executions"), + subsectionWithPath("_links.self").description("Link to the task execution resource"), + subsectionWithPath("_links.first") + .description("Link to the first page of task execution resources") + .type(JsonFieldType.OBJECT) + .optional(), + subsectionWithPath("_links.last") + .description("Link to the last page of task execution resources") + .type(JsonFieldType.OBJECT) + .optional(), + subsectionWithPath("_links.next") + .description("Link to the next page of task execution resources") + .type(JsonFieldType.OBJECT) + .optional(), + subsectionWithPath("_links.prev") + .description("Link to the previous page of task execution resources") + .type(JsonFieldType.OBJECT) + .optional(), + subsectionWithPath("page").description("Pagination properties")))); + } + + @Test + void listTaskThinExecutions() throws Exception { + documentation.dontDocument(() -> this.mockMvc.perform( + post("/tasks/executions") + .queryParam("name", "taskB") + .queryParam("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") + .queryParam("arguments", "--server.port=8080 --foo=bar") + ) + .andExpect(status().isCreated())); + + this.mockMvc.perform( + get("/tasks/thinexecutions") + .queryParam("page", "0") + .queryParam("size", "10") + .queryParam("sort", "END_TIME,desc") + ) + .andExpect(status().isOk()).andDo(this.documentationHandler.document( + queryParameters( + parameterWithName("page").optional() + .description("The zero-based page number"), + parameterWithName("size").optional() + .description("The requested page size"), + parameterWithName("sort").optional() + .description("The sort criteria: column name and optional sort direction.
Example: END_TIME,desc") + ), responseFields( - subsectionWithPath("_embedded.taskExecutionResourceList") - .description("Contains a collection of Task Executions/"), - subsectionWithPath("_links.self").description("Link to the task execution resource"), - subsectionWithPath("page").description("Pagination properties")))); + subsectionWithPath("_embedded.taskExecutionThinResourceList") + .description("Contains a collection of thin Task Executions"), + subsectionWithPath("_links.self").description("Link to the task execution resource"), + subsectionWithPath("_links.first") + .description("Link to the first page of task execution resources") + .type(JsonFieldType.OBJECT) + .optional(), + subsectionWithPath("_links.last") + .description("Link to the last page of task execution resources") + .type(JsonFieldType.OBJECT) + .optional(), + subsectionWithPath("_links.next") + .description("Link to the next page of task execution resources") + .type(JsonFieldType.OBJECT) + .optional(), + subsectionWithPath("_links.prev") + .description("Link to the previous page of task execution resources") + .type(JsonFieldType.OBJECT) + .optional(), + subsectionWithPath("page").description("Pagination properties")))); } @Test - public void listTaskExecutionsByName() throws Exception { + void listTaskThinExecutionsByName() throws Exception { this.mockMvc.perform( - get("/tasks/executions") - .param("name", "taskB") - .param("page", "0") - .param("size", "10")) - .andDo(print()) + get("/tasks/thinexecutions") + .queryParam("name", "taskB") + .queryParam("page", "0") + .queryParam("size", "10") + .queryParam("sort", "END_TIME,desc") + ) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)"), - parameterWithName("name") - .description("The name associated with the task execution")), + queryParameters( + parameterWithName("page").optional() + .description("The zero-based page number"), + parameterWithName("size").optional() + .description("The requested page size"), + parameterWithName("name") + .description("The name associated with the task execution"), + parameterWithName("sort").optional() + .description("The sort criteria: column name and optional sort direction. Example: END_TIME,desc") + ), + responseFields( + subsectionWithPath("_embedded.taskExecutionThinResourceList") + .description("Contains a collection of thin Task Executions"), + subsectionWithPath("_links.self").description("Link to the task execution resource"), + subsectionWithPath("page").description("Pagination properties")))); + } + @Test + void listTaskExecutionsByName() throws Exception { + this.mockMvc.perform( + get("/tasks/executions") + .queryParam("name", "taskB") + .queryParam("page", "0") + .queryParam("size", "10") + .queryParam("sort", "END_TIME,desc") + ) + .andExpect(status().isOk()).andDo(this.documentationHandler.document( + queryParameters( + parameterWithName("page").optional() + .description("The zero-based page number"), + parameterWithName("size").optional() + .description("The requested page size"), + parameterWithName("name") + .description("The name associated with the task execution"), + parameterWithName("sort").optional() + .description("The sort criteria: column name and optional sort direction.
Example: END_TIME,desc")), responseFields( - subsectionWithPath("_embedded.taskExecutionResourceList") - .description("Contains a collection of Task Executions/"), - subsectionWithPath("_links.self").description("Link to the task execution resource"), - subsectionWithPath("page").description("Pagination properties")))); + subsectionWithPath("_embedded.taskExecutionResourceList") + .description("Contains a collection of Task Executions"), + subsectionWithPath("_links.self").description("Link to the task execution resource"), + subsectionWithPath("page").description("Pagination properties")))); } @Test - public void stopTask() throws Exception { + void stopTask() throws Exception { this.mockMvc.perform( - post("/tasks/executions") - .param("name", "taskA") - .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") - .param("arguments", "--server.port=8080 --foo=bar")) + post("/tasks/executions") + .queryParam("name", "taskA") + .queryParam("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") + .queryParam("arguments", "--server.port=8080 --foo=bar") + ) .andExpect(status().isCreated()); this.mockMvc.perform( - post("/tasks/executions/{id}", 1) - .param("platform", "default")) - .andDo(print()) + post("/tasks/executions/{id}", 1) + ) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( - pathParameters( - parameterWithName("id").description("The ids of an existing task execution (required)") - ), - requestParameters(parameterWithName("platform") - .description("The platform associated with the task execution(optional)")))); + pathParameters( + parameterWithName("id").description("The id of an existing task execution") + ) + ) + ); } @Test - public void taskExecutionRemove() throws Exception { + void taskExecutionRemove() throws Exception { - documentation.dontDocument( () -> this.mockMvc.perform( - post("/tasks/executions") - .param("name", "taskB") - .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") - .param("arguments", "--server.port=8080 --foo=bar")) + documentation.dontDocument(() -> this.mockMvc.perform( + post("/tasks/executions") + .queryParam("name", "taskB") + .queryParam("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") + .queryParam("arguments", "--server.port=8080 --foo=bar")) .andExpect(status().isCreated())); this.mockMvc.perform( - delete("/tasks/executions/{ids}?action=CLEANUP", "1")) - .andDo(print()) + delete("/tasks/executions/{ids}?action=CLEANUP", "1")) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( - requestParameters(parameterWithName("action").description("Optional. Defaults to: CLEANUP.")), + queryParameters(parameterWithName("action").optional().description("The action to perform. Defaults to CLEANUP.")), pathParameters(parameterWithName("ids") - .description("The id of an existing task execution (required). Multiple comma separated values are accepted.")) + .description("The id of an existing task execution.
Multiple comma-separated values are accepted.")) )); } @Test - public void taskExecutionRemoveAndTaskDataRemove() throws Exception { + void taskExecutionRemoveAndTaskDataRemove() throws Exception { this.mockMvc.perform( - delete("/tasks/executions/{ids}?action=CLEANUP,REMOVE_DATA", "1,2")) - .andDo(print()) + delete("/tasks/executions/{ids}?action=CLEANUP,REMOVE_DATA", "1,2")) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( - requestParameters(parameterWithName("action").description("Using both actions CLEANUP and REMOVE_DATA simultaneously.")), + queryParameters( + parameterWithName("action").optional().description("Using both actions CLEANUP and REMOVE_DATA simultaneously.") + ), pathParameters(parameterWithName("ids") - .description("Providing 2 comma separated task execution id values.")) + .description("Providing 2 comma-separated task execution id values.") + ) )); } - private void createTaskDefinition(String taskName) throws Exception{ - documentation.dontDocument( () -> this.mockMvc.perform( + private void createTaskDefinition(String taskName) throws Exception { + documentation.dontDocument(() -> + this.mockMvc.perform( post("/tasks/definitions") - .param("name", taskName) - .param("definition", "timestamp --format='yyyy MM dd'")) - .andExpect(status().isOk())); + .queryParam("name", taskName) + .queryParam("definition", "timestamp --format='yyyy MM dd'") + ) + ); + } + private void cleanupTaskExecutions(String taskName) throws Exception { + documentation.dontDocument(() -> this.mockMvc.perform( + delete("/tasks/executions") + .queryParam("name", taskName) + ) + ); + } + private void destroyTaskDefinition(String taskName) throws Exception { + documentation.dontDocument(() -> + this.mockMvc.perform( + delete("/tasks/definitions/{name}", taskName) + ) + ); } - private void destroyTaskDefinition(String taskName) throws Exception{ - documentation.dontDocument( () -> this.mockMvc.perform( - delete("/tasks/definitions/{name}", taskName)) - .andExpect(status().isOk())); + private void executeTask(String taskName) throws Exception { + documentation.dontDocument(() -> + this.mockMvc.perform( + post("/tasks/executions") + .queryParam("name", taskName) + .queryParam("arguments", "--server.port=8080 --foo=bar") + ) + ); } } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskLogsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskLogsDocumentation.java index b5e78a9544..742811e486 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskLogsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskLogsDocumentation.java @@ -1,5 +1,5 @@ /* - * Copyright 2019 the original author or authors. + * Copyright 2019-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
@@ -16,31 +16,37 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; +import java.time.Duration; + +import org.awaitility.Awaitility; +import org.junit.jupiter.api.MethodOrderer; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.cloud.dataflow.server.repository.TaskDeploymentRepository; +import org.springframework.cloud.dataflow.server.service.TaskExecutionService; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post; import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName; -import static org.springframework.restdocs.request.RequestDocumentation.requestParameters; -import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; +import static org.springframework.restdocs.request.RequestDocumentation.queryParameters; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; /** * Documentation for the {@code /tasks/logs} endpoint. * * @author Ilayaperumal Gopinathan + * @author Glenn Renfro + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) -public class TaskLogsDocumentation extends BaseDocumentation { +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodOrderer.MethodName.class) +class TaskLogsDocumentation extends BaseDocumentation { @Test - public void getLogsByTaskId() throws Exception { - registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE"); + void getLogsByTaskId() throws Exception { + registerApp(ApplicationType.task, "timestamp", "3.0.0"); String taskName = "taskA"; documentation.dontDocument( () -> this.mockMvc.perform( post("/tasks/definitions") @@ -53,15 +59,16 @@ public void getLogsByTaskId() throws Exception { .andExpect(status().isCreated()); TaskDeploymentRepository taskDeploymentRepository = springDataflowServer.getWebApplicationContext().getBean(TaskDeploymentRepository.class); - Thread.sleep(30000); + TaskExecutionService service = springDataflowServer.getWebApplicationContext().getBean(TaskExecutionService.class); + Awaitility.await().atMost(Duration.ofMillis(30000)).until(() -> service.getLog("default", + taskDeploymentRepository.findTopByTaskDefinitionNameOrderByCreatedOnAsc(taskName).getTaskDeploymentId()).length() > 0); this.mockMvc.perform( get("/tasks/logs/"+taskDeploymentRepository.findTopByTaskDefinitionNameOrderByCreatedOnAsc(taskName) .getTaskDeploymentId()).param("platformName", "default")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("platformName").description("The name of the platform the task is launched.")) + queryParameters( + parameterWithName("platformName").optional().description("The name of the platform where the task is launched.")) )); } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskPlatformDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskPlatformDocumentation.java index efb0cb7e34..36b2479e96 100644 ---
a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskPlatformDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskPlatformDocumentation.java @@ -16,40 +16,40 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; +import org.junit.jupiter.api.MethodOrderer; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; import static org.springframework.restdocs.payload.PayloadDocumentation.responseFields; import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath; import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName; -import static org.springframework.restdocs.request.RequestDocumentation.requestParameters; -import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; +import static org.springframework.restdocs.request.RequestDocumentation.queryParameters; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; /** * Documentation for the /tasks/platforms endpoint. * * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) -public class TaskPlatformDocumentation extends BaseDocumentation { +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodOrderer.MethodName.class) +class TaskPlatformDocumentation extends BaseDocumentation { @Test - public void listTaskPlatforms() throws Exception { + void listTaskPlatforms() throws Exception { this.mockMvc.perform( get("/tasks/platforms") .param("page", "0") .param("size", "10")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)")), + queryParameters( + parameterWithName("page").optional() + .description("The zero-based page number"), + parameterWithName("size").optional() + .description("The requested page size")), responseFields( subsectionWithPath("_embedded.launcherResourceList") .description("Contains a collection of Platform accounts for tasks"), diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskSchedulerDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskSchedulerDocumentation.java index f337d6cf89..bb8b4fdfae 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskSchedulerDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskSchedulerDocumentation.java @@ -16,11 +16,11 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import org.junit.After; -import org.junit.Before; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.MethodOrderer; +import org.junit.jupiter.api.Test; 
+import org.junit.jupiter.api.TestMethodOrder; import org.springframework.cloud.dataflow.core.ApplicationType; @@ -31,7 +31,7 @@ import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath; import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName; import static org.springframework.restdocs.request.RequestDocumentation.pathParameters; -import static org.springframework.restdocs.request.RequestDocumentation.requestParameters; +import static org.springframework.restdocs.request.RequestDocumentation.queryParameters; import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; @@ -39,68 +39,70 @@ * Documentation for the /tasks/schedules endpoint. * * @author Glenn Renfro + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) -public class TaskSchedulerDocumentation extends BaseDocumentation { +@SuppressWarnings({"NewClassNamingConvention","SameParameterValue"}) +@TestMethodOrder(MethodOrderer.MethodName.class) +class TaskSchedulerDocumentation extends BaseDocumentation { - @Before - public void setup() throws Exception { - registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE"); + @BeforeEach + void setup() throws Exception { + registerApp(ApplicationType.task, "timestamp", "3.0.0"); createTaskDefinition("mytaskname"); } - @After - public void tearDown() throws Exception { + @AfterEach + void tearDown() throws Exception { destroyTaskDefinition("mytaskname"); unregisterApp(ApplicationType.task, "timestamp"); } @Test - public void createSchedule() throws Exception { + void createSchedule() throws Exception { this.mockMvc.perform( post("/tasks/schedules") - .param("scheduleName", "myschedule") - .param("taskDefinitionName", "mytaskname") - .param("properties", "scheduler.cron.expression=00 22 17 ? *") - .param("arguments", "--foo=bar")) + .queryParam("scheduleName", "myschedule") + .queryParam("taskDefinitionName", "mytaskname") + .queryParam("platform", "default") + .queryParam("properties", "deployer.cron.expression=00 22 17 ? 
*") + .queryParam("arguments", "--foo=bar")) .andExpect(status().isCreated()) .andDo(this.documentationHandler.document( - requestParameters( + queryParameters( parameterWithName("scheduleName").description("The name for the created schedule"), + parameterWithName("platform").optional().description("The name of the platform where the task is launched"), parameterWithName("taskDefinitionName") .description("The name of the task definition to be scheduled"), parameterWithName("properties") .description("the properties that are required to schedule and launch the task"), - parameterWithName("arguments").description("the command line arguments to be used for launching the task")))); + parameterWithName("arguments").optional().description("the command line arguments to be used for launching the task")))); } @Test - public void deleteSchedule() throws Exception { + void deleteSchedule() throws Exception { this.mockMvc.perform( delete("/tasks/schedules/{scheduleName}", "mytestschedule")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters(parameterWithName("scheduleName") - .description("The name of an existing schedule (required)")))); + .description("The name of an existing schedule")))); } @Test - public void listFilteredSchedules() throws Exception { + void listFilteredSchedules() throws Exception { this.mockMvc.perform( get("/tasks/schedules/instances/{task-definition-name}", "FOO") - .param("page", "0") - .param("size", "10")) - .andDo(print()) + .queryParam("page", "0") + .queryParam("size", "10")) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters(parameterWithName("task-definition-name") - .description("Filter schedules based on the specified task definition (required)")), - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)")), + .description("Filter schedules based on the specified task definition")), + queryParameters( + parameterWithName("page").optional() + .description("The zero-based page number"), + parameterWithName("size").optional() + .description("The requested page size")), responseFields( subsectionWithPath("_embedded.scheduleInfoResourceList") .description("Contains a collection of Schedules/"), @@ -109,19 +111,18 @@ public void listFilteredSchedules() throws Exception { } @Test - public void listAllSchedules() throws Exception { + void listAllSchedules() throws Exception { this.mockMvc.perform( get("/tasks/schedules") - .param("page", "0") - .param("size", "10")) - .andDo(print()) + .queryParam("page", "0") + .queryParam("size", "10")) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)")), + queryParameters( + parameterWithName("page").optional() + .description("The zero-based page number"), + parameterWithName("size").optional() + .description("The requested page size")), responseFields( subsectionWithPath("_embedded.scheduleInfoResourceList") .description("Contains a collection of Schedules/"), @@ -132,8 +133,8 @@ public void listAllSchedules() throws Exception { private void createTaskDefinition(String taskName) throws Exception{ documentation.dontDocument( () -> this.mockMvc.perform( post("/tasks/definitions") - .param("name", taskName) -
.param("definition", "timestamp --format='yyyy MM dd'")) + .queryParam("name", taskName) + .queryParam("definition", "timestamp --format='yyyy MM dd'")) .andExpect(status().isOk())); } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskValidationDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskValidationDocumentation.java index cf462ce569..7f9a20bc24 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskValidationDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskValidationDocumentation.java @@ -16,11 +16,11 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import org.junit.After; -import org.junit.Before; -import org.junit.FixMethodOrder; -import org.junit.Test; -import org.junit.runners.MethodSorters; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.MethodOrderer; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; import org.springframework.cloud.dataflow.core.ApplicationType; @@ -38,30 +38,32 @@ * Documentation for the /tasks/validation endpoint. * * @author Glenn Renfro + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) -public class TaskValidationDocumentation extends BaseDocumentation { +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodOrderer.MethodName.class) +class TaskValidationDocumentation extends BaseDocumentation { - @Before - public void setup() throws Exception { - registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE"); + @BeforeEach + void setup() throws Exception { + registerApp(ApplicationType.task, "timestamp", "3.0.0"); createTaskDefinition("taskC"); } - @After - public void tearDown() throws Exception { + @AfterEach + void tearDown() throws Exception { destroyTaskDefinition("taskC"); unregisterApp(ApplicationType.task, "timestamp"); } - @Test - public void validateTask() throws Exception { + @Test + void validateTask() throws Exception { this.mockMvc.perform( get("/tasks/validation/{name}", "taskC")) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters( - parameterWithName("name").description("The name of a task definition to be validated (required)") + parameterWithName("name").description("The name of a task definition to be validated") ), responseFields( fieldWithPath("appName").description("The name of a task definition"), diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TasksInfoDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TasksInfoDocumentation.java index 82adf0363d..c63bd79079 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TasksInfoDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TasksInfoDocumentation.java @@ -16,11 +16,11 @@ package org.springframework.cloud.dataflow.server.rest.documentation; -import org.junit.After; -import org.junit.Before; -import org.junit.FixMethodOrder; -import org.junit.Test; -import 
org.junit.runners.MethodSorters; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.MethodOrderer; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestMethodOrder; import org.springframework.cloud.dataflow.core.ApplicationType; @@ -30,37 +30,37 @@ import static org.springframework.restdocs.payload.PayloadDocumentation.fieldWithPath; import static org.springframework.restdocs.payload.PayloadDocumentation.responseFields; import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath; -import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; /** * Documentation for the /tasks/info endpoint. * * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ -@FixMethodOrder(MethodSorters.NAME_ASCENDING) -public class TasksInfoDocumentation extends BaseDocumentation { +@SuppressWarnings("NewClassNamingConvention") +@TestMethodOrder(MethodOrderer.MethodName.class) +class TasksInfoDocumentation extends BaseDocumentation { - @Before - public void setup() throws Exception { - registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE"); + @BeforeEach + void setup() throws Exception { + registerApp(ApplicationType.task, "timestamp", "3.0.0"); createTaskDefinition("taskA"); createTaskDefinition("taskB"); } - @After - public void tearDown() throws Exception { + @AfterEach + void tearDown() throws Exception { destroyTaskDefinition("taskA"); destroyTaskDefinition("taskB"); unregisterApp(ApplicationType.task, "timestamp"); } @Test - public void getTaskExecutionsInfo() throws Exception { + void getTaskExecutionsInfo() throws Exception { this.mockMvc.perform( get("/tasks/info/executions?completed=false")) - .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( responseFields( diff --git a/spring-cloud-dataflow-classic-docs/src/test/resources/META-INF/build-info.properties b/spring-cloud-dataflow-classic-docs/src/test/resources/META-INF/build-info.properties new file mode 100644 index 0000000000..292487b111 --- /dev/null +++ b/spring-cloud-dataflow-classic-docs/src/test/resources/META-INF/build-info.properties @@ -0,0 +1,5 @@ +build.artifact=spring-cloud-dataflow-server +build.group=org.springframework.cloud +build.name=Spring Cloud Data Flow Server +build.time=2024-04-25T12\:36\:37.169Z +build.version=2.11.3-SNAPSHOT diff --git a/spring-cloud-dataflow-classic-docs/src/test/resources/git.properties b/spring-cloud-dataflow-classic-docs/src/test/resources/git.properties new file mode 100644 index 0000000000..3a17e69030 --- /dev/null +++ b/spring-cloud-dataflow-classic-docs/src/test/resources/git.properties @@ -0,0 +1,5 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=main +git.commit.id.abbrev=fddafed +git.commit.id.full=fddafed39b919981cbb5bd04bd7fb5266fa25309 +git.commit.time=2024-04-24T13\:35\:29+0200 diff --git a/spring-cloud-dataflow-classic-docs/src/test/resources/org/springframework/restdocs/templates/asciidoctor/path-parameters.snippet b/spring-cloud-dataflow-classic-docs/src/test/resources/org/springframework/restdocs/templates/asciidoctor/path-parameters.snippet index da64dda944..dfd62e5661 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/resources/org/springframework/restdocs/templates/asciidoctor/path-parameters.snippet +++ 
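The JUnit 4 to JUnit Jupiter conversion applied in each documentation class above follows one mechanical recipe: @Before/@After become @BeforeEach/@AfterEach, @FixMethodOrder(MethodSorters.NAME_ASCENDING) becomes @TestMethodOrder(MethodOrderer.MethodName.class), and classes and test methods drop the public modifier that JUnit 4 required. A compact sketch (the class name is illustrative):

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;

// was: @FixMethodOrder(MethodSorters.NAME_ASCENDING) on a public class
@TestMethodOrder(MethodOrderer.MethodName.class)
class ExampleDocumentation {

	@BeforeEach // was JUnit 4 @Before, which had to be public
	void setup() {
	}

	@AfterEach // was JUnit 4 @After
	void tearDown() {
	}

	@Test // org.junit.jupiter.api.Test; package-private visibility is fine
	void documentsEndpoint() {
	}
}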
b/spring-cloud-dataflow-classic-docs/src/test/resources/org/springframework/restdocs/templates/asciidoctor/path-parameters.snippet @@ -1,10 +1,11 @@ -{{path}} +`*{{path}}*` |=== -|Parameter|Description +|Parameter - Description {{#parameters}} -|{{#tableCellContent}}`+{{name}}+`{{/tableCellContent}} -|{{#tableCellContent}}{{description}}{{/tableCellContent}} +|{{#tableCellContent}}`*{{name}}*` {{#optional}} [small]#_(optional)_#{{/optional}}{{^optional}} [small]#*(required)*#{{/optional}}{{/tableCellContent}} + +{{#tableCellContent}}{{description}}{{/tableCellContent}} {{/parameters}} |=== \ No newline at end of file diff --git a/spring-cloud-dataflow-classic-docs/src/test/resources/org/springframework/restdocs/templates/asciidoctor/query-parameters.snippet b/spring-cloud-dataflow-classic-docs/src/test/resources/org/springframework/restdocs/templates/asciidoctor/query-parameters.snippet new file mode 100644 index 0000000000..a0712f5952 --- /dev/null +++ b/spring-cloud-dataflow-classic-docs/src/test/resources/org/springframework/restdocs/templates/asciidoctor/query-parameters.snippet @@ -0,0 +1,9 @@ +|=== +|Parameter - Description +{{#parameters}} +|{{#tableCellContent}}`*{{name}}*` {{#optional}} [small]#_(optional)_#{{/optional}}{{^optional}} [small]#*(required)*#{{/optional}}{{/tableCellContent}} + +{{#tableCellContent}}{{description}}{{/tableCellContent}} + +{{/parameters}} +|=== \ No newline at end of file diff --git a/spring-cloud-dataflow-classic-docs/src/test/resources/rest-docs-config.yml b/spring-cloud-dataflow-classic-docs/src/test/resources/rest-docs-config.yml index 061f02c72c..da305f6faa 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/resources/rest-docs-config.yml +++ b/spring-cloud-dataflow-classic-docs/src/test/resources/rest-docs-config.yml @@ -1,4 +1,6 @@ spring: + main: + allow-bean-definition-overriding: true cloud: dataflow: features: @@ -8,6 +10,9 @@ spring: metrics: collector: uri: http://localhost:${fakeMetricsCollector.port} + deployer: + local: + maximumConcurrentTasks: 50 autoconfigure: exclude: >- org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration, @@ -20,7 +25,10 @@ spring: org.springframework.cloud.dataflow.shell.autoconfigure.BaseShellAutoConfiguration, org.springframework.cloud.dataflow.server.config.cloudfoundry.CloudFoundryTaskPlatformAutoConfiguration, org.springframework.cloud.dataflow.server.config.kubernetes.KubernetesTaskPlatformAutoConfiguration -maven: - remoteRepositories: - springRepo: - url: https://repo.spring.io/libs-snapshot +management: + info: + build: + enabled: true + git: + enabled: true + mode: full \ No newline at end of file diff --git a/spring-cloud-dataflow-common/README.md b/spring-cloud-dataflow-common/README.md new file mode 100644 index 0000000000..781b568e5f --- /dev/null +++ b/spring-cloud-dataflow-common/README.md @@ -0,0 +1 @@ +# spring-cloud-dataflow-common diff --git a/spring-cloud-dataflow-common/pom.xml b/spring-cloud-dataflow-common/pom.xml new file mode 100644 index 0000000000..1daa30ff73 --- /dev/null +++ b/spring-cloud-dataflow-common/pom.xml @@ -0,0 +1,106 @@ + + + 4.0.0 + + spring-cloud-dataflow-common-parent + 3.0.0-SNAPSHOT + org.springframework.cloud + pom + + Spring Cloud Dataflow Common Parent + Common utilities sharing dataflow family + + + org.springframework.cloud + spring-cloud-dataflow-build + 3.0.0-SNAPSHOT + ../spring-cloud-dataflow-build + + + + 0.10.2 + 2.12.7 + + + + spring-cloud-dataflow-common-persistence + spring-cloud-dataflow-common-flyway + 
spring-cloud-dataflow-common-test-docker + spring-cloud-dataflow-common-test-docker-junit5 + spring-cloud-dataflow-common-dependencies + + + + + + com.github.zafarkhaja + java-semver + ${java-semver.version} + + + joda-time + joda-time + ${joda-time.version} + + + + + + spring + true + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + false + + + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + false + + + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + + + + + + diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-dependencies/pom.xml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-dependencies/pom.xml new file mode 100644 index 0000000000..9a05bdd4e0 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-dependencies/pom.xml @@ -0,0 +1,95 @@ + + + 4.0.0 + + spring-cloud-dataflow-dependencies-parent + org.springframework.cloud + 3.0.0-SNAPSHOT + ../../spring-cloud-dataflow-build/spring-cloud-dataflow-dependencies-parent + + spring-cloud-dataflow-common-dependencies + 3.0.0-SNAPSHOT + pom + Spring Cloud Dataflow Common Dependencies + Spring Cloud Dataflow Common Dependencies + + + + + + org.springframework.cloud + spring-cloud-dataflow-common-flyway + ${dataflow.version} + + + org.springframework.cloud + spring-cloud-dataflow-common-test-docker + ${dataflow.version} + + + org.springframework.cloud + spring-cloud-dataflow-common-test-docker-junit5 + ${dataflow.version} + + + + + + spring + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + false + + + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + false + + + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + + + + + diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/pom.xml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/pom.xml new file mode 100644 index 0000000000..d10b405347 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/pom.xml @@ -0,0 +1,78 @@ + + + 4.0.0 + + spring-cloud-dataflow-common-flyway + jar + Spring Cloud Dataflow Common Flyway Support + Spring Cloud Dataflow Common Flyway Support + + org.springframework.cloud + spring-cloud-dataflow-common-parent + 3.0.0-SNAPSHOT + + + + + 10.10.0 + + + + + org.springframework.boot + spring-boot + + + org.springframework + spring-jdbc + + + org.flywaydb + flyway-core + + + org.flywaydb + flyway-mysql + + + org.flywaydb + flyway-sqlserver + + + org.flywaydb + flyway-database-oracle + + + org.flywaydb + flyway-database-db2 + + + org.flywaydb + flyway-database-postgresql + + + org.slf4j + slf4j-api + + + org.springframework.cloud + spring-cloud-dataflow-common-persistence + ${dataflow.version} + + + org.springframework.boot + spring-boot-starter-test + test + + + + + + + src/main/resources + true + + + + diff --git 
a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/MySQL57Database.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/MySQL57Database.java new file mode 100644 index 0000000000..d548a6cc96 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/MySQL57Database.java @@ -0,0 +1,119 @@ +/* + * Copyright 2022-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.flywaydb.database.mysql; + +import java.sql.Connection; +import java.sql.SQLException; +import java.util.List; + +import org.flywaydb.core.api.MigrationVersion; +import org.flywaydb.core.api.configuration.Configuration; +import org.flywaydb.core.extensibility.Tier; +import org.flywaydb.core.internal.database.base.Database; +import org.flywaydb.core.internal.database.base.Table; +import org.flywaydb.core.internal.jdbc.JdbcConnectionFactory; +import org.flywaydb.core.internal.jdbc.StatementInterceptor; +import org.flywaydb.database.mysql.mariadb.MariaDBDatabaseType; + +public class MySQL57Database extends Database { + + private final MySQLDatabase delegateDatabase; + + public MySQL57Database(Configuration configuration, JdbcConnectionFactory jdbcConnectionFactory, StatementInterceptor statementInterceptor) { + this(configuration, jdbcConnectionFactory, statementInterceptor, new MySQLDatabase(configuration, jdbcConnectionFactory, statementInterceptor)); + } + + protected MySQL57Database(Configuration configuration, JdbcConnectionFactory jdbcConnectionFactory, StatementInterceptor statementInterceptor, MySQLDatabase delegateDatabase) { + super(configuration, jdbcConnectionFactory, statementInterceptor); + this.delegateDatabase = delegateDatabase; + } + + @Override + public String getRawCreateScript(Table table, boolean baseline) { + return delegateDatabase.getRawCreateScript(table, baseline); + } + + @Override + protected MySQLConnection doGetConnection(Connection connection) { + return delegateDatabase.doGetConnection(connection); + } + + @Override + protected MigrationVersion determineVersion() { + return delegateDatabase.determineVersion(); + } + + @Override + public void ensureSupported(Configuration configuration) { + ensureDatabaseIsRecentEnough("5.1"); + if (databaseType instanceof MariaDBDatabaseType) { + ensureDatabaseNotOlderThanOtherwiseRecommendUpgradeToFlywayEdition("10.4", List.of(Tier.ENTERPRISE), configuration); + recommendFlywayUpgradeIfNecessary("10.6"); + } else { + ensureDatabaseNotOlderThanOtherwiseRecommendUpgradeToFlywayEdition("5.7", List.of(Tier.ENTERPRISE), configuration); + recommendFlywayUpgradeIfNecessary("8.0"); + } + } + + @Override + public void close() { + try { + super.close(); + } finally { + delegateDatabase.close(); + } + } + + @Override + protected String doGetCurrentUser() throws SQLException { + return delegateDatabase.doGetCurrentUser(); + } + + 
@Override + public boolean supportsDdlTransactions() { + return delegateDatabase.supportsDdlTransactions(); + } + + @Override + public String getBooleanTrue() { + return delegateDatabase.getBooleanTrue(); + } + + @Override + public String getBooleanFalse() { + return delegateDatabase.getBooleanFalse(); + } + + @Override + public String getOpenQuote() { + return delegateDatabase.getOpenQuote(); + } + + @Override + public String getCloseQuote() { + return delegateDatabase.getCloseQuote(); + } + + @Override + public boolean catalogIsSchema() { + return delegateDatabase.catalogIsSchema(); + } + + @Override + public boolean useSingleConnection() { + return delegateDatabase.useSingleConnection(); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/MySQL57DatabaseType.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/MySQL57DatabaseType.java new file mode 100644 index 0000000000..04b39f74bf --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/MySQL57DatabaseType.java @@ -0,0 +1,34 @@ +/* + * Copyright 2022-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.flywaydb.database.mysql; + +import org.flywaydb.core.api.configuration.Configuration; +import org.flywaydb.core.internal.database.base.Database; +import org.flywaydb.core.internal.jdbc.JdbcConnectionFactory; +import org.flywaydb.core.internal.jdbc.StatementInterceptor; + +public class MySQL57DatabaseType extends MySQLDatabaseType { + + @Override + public Database createDatabase(Configuration configuration, JdbcConnectionFactory jdbcConnectionFactory, StatementInterceptor statementInterceptor) { + return new MySQL57Database(configuration, jdbcConnectionFactory, statementInterceptor); + } + + @Override + public int getPriority() { + return super.getPriority() + 1; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/mariadb/MariaDB57Database.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/mariadb/MariaDB57Database.java new file mode 100644 index 0000000000..9020304731 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/mariadb/MariaDB57Database.java @@ -0,0 +1,28 @@ +/* + * Copyright 2022-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
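The getPriority() overrides in these database types are the whole trick: Flyway discovers Plugin implementations via java.util.ServiceLoader (see the META-INF/services/org.flywaydb.core.extensibility.Plugin entries added later in this diff) and, among DatabaseType candidates that handle the same connection, prefers the highest priority. Returning super.getPriority() + 1 (or + 2 for the MariaDB variant) therefore lets these 5.7-tolerant types shade the stock ones. A rough sketch of the discovery step, for illustration only:

import java.util.ServiceLoader;

import org.flywaydb.core.extensibility.Plugin;

public class PluginDiscoveryExample {

	public static void main(String[] args) {
		// Flyway's plugin registry does effectively this at startup; the custom
		// types registered in META-INF/services then outrank the built-ins
		// because of their higher getPriority() values.
		for (Plugin plugin : ServiceLoader.load(Plugin.class)) {
			System.out.println(plugin.getClass().getName());
		}
	}
}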
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.flywaydb.database.mysql.mariadb; + +import org.flywaydb.core.api.configuration.Configuration; +import org.flywaydb.core.internal.jdbc.JdbcConnectionFactory; +import org.flywaydb.core.internal.jdbc.StatementInterceptor; +import org.flywaydb.database.mysql.MySQL57Database; + +public class MariaDB57Database extends MySQL57Database { + + public MariaDB57Database(Configuration configuration, JdbcConnectionFactory jdbcConnectionFactory, StatementInterceptor statementInterceptor) { + super(configuration, jdbcConnectionFactory, statementInterceptor, new MariaDBDatabase(configuration, jdbcConnectionFactory, statementInterceptor)); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/mariadb/MariaDB57DatabaseType.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/mariadb/MariaDB57DatabaseType.java new file mode 100644 index 0000000000..644e420895 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/mariadb/MariaDB57DatabaseType.java @@ -0,0 +1,34 @@ +/* + * Copyright 2022-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.flywaydb.database.mysql.mariadb; + +import org.flywaydb.core.api.configuration.Configuration; +import org.flywaydb.core.internal.database.base.Database; +import org.flywaydb.core.internal.jdbc.JdbcConnectionFactory; +import org.flywaydb.core.internal.jdbc.StatementInterceptor; + +public class MariaDB57DatabaseType extends MariaDBDatabaseType { + + @Override + public Database createDatabase(Configuration configuration, JdbcConnectionFactory jdbcConnectionFactory, StatementInterceptor statementInterceptor) { + return new MariaDB57Database(configuration, jdbcConnectionFactory, statementInterceptor); + } + + @Override + public int getPriority() { + return super.getPriority() + 2; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/AbstractCallback.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/AbstractCallback.java new file mode 100644 index 0000000000..58050cf9bb --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/AbstractCallback.java @@ -0,0 +1,102 @@ +/* + * Copyright 2019-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.flyway; + +import java.util.List; + +import org.flywaydb.core.api.callback.Callback; +import org.flywaydb.core.api.callback.Context; +import org.flywaydb.core.api.callback.Event; + +import org.springframework.jdbc.BadSqlGrammarException; +import org.springframework.util.ObjectUtils; + +/** + * Base implementation providing some shared features for Java-based callbacks. + * + * @author Janne Valkealahti + * + */ +public abstract class AbstractCallback implements Callback { + + private final Event event; + private final List<SqlCommand> commands; + private final SqlCommandsRunner runner = new SqlCommandsRunner(); + + /** + * Instantiates a new abstract callback. + * + * @param event the event to hook into + */ + public AbstractCallback(Event event) { + this(event, null); + } + + /** + * Instantiates a new abstract callback. + * + * @param event the event to hook into + * @param commands the sql commands to run + */ + public AbstractCallback(Event event, List<SqlCommand> commands) { + this.event = event; + this.commands = commands; + } + + @Override + public boolean supports(Event event, Context context) { + return ObjectUtils.nullSafeEquals(this.event, event); + } + + @Override + public boolean canHandleInTransaction(Event event, Context context) { + return true; + } + + @Override + public void handle(Event event, Context context) { + try { + runner.execute(context.getConnection(), getCommands(event, context)); + } + catch(Exception sqe) { + if (sqe instanceof BadSqlGrammarException) { + throw new DataFlowSchemaMigrationException( + "An exception occurred during migration. 
This may indicate " + + "that you have run Spring Batch Jobs or Spring Cloud " + + "Tasks prior to running Spring Cloud Data Flow for the first time. " + + "Data Flow must create these tables.", sqe); + + } + throw sqe; + } + } + + @Override + public String getCallbackName() { + return ""; + } + + /** + * Gets the commands. + * + * @param event the event + * @param context the context + * @return the commands + */ + public List<SqlCommand> getCommands(Event event, Context context) { + return commands; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/AbstractMigration.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/AbstractMigration.java new file mode 100644 index 0000000000..8625692121 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/AbstractMigration.java @@ -0,0 +1,57 @@ +/* + * Copyright 2019-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.flyway; + +import java.util.List; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; + +/** + * Base implementation providing some shared features for Java-based migrations. + * + * @author Janne Valkealahti + * + */ +public abstract class AbstractMigration extends BaseJavaMigration { + + private final List<SqlCommand> commands; + private final SqlCommandsRunner runner = new SqlCommandsRunner(); + + /** + * Instantiates a new abstract migration. + * + * @param commands the commands + */ + public AbstractMigration(List<SqlCommand> commands) { + super(); + this.commands = commands; + } + + @Override + public void migrate(Context context) throws Exception { + runner.execute(context.getConnection(), getCommands()); + } + + /** + * Gets the commands. + * + * @return the commands + */ + public List<SqlCommand> getCommands() { + return commands; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/DataFlowSchemaMigrationException.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/DataFlowSchemaMigrationException.java new file mode 100644 index 0000000000..c59f339de4 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/DataFlowSchemaMigrationException.java @@ -0,0 +1,45 @@ +/* + * Copyright 2019-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
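AbstractMigration reduces a versioned Java migration to a list of SqlCommand objects executed by SqlCommandsRunner. A hypothetical subclass showing the intended shape; the class name encodes the version per Flyway's convention, and the table, index, and error code here are illustrative (1091 is MySQL's "can't DROP, check that it exists" code):

import java.util.Arrays;

import org.springframework.cloud.dataflow.common.flyway.AbstractMigration;
import org.springframework.cloud.dataflow.common.flyway.SqlCommand;

public class V2__Add_Notes_Column extends AbstractMigration {

	public V2__Add_Notes_Column() {
		super(Arrays.asList(
				SqlCommand.from("ALTER TABLE app_registration ADD notes VARCHAR(255)"),
				// the suppressed error code makes the drop tolerant of fresh schemas
				SqlCommand.from("DROP INDEX idx_obsolete ON app_registration", 1091)));
	}
}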
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.common.flyway; + +/** + * Exception is thrown when an error occurs while migrating the dataflow schema. + * + * @author Glenn Renfro + */ +public class DataFlowSchemaMigrationException extends RuntimeException{ + + private static final long serialVersionUID = 2000527476523962349L; + + /** + * Exception will use the message specified. + * + * @param message the text that will be associated with the exception. + * @param throwable the exception that is being wrapped. + */ + public DataFlowSchemaMigrationException(String message, Throwable throwable) { + super(message, throwable); + } + + /** + * Exception will use the message specified. + * @param message the text that will be associated with the exception. + */ + public DataFlowSchemaMigrationException(String message) { + super(message); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/DatabaseDriverUtils.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/DatabaseDriverUtils.java new file mode 100644 index 0000000000..cf5ab9a466 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/DatabaseDriverUtils.java @@ -0,0 +1,69 @@ +/* + * Copyright 2022-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.flyway; + +import java.sql.DatabaseMetaData; + +import javax.sql.DataSource; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.boot.jdbc.DatabaseDriver; +import org.springframework.jdbc.support.JdbcUtils; +import org.springframework.jdbc.support.MetaDataAccessException; + +/** + * Provides utility methods to help with {@link DatabaseDriver} related operations. + */ +public final class DatabaseDriverUtils { + + private static final Logger LOG = LoggerFactory.getLogger(DatabaseDriverUtils.class); + + private DatabaseDriverUtils() { + } + + /** + * Finds a database driver suitable for a datasource. + *

By default, the jdbc url reported from the database metadata is used to determine + * the driver. It also handles the special case where MariaDB reports a 'jdbc:maria' + * url even though the original url was prefixed with 'jdbc:mysql'. + * + * @param dataSource the datasource to inspect + * @return a database driver suitable for the datasource + */ + public static DatabaseDriver getDatabaseDriver(DataSource dataSource) { + // copied from boot's flyway auto-config to get matching db vendor id (but adjusted + // to handle the case when MariaDB driver is being used against MySQL database). + try { + String url = JdbcUtils.extractDatabaseMetaData(dataSource, DatabaseMetaData::getURL); + DatabaseDriver databaseDriver = DatabaseDriver.fromJdbcUrl(url); + if (databaseDriver == DatabaseDriver.MARIADB) { + // MariaDB reports a 'jdbc:maria' url even when user specified 'jdbc:mysql'. + // Verify the underlying database is not really MySQL. + String product = JdbcUtils.extractDatabaseMetaData(dataSource, DatabaseMetaData::getDatabaseProductName); + if (DatabaseDriver.MYSQL.name().equalsIgnoreCase(product)) { + LOG.info("Using MariaDB driver against MySQL database - will use MySQL"); + databaseDriver = DatabaseDriver.MYSQL; + } + } + return databaseDriver; + } + catch (MetaDataAccessException ex) { + throw new IllegalStateException(ex); + } + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/FlywayVendorReplacingApplicationContextInitializer.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/FlywayVendorReplacingApplicationContextInitializer.java new file mode 100644 index 0000000000..aa98aec176 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/FlywayVendorReplacingApplicationContextInitializer.java @@ -0,0 +1,98 @@ +/* + * Copyright 2022-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.common.flyway; + +import java.util.HashMap; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.boot.env.EnvironmentPostProcessor; +import org.springframework.context.ApplicationContextInitializer; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.core.Ordered; +import org.springframework.core.env.ConfigurableEnvironment; +import org.springframework.core.env.MapPropertySource; + +/** + * An {@link ApplicationContextInitializer} that replaces any configured 'spring.flyway.locations' + * properties that contain the '{vendor}' token with 'mysql' when using the MariaDB driver + * to access a MySQL database. + * + *
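A hypothetical call site for DatabaseDriverUtils, to show what the normalization above buys: code that branches on the vendor behaves identically whether the deployment configured the MySQL or the MariaDB driver against a MySQL server. The helper name and SQL are illustrative:

import javax.sql.DataSource;

import org.springframework.boot.jdbc.DatabaseDriver;
import org.springframework.cloud.dataflow.common.flyway.DatabaseDriverUtils;

public final class PagingSqlExample {

	// illustrative helper: pick a vendor-appropriate paging clause
	public static String limitClause(DataSource dataSource) {
		DatabaseDriver driver = DatabaseDriverUtils.getDatabaseDriver(dataSource);
		// MariaDB driver against a real MySQL server was normalized to MYSQL above
		return driver == DatabaseDriver.SQLSERVER
				? "OFFSET 0 ROWS FETCH NEXT 10 ROWS ONLY"
				: "LIMIT 10";
	}
}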

Typically property manipulation like this is implemented as an {@link EnvironmentPostProcessor} but + * in order to work with applications that are using Config server it must be a context initializer + * so it can run after the {@code org.springframework.cloud.bootstrap.config.PropertySourceBootstrapConfiguration} + * context initializer. + * + * @author Chris Bono + */ +public class FlywayVendorReplacingApplicationContextInitializer implements + ApplicationContextInitializer, Ordered { + + private final Logger log = LoggerFactory.getLogger(FlywayVendorReplacingApplicationContextInitializer.class); + + @Override + public void initialize(ConfigurableApplicationContext applicationContext) { + + ConfigurableEnvironment env = applicationContext.getEnvironment(); + + // If there is a spring.datasource.url prefixed w/ "jdbc:mysql:" and using the MariaDB driver then replace {vendor} + boolean usingMariaDriver = env.getProperty("spring.datasource.driver-class-name", "").equals("org.mariadb.jdbc.Driver"); + boolean usingMySqlUrl = env.getProperty("spring.datasource.url", "").startsWith("jdbc:mysql:"); + if (!(usingMariaDriver && usingMySqlUrl)) { + return; + } + + log.info("Using MariaDB driver w/ MySQL url - looking for '{vendor}' in 'spring.flyway.locations'"); + + // Look for spring.flyway.locations[0..N] and if found then override it w/ vendor replaced version + Map replacedLocations = new HashMap<>(); + + int prodIdx = 0; + while (true) { + String locationPropName = String.format("spring.flyway.locations[%d]", prodIdx++); + String configuredLocation = env.getProperty(locationPropName); + if (configuredLocation == null) { + break; + } + if (configuredLocation.contains("{vendor}")) { + String replaceLocation = configuredLocation.replace("{vendor}", "mysql"); + replacedLocations.put(locationPropName, replaceLocation); + } + } + + if (replacedLocations.isEmpty()) { + log.info("No properties with '{vendor}' found to replace"); + return; + } + + log.info("Replacing '{vendor}' in {}", replacedLocations); + + env.getPropertySources().addFirst(new MapPropertySource("overrideVendorInFlywayLocations", replacedLocations)); + } + + /** + * The precedence for execution order - should execute last. + * + * @return lowest precedence to ensure it executes after other initializers + */ + @Override + public int getOrder() { + return Ordered.LOWEST_PRECEDENCE; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SqlCommand.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SqlCommand.java new file mode 100644 index 0000000000..9e5b94b9aa --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SqlCommand.java @@ -0,0 +1,119 @@ +/* + * Copyright 2019-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
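The initializer's observable effect can be sketched with Boot's ApplicationContextRunner, the same harness the tests later in this diff use; the property values below are illustrative:

import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.cloud.dataflow.common.flyway.FlywayVendorReplacingApplicationContextInitializer;

public class VendorReplacementExample {

	public static void main(String[] args) {
		new ApplicationContextRunner()
				.withInitializer(new FlywayVendorReplacingApplicationContextInitializer())
				.withPropertyValues(
						"spring.datasource.url=jdbc:mysql://localhost:3306/dataflow",
						"spring.datasource.driver-class-name=org.mariadb.jdbc.Driver",
						"spring.flyway.locations[0]=classpath:db/{vendor}")
				.run(context -> System.out.println(
						// prints classpath:db/mysql - the MapPropertySource the
						// initializer adds first wins over the configured value
						context.getEnvironment().getProperty("spring.flyway.locations[0]")));
	}
}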
+ */ +package org.springframework.cloud.dataflow.common.flyway; + +import java.sql.Connection; +import java.util.Collections; +import java.util.List; + +import org.springframework.jdbc.core.JdbcTemplate; + +/** + * Class keeping a sql command and its possible suppressing sql codes together. + * + * @author Janne Valkealahti + * + */ +public class SqlCommand { + + private final String command; + private final List suppressedErrorCodes; + + /** + * Convenience method returning new instance. + * + * @param command the command + * @return the sql command + */ + public static SqlCommand from(String command) { + return new SqlCommand(command, null); + } + + /** + * Convenience method returning new instance. + * + * @param command the command + * @param suppressedErrorCode the suppressed error code + * @return the sql command + */ + public static SqlCommand from(String command, int suppressedErrorCode) { + return new SqlCommand(command, suppressedErrorCode); + } + + public SqlCommand() { + this(null, null); + } + + /** + * Instantiates a new sql command. + * + * @param command the command + * @param suppressedErrorCode the suppressed error code + */ + public SqlCommand(String command, int suppressedErrorCode) { + this(command, Collections.singletonList(suppressedErrorCode)); + } + + /** + * Instantiates a new sql command. + * + * @param command the command + * @param suppressedErrorCodes the suppressed error codes + */ + public SqlCommand(String command, List suppressedErrorCodes) { + this.command = command; + this.suppressedErrorCodes = suppressedErrorCodes; + } + + /** + * Gets the command. + * + * @return the command + */ + public String getCommand() { + return command; + } + + /** + * Gets the suppressed error codes. + * + * @return the suppressed error codes + */ + public List getSuppressedErrorCodes() { + return suppressedErrorCodes; + } + + /** + * Checks if this command can handle execution directly + * in a given jdbc template. + * + * @return true, if command can handle jdbc template + */ + public boolean canHandleInJdbcTemplate() { + return false; + } + + /** + * Handle command in a given jdbc template. + * + * @param jdbcTemplate the jdbc template + * @param connection the sql connection + */ + public void handle(JdbcTemplate jdbcTemplate, Connection connection) { + // expected to get handled in a sub-class + throw new UnsupportedOperationException("Not supported in a base class"); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SqlCommandsRunner.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SqlCommandsRunner.java new file mode 100644 index 0000000000..7e1e42989d --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SqlCommandsRunner.java @@ -0,0 +1,71 @@ +/* + * Copyright 2019-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.flyway; + +import java.sql.Connection; +import java.util.List; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.datasource.SingleConnectionDataSource; +import org.springframework.jdbc.support.SQLExceptionTranslator; +import org.springframework.util.ObjectUtils; + +/** + * Simple utility class to run commands with a connection and possibly suppress + * errors. + * + * @author Janne Valkealahti + * + */ +public class SqlCommandsRunner { + + private static final Logger logger = LoggerFactory.getLogger(SqlCommandsRunner.class); + + /** + * Execute list of {@code SqlCommand} by suppressing errors if those are given + * with a command. + * + * @param connection the connection + * @param commands the sql commands + */ + public void execute(Connection connection, List commands) { + JdbcTemplate jdbcTemplate = new JdbcTemplate(new SingleConnectionDataSource(connection, true)); + SQLExceptionTranslator origExceptionTranslator = jdbcTemplate.getExceptionTranslator(); + + for (SqlCommand command : commands) { + if (command.canHandleInJdbcTemplate()) { + command.handle(jdbcTemplate, connection); + } + else { + if(!ObjectUtils.isEmpty(command.getSuppressedErrorCodes())) { + jdbcTemplate.setExceptionTranslator(new SuppressSQLErrorCodesTranslator(command.getSuppressedErrorCodes())); + } + try { + logger.debug("Executing command {}", command.getCommand()); + jdbcTemplate.execute(command.getCommand()); + } catch (SuppressDataAccessException e) { + logger.debug("Suppressing error {}", e); + } + // restore original translator in case next command + // doesn't define suppressing codes. + jdbcTemplate.setExceptionTranslator(origExceptionTranslator); + } + } + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SuppressDataAccessException.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SuppressDataAccessException.java new file mode 100644 index 0000000000..28098afbc7 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SuppressDataAccessException.java @@ -0,0 +1,39 @@ +/* + * Copyright 2019-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.flyway; + +import org.springframework.dao.DataAccessException; + +/** + * Special type of {@link DataAccessException} indicating that error can be + * suppressed. 
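The suppression mechanics above are easiest to see from a caller's perspective: commands that declare error codes get a SuppressSQLErrorCodesTranslator installed for just their execution, a matching failure surfaces as SuppressDataAccessException, and the runner logs it and continues. A hypothetical usage (the SQL and the Oracle ORA-00942 code are illustrative):

import java.sql.Connection;
import java.util.Arrays;

import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner;

public final class RunnerExample {

	public static void dropAndRecreate(Connection connection) {
		SqlCommandsRunner runner = new SqlCommandsRunner();
		runner.execute(connection, Arrays.asList(
				// 942 = ORA-00942 "table or view does not exist"; suppressed so
				// the command is a no-op on a fresh schema
				SqlCommand.from("DROP TABLE legacy_audit", 942),
				SqlCommand.from("CREATE TABLE audit_records (id BIGINT)")));
	}
}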
+ * + * @author Janne Valkealahti + * + */ +@SuppressWarnings("serial") +public class SuppressDataAccessException extends DataAccessException { + + /** + * Instantiates a new suppress data access exception. + * + * @param msg the msg + * @param cause the cause + */ + public SuppressDataAccessException(String msg, Throwable cause) { + super(msg, cause); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SuppressSQLErrorCodesTranslator.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SuppressSQLErrorCodesTranslator.java new file mode 100644 index 0000000000..385daa4905 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SuppressSQLErrorCodesTranslator.java @@ -0,0 +1,68 @@ +/* + * Copyright 2019-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.flyway; + +import java.sql.SQLException; +import java.util.Arrays; +import java.util.List; + +import org.slf4j.LoggerFactory; + +import org.springframework.dao.DataAccessException; +import org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator; +import org.springframework.util.Assert; + +/** + * {@link SQLErrorCodeSQLExceptionTranslator} suppressing errors based on + * configured list of codes by throwing dedicated {@link SuppressDataAccessException}. + * + * @author Janne Valkealahti + * + */ +public class SuppressSQLErrorCodesTranslator extends SQLErrorCodeSQLExceptionTranslator { + + private static final org.slf4j.Logger logger = LoggerFactory.getLogger(SuppressSQLErrorCodesTranslator.class); + private final List errorCodes; + + /** + * Instantiates a new suppress SQL error codes translator. + * + * @param errorCode the error code + */ + public SuppressSQLErrorCodesTranslator(int errorCode) { + this(Arrays.asList(errorCode)); + } + + /** + * Instantiates a new suppress SQL error codes translator. 
+ * + * @param errorCodes the error codes + */ + public SuppressSQLErrorCodesTranslator(List errorCodes) { + super(); + Assert.notNull(errorCodes, "errorCodes must be set"); + this.errorCodes = errorCodes; + } + + @Override + protected DataAccessException customTranslate(String task, String sql, SQLException sqlEx) { + logger.debug("Checking sql error code {} against {}", sqlEx.getErrorCode(), errorCodes); + if (errorCodes.contains(sqlEx.getErrorCode())) { + return new SuppressDataAccessException(task, sqlEx); + } + return super.customTranslate(task, sql, sqlEx); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/resources/META-INF/services/org.flywaydb.core.extensibility.Plugin b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/resources/META-INF/services/org.flywaydb.core.extensibility.Plugin new file mode 100644 index 0000000000..ccab893acc --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/resources/META-INF/services/org.flywaydb.core.extensibility.Plugin @@ -0,0 +1,2 @@ +org.flywaydb.database.mysql.mariadb.MariaDB57DatabaseType +org.flywaydb.database.mysql.MySQL57DatabaseType diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/resources/org/flywaydb/database/version.txt b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/resources/org/flywaydb/database/version.txt new file mode 100644 index 0000000000..a4a0d40d19 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/resources/org/flywaydb/database/version.txt @@ -0,0 +1 @@ +@flyway.version@ diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/test/java/org/springframework/cloud/dataflow/common/flyway/FlywayVendorReplacingApplicationContextInitializerTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/test/java/org/springframework/cloud/dataflow/common/flyway/FlywayVendorReplacingApplicationContextInitializerTests.java new file mode 100644 index 0000000000..6270f5ff7a --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/test/java/org/springframework/cloud/dataflow/common/flyway/FlywayVendorReplacingApplicationContextInitializerTests.java @@ -0,0 +1,131 @@ +/* + * Copyright 2022-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
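Outside the runner, the translator can also be installed directly on a JdbcTemplate to make selected vendor error codes recoverable; a hypothetical sketch (1091 is MySQL's code for dropping a missing index):

import org.springframework.cloud.dataflow.common.flyway.SuppressDataAccessException;
import org.springframework.cloud.dataflow.common.flyway.SuppressSQLErrorCodesTranslator;
import org.springframework.jdbc.core.JdbcTemplate;

public final class TranslatorExample {

	public static void dropIfExists(JdbcTemplate jdbcTemplate) {
		jdbcTemplate.setExceptionTranslator(new SuppressSQLErrorCodesTranslator(1091));
		try {
			jdbcTemplate.execute("DROP INDEX idx_obsolete ON app_registration");
		}
		catch (SuppressDataAccessException ignored) {
			// the error code matched the suppress list - safe to continue
		}
	}
}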
+ */ + +package org.springframework.cloud.dataflow.common.flyway; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.stream.Stream; + +import org.junit.jupiter.api.Named; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import org.springframework.boot.test.context.runner.ApplicationContextRunner; +import org.springframework.core.env.ConfigurableEnvironment; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.params.provider.Arguments.arguments; + +/** + * Tests for {@link FlywayVendorReplacingApplicationContextInitializer}. + */ +class FlywayVendorReplacingApplicationContextInitializerTests { + + @ParameterizedTest(name = "{0}") + @MethodSource("vendorReplacedProperlyProvider") + void vendorReplacedProperly(boolean usingMySqlUrl, boolean usingMariaDriver, List configuredLocationProps, List finalLocationProps) { + List props = new ArrayList<>(); + props.add("spring.datasource.url=" + (usingMySqlUrl ? "jdbc:mysql://localhost:3306/dataflow?permitMysqlScheme" : "jdbc:mariadb://localhost:3306/dataflow")); + props.add("spring.datasource.driver-class-name=" + (usingMariaDriver ? "org.mariadb.jdbc.Driver" : "org.mysql.jdbc.Driver")); + props.addAll(configuredLocationProps); + + // Prime an actual env by running it through the AppContextRunner with the configured properties + new ApplicationContextRunner().withPropertyValues(props.toArray(new String[0])).run((context) -> { + ConfigurableEnvironment env = context.getEnvironment(); + + // Sanity check the locations props are as expected + configuredLocationProps.forEach((location) -> { + String key = location.split("=")[0]; + String value = location.split("=")[1]; + assertThat(env.getProperty(key)).isEqualTo(value); + }); + + // Run the env through the ACI + FlywayVendorReplacingApplicationContextInitializer flywayVendorReplacingInitializer = new FlywayVendorReplacingApplicationContextInitializer(); + flywayVendorReplacingInitializer.initialize(context); + + // Verify they are replaced as expected + finalLocationProps.forEach((location) -> { + String key = location.split("=")[0]; + String value = location.split("=")[1]; + assertThat(env.getProperty(key)).isEqualTo(value); + }); + }); + } + + private static Stream vendorReplacedProperlyProvider() { + return Stream.of( + arguments(Named.of("singleLocationWithVendor",true), true, + Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/{vendor}"), + Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/mysql") + ), + arguments(Named.of("singleLocationWithoutVendor",true), true, + Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/foo"), + Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/foo") + ), + arguments(Named.of("noLocations",true), true, + Collections.emptyList(), + Collections.emptyList() + ), + arguments(Named.of("multiLocationsAllWithVendor",true), true, + Arrays.asList( + "spring.flyway.locations[0]=classpath:org/skipper/db0/{vendor}", + "spring.flyway.locations[1]=classpath:org/skipper/db1/{vendor}", + "spring.flyway.locations[2]=classpath:org/skipper/db2/{vendor}"), + Arrays.asList( + "spring.flyway.locations[0]=classpath:org/skipper/db0/mysql", + "spring.flyway.locations[1]=classpath:org/skipper/db1/mysql", + 
"spring.flyway.locations[2]=classpath:org/skipper/db2/mysql") + ), + arguments(Named.of("multiLocationsSomeWithVendor",true), true, + Arrays.asList( + "spring.flyway.locations[0]=classpath:org/skipper/db0/{vendor}", + "spring.flyway.locations[1]=classpath:org/skipper/db1/foo", + "spring.flyway.locations[2]=classpath:org/skipper/db2/{vendor}"), + Arrays.asList( + "spring.flyway.locations[0]=classpath:org/skipper/db0/mysql", + "spring.flyway.locations[1]=classpath:org/skipper/db1/foo", + "spring.flyway.locations[2]=classpath:org/skipper/db2/mysql") + ), + arguments(Named.of("multiLocationsNoneWithVendor",true), true, + Arrays.asList( + "spring.flyway.locations[0]=classpath:org/skipper/db0/foo", + "spring.flyway.locations[1]=classpath:org/skipper/db1/bar", + "spring.flyway.locations[2]=classpath:org/skipper/db2/zaa"), + Arrays.asList( + "spring.flyway.locations[0]=classpath:org/skipper/db0/foo", + "spring.flyway.locations[1]=classpath:org/skipper/db1/bar", + "spring.flyway.locations[2]=classpath:org/skipper/db2/zaa") + ), + arguments(Named.of("mariaUrlWithMariaDriverDoesNotReplace",false), true, + Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/{vendor}"), + Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/{vendor}") + ), + arguments(Named.of("mysqlUrlWithMysqlDriverDoesNotReplace",true), false, + Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/{vendor}"), + Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/{vendor}") + ), + arguments(Named.of("mariaUrlMysqlDriverDoesNotReplace",false), false, + Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/{vendor}"), + Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/{vendor}") + ) + ); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/pom.xml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/pom.xml new file mode 100644 index 0000000000..6edc2eb238 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/pom.xml @@ -0,0 +1,70 @@ + + + 4.0.0 + + spring-cloud-dataflow-parent + org.springframework.cloud + 3.0.0-SNAPSHOT + ../../spring-cloud-dataflow-parent + + spring-cloud-dataflow-common-persistence + spring-cloud-dataflow-audit + Spring Cloud Data Flow Common Persistence Utilities + jar + + true + 3.4.1 + + + + org.hibernate.orm + hibernate-core + + + org.springframework.data + spring-data-jpa + + + org.springframework.boot + spring-boot-starter-jdbc + + + org.springframework.boot + spring-boot-starter-test + test + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + + diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseAwareLobType.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseAwareLobType.java new file mode 100644 index 0000000000..a50350b076 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseAwareLobType.java @@ -0,0 +1,63 @@ +/* + * Copyright 2019-2020 the 
original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.persistence.type; + +import org.hibernate.type.AbstractSingleColumnStandardBasicType; +import org.hibernate.type.descriptor.java.StringJavaType; +import org.hibernate.type.descriptor.jdbc.AdjustableJdbcType; +import org.hibernate.type.descriptor.jdbc.ClobJdbcType; +import org.hibernate.type.descriptor.jdbc.VarcharJdbcType; + +import org.springframework.util.Assert; + +/** + * Works around a Hibernate/Postgres incompatibility for columns of type text. + * + * @author Corneil du Plessis + */ +public class DatabaseAwareLobType extends AbstractSingleColumnStandardBasicType<String> { + + public static final DatabaseAwareLobType INSTANCE = new DatabaseAwareLobType(); + + public DatabaseAwareLobType() { + super( getDbDescriptor(), StringJavaType.INSTANCE ); + } + + public static AdjustableJdbcType getDbDescriptor() { + if( isPostgres() ) { + return VarcharJdbcType.INSTANCE; + } + else { + return ClobJdbcType.DEFAULT; + } + } + + /** + * Indicates whether the database driver in use is PostgreSQL. + * + * @return {@code true} if the driver in use is PostgreSQL + */ + private static boolean isPostgres() { + Boolean postgresDatabase = DatabaseTypeAwareInitializer.getPostgresDatabase(); + Assert.notNull(postgresDatabase, "Expected postgresDatabase to be set"); + return postgresDatabase; + } + + @Override + public String getName() { + return "database_aware_lob"; + } +} \ No newline at end of file diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseAwareLobUserType.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseAwareLobUserType.java new file mode 100644 index 0000000000..0b29fda148 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseAwareLobUserType.java @@ -0,0 +1,60 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.persistence.type; + +import java.util.function.BiConsumer; + +import org.hibernate.type.descriptor.java.BasicJavaType; +import org.hibernate.type.descriptor.java.StringJavaType; +import org.hibernate.type.descriptor.jdbc.AdjustableJdbcType; +import org.hibernate.type.descriptor.jdbc.ClobJdbcType; +import org.hibernate.type.descriptor.jdbc.JdbcType; +import org.hibernate.type.descriptor.jdbc.VarcharJdbcType; +import org.hibernate.usertype.BaseUserTypeSupport; +import org.hibernate.usertype.UserType; + +import org.springframework.util.Assert; + +/** + * A {@link UserType} that works around the Hibernate/PostgreSQL incompatibility for columns of + * type text. + * + * @author Corneil du Plessis + * @author Chris Bono + * @since 3.0.0 + */ +public class DatabaseAwareLobUserType extends BaseUserTypeSupport<String> { + + @Override + protected void resolve(BiConsumer<BasicJavaType<String>, JdbcType> resolutionConsumer) { + resolutionConsumer.accept(StringJavaType.INSTANCE, getDbDescriptor()); + } + + public static AdjustableJdbcType getDbDescriptor() { + if( isPostgres() ) { + return VarcharJdbcType.INSTANCE; + } + else { + return ClobJdbcType.DEFAULT; + } + } + + /** + * Determines whether the database driver in use is PostgreSQL. + * @return true if the driver in use is PostgreSQL. + */ + private static boolean isPostgres() { + Boolean postgresDatabase = DatabaseTypeAwareInitializer.getPostgresDatabase(); + Assert.notNull(postgresDatabase, "Expected postgresDatabase to be set"); + return postgresDatabase; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseTypeAwareInitializer.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseTypeAwareInitializer.java new file mode 100644 index 0000000000..c67454afe2 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseTypeAwareInitializer.java @@ -0,0 +1,50 @@ +/* + * Copyright 2019-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
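// Illustrative usage sketch (not part of this changeset): how a JPA entity might map a
// text column through DatabaseAwareLobUserType so it resolves to VARCHAR on PostgreSQL
// and to CLOB on other databases. The entity and column names below are hypothetical.
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;

import org.hibernate.annotations.Type;

import org.springframework.cloud.dataflow.common.persistence.type.DatabaseAwareLobUserType;

@Entity
public class AuditRecordExample {

	@Id
	private Long id;

	// Hibernate asks DatabaseAwareLobUserType for the JDBC type of this column, which in
	// turn consults the flag set by DatabaseTypeAwareInitializer at context startup.
	@Type(DatabaseAwareLobUserType.class)
	@Column(name = "audit_data")
	private String auditData;
}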
+ */ +package org.springframework.cloud.dataflow.common.persistence.type; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.context.ApplicationContextInitializer; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.core.Ordered; +import org.springframework.core.env.ConfigurableEnvironment; + +/** + * {@link ApplicationContextInitializer} that records, before the context refreshes, whether the + * configured datasource driver is PostgreSQL, so that the database aware LOB types can resolve + * the correct JDBC type. + */ +public class DatabaseTypeAwareInitializer implements ApplicationContextInitializer<ConfigurableApplicationContext>, Ordered { + private static final Logger logger = LoggerFactory.getLogger(DatabaseTypeAwareInitializer.class); + private static Boolean postgresDatabase = null; + + public DatabaseTypeAwareInitializer() { + } + + @Override + public void initialize(ConfigurableApplicationContext applicationContext) { + ConfigurableEnvironment env = applicationContext.getEnvironment(); + String property = env.getProperty("spring.datasource.driver-class-name", ""); + logger.info("checking database driver type: {}", property); + postgresDatabase = property.contains("postgres"); + } + + public static Boolean getPostgresDatabase() { + return postgresDatabase; + } + + @Override + public int getOrder() { + return Ordered.HIGHEST_PRECEDENCE; + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/resources/META-INF/spring.factories b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/resources/META-INF/spring.factories new file mode 100644 index 0000000000..189a0a6f71 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/resources/META-INF/spring.factories @@ -0,0 +1 @@ +org.springframework.context.ApplicationContextInitializer=org.springframework.cloud.dataflow.common.persistence.type.DatabaseTypeAwareInitializer \ No newline at end of file diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/test/java/org/springframework/cloud/dataflow/common/persistence/DatabaseTypeAwareInitializerTest.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/test/java/org/springframework/cloud/dataflow/common/persistence/DatabaseTypeAwareInitializerTest.java new file mode 100644 index 0000000000..1eec1f34a2 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/test/java/org/springframework/cloud/dataflow/common/persistence/DatabaseTypeAwareInitializerTest.java @@ -0,0 +1,38 @@ +package org.springframework.cloud.dataflow.common.persistence; + +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.boot.test.context.runner.ApplicationContextRunner; +import org.springframework.cloud.dataflow.common.persistence.type.DatabaseTypeAwareInitializer; +import org.springframework.core.env.ConfigurableEnvironment; + +import static org.assertj.core.api.Assertions.assertThat; + +class DatabaseTypeAwareInitializerTest { + private static final Logger logger = LoggerFactory.getLogger(DatabaseTypeAwareInitializerTest.class); + + @Test + void initPostgres() { + initDriverType("org.postgresql.Driver"); + assertThat(DatabaseTypeAwareInitializer.getPostgresDatabase()).isNotNull(); + assertThat(DatabaseTypeAwareInitializer.getPostgresDatabase()).isTrue(); + } + + @Test + void initMariaDB() { + initDriverType("org.mariadb.jdbc.Driver"); + assertThat(DatabaseTypeAwareInitializer.getPostgresDatabase()).isNotNull(); + assertThat(DatabaseTypeAwareInitializer.getPostgresDatabase()).isFalse(); + } + private void
initDriverType(String driverClassName) { + // Prime an actual env by running it through the AppContextRunner with the configured properties + new ApplicationContextRunner().withPropertyValues("spring.datasource.driver-class-name=" + driverClassName).run((context) -> { + ConfigurableEnvironment env = context.getEnvironment(); + logger.info("spring.datasource.driver-class-name={}", env.getProperty("spring.datasource.driver-class-name")); + DatabaseTypeAwareInitializer initializer = new DatabaseTypeAwareInitializer(); + initializer.initialize(context); + }); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/pom.xml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/pom.xml new file mode 100644 index 0000000000..10185f900c --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/pom.xml @@ -0,0 +1,36 @@ + + + 4.0.0 + + spring-cloud-dataflow-common-test-docker-junit5 + jar + + Spring Cloud Dataflow Common Docker Test JUnit5 Support + Utilities to help using junit5 + + + org.springframework.cloud + spring-cloud-dataflow-common-parent + 3.0.0-SNAPSHOT + + + true + + + + org.springframework.cloud + spring-cloud-dataflow-common-test-docker + ${project.version} + + + org.junit.jupiter + junit-jupiter-api + + + org.springframework.boot + spring-boot-starter-test + + + + diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose.java new file mode 100644 index 0000000000..3c4be95dd4 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose.java @@ -0,0 +1,42 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
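// Illustrative sketch (not part of this changeset): the initializer is normally picked up
// from META-INF/spring.factories, but it can also be registered explicitly, for example in
// a test or a custom bootstrap. MyApplication is a hypothetical Boot application class.
import org.springframework.boot.builder.SpringApplicationBuilder;

import org.springframework.cloud.dataflow.common.persistence.type.DatabaseTypeAwareInitializer;

public final class InitializerBootstrapExample {

	public static void main(String[] args) {
		new SpringApplicationBuilder(MyApplication.class)
				// runs before the context refreshes, so the PostgreSQL flag is set before
				// Hibernate resolves any database aware LOB columns
				.initializers(new DatabaseTypeAwareInitializer())
				.run(args);
	}
}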
+ */ +package org.springframework.cloud.dataflow.common.test.docker.junit5; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Repeatable; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.junit.jupiter.api.extension.ExtendWith; + +@Documented +@Inherited +@Retention(RetentionPolicy.RUNTIME) +@Target({ElementType.TYPE, ElementType.METHOD}) +@Repeatable(DockerComposes.class) +@ExtendWith(DockerComposeExtension.class) +public @interface DockerCompose { + + int order() default 0; + String id() default ""; + boolean start() default true; + String[] locations() default {}; + String[] services() default {}; + String log() default ""; +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeCluster.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeCluster.java new file mode 100644 index 0000000000..81b635c156 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeCluster.java @@ -0,0 +1,25 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.junit5; + +import org.springframework.cloud.dataflow.common.test.docker.compose.DockerComposeRule; + +public interface DockerComposeCluster { + + DockerComposeRule getRule(); + void start(); + void stop(); +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeExtension.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeExtension.java new file mode 100644 index 0000000000..e8b3d8c463 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeExtension.java @@ -0,0 +1,157 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
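// Illustrative usage sketch (not part of this changeset): a minimal test driven by the
// @DockerCompose annotation defined above. The compose file path and service name are
// hypothetical; the DockerComposeInfo parameter is resolved by DockerComposeExtension,
// which @DockerCompose is meta-annotated with.
import org.junit.jupiter.api.Test;

import static org.assertj.core.api.Assertions.assertThat;

@DockerCompose(locations = {"src/test/resources/my-compose.yml"}, services = {"myservice"})
class MyComposeBackedTests {

	@Test
	void containerIsAvailable(DockerComposeInfo dockerComposeInfo) {
		// the default cluster id is "" unless id() is set on the annotation
		assertThat(dockerComposeInfo.id("").getRule().containers().container("myservice")).isNotNull();
	}
}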
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.junit5; + +import java.lang.reflect.Method; +import java.util.List; + +import org.junit.jupiter.api.extension.AfterAllCallback; +import org.junit.jupiter.api.extension.AfterEachCallback; +import org.junit.jupiter.api.extension.BeforeAllCallback; +import org.junit.jupiter.api.extension.BeforeEachCallback; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.api.extension.ExtensionContext.Namespace; +import org.junit.jupiter.api.extension.ExtensionContext.Store; +import org.junit.jupiter.api.extension.ParameterContext; +import org.junit.jupiter.api.extension.ParameterResolutionException; +import org.junit.jupiter.api.extension.ParameterResolver; +import org.junit.platform.commons.util.AnnotationUtils; + +import org.springframework.cloud.dataflow.common.test.docker.compose.DockerComposeRule; +import org.springframework.cloud.dataflow.common.test.docker.junit5.DockerComposeManager.DockerComposeData; + +/** + * {@code JUnit5} extension handling docker compose integration. + * + * @author Janne Valkealahti + * + */ +public class DockerComposeExtension + implements BeforeAllCallback, BeforeEachCallback, AfterAllCallback, AfterEachCallback, ParameterResolver { + + private static final Namespace NAMESPACE = Namespace.create(DockerComposeExtension.class); + + @Override + public void beforeAll(ExtensionContext extensionContext) throws Exception { + // add class level compose info into compose manager + DockerComposeManager dockerComposeManager = getDockerComposeManager(extensionContext); + + Class testClass = extensionContext.getRequiredTestClass(); + String classKey = extensionContext.getRequiredTestClass().getSimpleName(); + + List dockerComposeAnnotations = AnnotationUtils.findRepeatableAnnotations(testClass, DockerCompose.class); + for (DockerCompose dockerComposeAnnotation : dockerComposeAnnotations) { + DockerComposeData dockerComposeData = new DockerComposeData(dockerComposeAnnotation.id(), + dockerComposeAnnotation.locations(), dockerComposeAnnotation.services(), + dockerComposeAnnotation.log(), dockerComposeAnnotation.start(), dockerComposeAnnotation.order()); + dockerComposeManager.addClassDockerComposeData(classKey, dockerComposeData); + } + } + + @Override + public void beforeEach(ExtensionContext context) throws Exception { + // add method level compose info into compose manager + DockerComposeManager dockerComposeManager = getDockerComposeManager(context); + + Method testMethod = context.getRequiredTestMethod(); + String classKey = context.getRequiredTestClass().getSimpleName(); + String methodKey = context.getRequiredTestMethod().getName(); + + List dockerComposeAnnotations = AnnotationUtils.findRepeatableAnnotations(testMethod, DockerCompose.class); + for (DockerCompose dockerComposeAnnotation : dockerComposeAnnotations) { + DockerComposeData dockerComposeData = new DockerComposeData(dockerComposeAnnotation.id(), + dockerComposeAnnotation.locations(), dockerComposeAnnotation.services(), + dockerComposeAnnotation.log(), dockerComposeAnnotation.start(), dockerComposeAnnotation.order()); + dockerComposeManager.addMethodDockerComposeData(classKey, methodKey, dockerComposeData); + } + dockerComposeManager.build(classKey, methodKey); + } + + @Override + public void afterEach(ExtensionContext context) throws Exception { + // clean containers related to class and method + 
DockerComposeManager dockerComposeManager = getDockerComposeManager(context); + String classKey = context.getRequiredTestClass().getSimpleName(); + String methodKey = context.getRequiredTestMethod().getName(); + dockerComposeManager.stop(classKey, methodKey); + } + + @Override + public void afterAll(ExtensionContext context) throws Exception { + } + + @Override + public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + return (parameterContext.getParameter().getType() == DockerComposeInfo.class); + } + + @Override + public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + DockerComposeManager dockerComposeManager = getDockerComposeManager(extensionContext); + return new DefaultDockerComposeInfo(dockerComposeManager); + } + + private static DockerComposeManager getDockerComposeManager(ExtensionContext context) { + Class testClass = context.getRequiredTestClass(); + Store store = getStore(context); + return store.getOrComputeIfAbsent(testClass, (key)->{return new DockerComposeManager();}, DockerComposeManager.class); + } + + private static Store getStore(ExtensionContext context) { + return context.getRoot().getStore(NAMESPACE); + } + + private static class DefaultDockerComposeInfo implements DockerComposeInfo { + private final DockerComposeManager dockerComposeManager; + + public DefaultDockerComposeInfo(DockerComposeManager dockerComposeManager) { + this.dockerComposeManager = dockerComposeManager; + } + + @Override + public DockerComposeCluster id(String id) { + return new DefaultDockerComposeCluster(dockerComposeManager, id); + } + } + + private static class DefaultDockerComposeCluster implements DockerComposeCluster { + + private final DockerComposeManager dockerComposeManager; + private final String id; + + public DefaultDockerComposeCluster(DockerComposeManager dockerComposeManager, String id) { + this.dockerComposeManager = dockerComposeManager; + this.id = id; + } + + @Override + public DockerComposeRule getRule() { + return dockerComposeManager.getRule(id); + } + + @Override + public void start() { + dockerComposeManager.startId(id); + } + + @Override + public void stop() { + dockerComposeManager.stopId(id); + } + } +} diff --git a/spring-cloud-dataflow-completion/src/test/support/boot13/src/main/java/com/acme/boot13/Main.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeInfo.java similarity index 74% rename from spring-cloud-dataflow-completion/src/test/support/boot13/src/main/java/com/acme/boot13/Main.java rename to spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeInfo.java index 3c146901d9..4df416d24e 100644 --- a/spring-cloud-dataflow-completion/src/test/support/boot13/src/main/java/com/acme/boot13/Main.java +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeInfo.java @@ -1,5 +1,5 @@ /* - * Copyright 2016 the original author or authors. + * Copyright 2018-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
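// Illustrative sketch (not part of this changeset): the extension above keeps a single
// DockerComposeManager per test class in the root ExtensionContext.Store, the standard
// JUnit 5 pattern for state that must survive across lifecycle callbacks.
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.ExtensionContext.Namespace;
import org.junit.jupiter.api.extension.ExtensionContext.Store;

final class StorePatternExample {

	private static final Namespace NAMESPACE = Namespace.create(StorePatternExample.class);

	static DockerComposeManager managerFor(ExtensionContext context) {
		Store store = context.getRoot().getStore(NAMESPACE);
		// computed once for the test class, then reused by beforeEach/afterEach callbacks
		return store.getOrComputeIfAbsent(context.getRequiredTestClass(),
				key -> new DockerComposeManager(), DockerComposeManager.class);
	}
}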
@@ -13,12 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +package org.springframework.cloud.dataflow.common.test.docker.junit5; -package com.acme.boot13; +public interface DockerComposeInfo { -public class Main { - - public static void main(String[] args) { - System.out.println("Hello World"); - } + DockerComposeCluster id(String id); } diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeManager.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeManager.java new file mode 100644 index 0000000000..1afc75d1e8 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeManager.java @@ -0,0 +1,273 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.junit5; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.springframework.cloud.dataflow.common.test.docker.compose.DockerComposeRule; +import org.springframework.cloud.dataflow.common.test.docker.compose.DockerComposeRule.Builder; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerComposeFiles; + +import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.HealthChecks.toHaveAllPortsOpen; + +/** + * Manages the lifecycle of {@link DockerComposeRule} instances built from class and method + * level {@link DockerCompose} definitions. + * + * @author Janne Valkealahti + * + */ +public class DockerComposeManager { + + private final Map<String, DockerComposeRule> rules = new HashMap<>(); + private final Map<String, List<DockerComposeData>> classKeys = new HashMap<>(); + private final Map<String, List<DockerComposeData>> methodKeys = new HashMap<>(); + + public DockerComposeManager() {} + + public void addClassDockerComposeData(String classKey, DockerComposeData dockerComposeData) { + String key = dockerComposeData.id + "$" + classKey; + classKeys.putIfAbsent(key, new ArrayList<>()); + classKeys.get(key).add(dockerComposeData); + } + + public void addMethodDockerComposeData(String classKey, String methodKey, DockerComposeData dockerComposeData) { + String key = dockerComposeData.id + "$" + classKey; + if (classKeys.containsKey(key)) { + classKeys.get(key).add(dockerComposeData); + } + else { + key = dockerComposeData.id + "$" + classKey + methodKey; + methodKeys.putIfAbsent(key, new ArrayList<>()); + methodKeys.get(key).add(dockerComposeData); + } + } + + public DockerComposeRule getRule(String id) { + for (Entry<String, DockerComposeRule> e : rules.entrySet()) { + String idMatch = e.getKey().substring(0, e.getKey().indexOf("$")); + if (id.equals(idMatch)) { + return e.getValue(); + } + } + throw new
IllegalArgumentException("Id " + id + " not found"); + } + + public void build(String classKey, String methodKey) { + + ArrayList<OrderingWrapper> toStart = new ArrayList<>(); + + // class level + for (Entry<String, List<DockerComposeData>> e : classKeys.entrySet()) { + String key = e.getKey(); + ArrayList<String> locations = new ArrayList<>(); + ArrayList<String> services = new ArrayList<>(); + boolean start = true; + Integer order = Integer.MAX_VALUE; + String log = ""; + for (DockerComposeData dockerComposeData : e.getValue()) { + locations.addAll(Arrays.asList(dockerComposeData.getLocations())); + services.addAll(Arrays.asList(dockerComposeData.getServices())); + if (!dockerComposeData.isStart()) { + start = false; + } + if (dockerComposeData.getOrder() < order) { + order = dockerComposeData.getOrder(); + } + if (dockerComposeData.getLog() != null && dockerComposeData.getLog().length() > 0) { + log = dockerComposeData.getLog(); + } + } + Builder<?> builder = DockerComposeRule.builder(); + builder.files(DockerComposeFiles.from(locations.toArray(new String[0]))); + for (String service : services) { + builder.waitingForService(service, toHaveAllPortsOpen(), DockerComposeRule.DEFAULT_TIMEOUT); + } + builder.saveLogsTo("build/test-docker-logs/" + log + classKey + "-" + methodKey); + DockerComposeRule rule = builder.build(); + rules.put(key, rule); + if (start) { + toStart.add(new OrderingWrapper(order, rule)); + } + } + + // method level + for (Entry<String, List<DockerComposeData>> e : methodKeys.entrySet()) { + String key = e.getKey(); + ArrayList<String> locations = new ArrayList<>(); + ArrayList<String> services = new ArrayList<>(); + boolean start = true; + Integer order = Integer.MAX_VALUE; + String log = ""; + for (DockerComposeData dockerComposeData : e.getValue()) { + locations.addAll(Arrays.asList(dockerComposeData.getLocations())); + services.addAll(Arrays.asList(dockerComposeData.getServices())); + if (!dockerComposeData.isStart()) { + start = false; + } + if (dockerComposeData.getOrder() < order) { + order = dockerComposeData.getOrder(); + } + if (dockerComposeData.getLog() != null && dockerComposeData.getLog().length() > 0) { + log = dockerComposeData.getLog(); + } + } + Builder<?> builder = DockerComposeRule.builder(); + builder.files(DockerComposeFiles.from(locations.toArray(new String[0]))); + for (String service : services) { + builder.waitingForService(service, toHaveAllPortsOpen(), DockerComposeRule.DEFAULT_TIMEOUT); + } + builder.saveLogsTo("build/test-docker-logs/" + log + classKey + "-" + methodKey); + DockerComposeRule rule = builder.build(); + rules.put(key, rule); + if (start) { + toStart.add(new OrderingWrapper(order, rule)); + } + } + + Collections.sort(toStart); + for (OrderingWrapper w : toStart) { + try { + w.getRule().before(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + } + + public void stop(String classKey, String methodKey) { + ArrayList<String> toRemove = new ArrayList<>(); + for (Entry<String, DockerComposeRule> e : rules.entrySet()) { + String idMatch = e.getKey().substring(e.getKey().indexOf("$") + 1, e.getKey().length()); + if (idMatch.equals(classKey)) { + toRemove.add(e.getKey()); + } + if (idMatch.equals(classKey + methodKey)) { + toRemove.add(e.getKey()); + } + } + for (String remove : toRemove) { + DockerComposeRule rule = rules.remove(remove); + if (rule != null) { + rule.after(); + } + } + // for now, just clear both class and method keys + classKeys.clear(); + methodKeys.clear(); + } + + public void startId(String id) { + DockerComposeRule rule = null; + for (Entry<String, DockerComposeRule> e : rules.entrySet()) { + String idMatch = e.getKey().substring(0, e.getKey().indexOf("$")); + if
(id.equals(idMatch)) { + rule = e.getValue(); + } + } + if (rule != null) { + try { + rule.before(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + } + + public void stopId(String id) { + DockerComposeRule rule = null; + for (Entry<String, DockerComposeRule> e : rules.entrySet()) { + String idMatch = e.getKey().substring(0, e.getKey().indexOf("$")); + if (id.equals(idMatch)) { + rule = e.getValue(); + } + } + if (rule != null) { + rule.after(); + } + } + + public static class DockerComposeData { + + private final String id; + private final boolean start; + private final String[] locations; + private final String[] services; + private final String log; + private final int order; + + public DockerComposeData(String id, String[] locations, String[] services, String log, boolean start, int order) { + this.id = id; + this.locations = locations; + this.services = services; + this.log = log; + this.start = start; + this.order = order; + } + + public String[] getLocations() { + return locations; + } + + public String[] getServices() { + return services; + } + + public String getLog() { + return log; + } + + public String getId() { + return id; + } + + public boolean isStart() { + return start; + } + + public int getOrder() { + return order; + } + } + + private static class OrderingWrapper implements Comparable<OrderingWrapper> { + Integer order; + DockerComposeRule rule; + + public OrderingWrapper(Integer order, DockerComposeRule rule) { + this.order = order; + this.rule = rule; + } + + public Integer getOrder() { + return order; + } + + public DockerComposeRule getRule() { + return rule; + } + + @Override + public int compareTo(OrderingWrapper o) { + return getOrder().compareTo(o.getOrder()); + } + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposes.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposes.java new file mode 100644 index 0000000000..303eec75de --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposes.java @@ -0,0 +1,32 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
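// Illustrative sketch (not part of this changeset): DockerComposeManager starts the rules
// it builds in ascending order(), so a dependency can be brought up before the cluster
// that needs it. The ids and compose file names below are hypothetical; the explicit
// @ExtendWith is needed because repeated annotations are wrapped in @DockerComposes,
// which is not meta-annotated with the extension.
import org.junit.jupiter.api.extension.ExtendWith;

@DockerCompose(id = "db", locations = {"src/test/resources/db-compose.yml"}, order = 0)
@DockerCompose(id = "app", locations = {"src/test/resources/app-compose.yml"}, order = 1)
@ExtendWith(DockerComposeExtension.class)
class OrderedClustersExample {
	// the "db" cluster is started before "app" because of its lower order value
}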
+ */ +package org.springframework.cloud.dataflow.common.test.docker.junit5; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Documented +@Inherited +@Retention(RetentionPolicy.RUNTIME) +@Target({ElementType.TYPE, ElementType.METHOD}) +public @interface DockerComposes { + + DockerCompose[] value(); +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/LegacyDockerComposeExtension.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/LegacyDockerComposeExtension.java new file mode 100644 index 0000000000..4b641ad6fc --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/LegacyDockerComposeExtension.java @@ -0,0 +1,60 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
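// Illustrative sketch (not part of this changeset): since @DockerCompose is
// @Repeatable(DockerComposes.class), the container annotation can also be written out
// explicitly; this is equivalent to repeating @DockerCompose. File names are
// hypothetical, and the extension is registered explicitly for the container form.
import org.junit.jupiter.api.extension.ExtendWith;

@DockerComposes({
	@DockerCompose(id = "c1", locations = {"src/test/resources/one.yml"}),
	@DockerCompose(id = "c2", locations = {"src/test/resources/two.yml"})
})
@ExtendWith(DockerComposeExtension.class)
class ExplicitContainerAnnotationExample {
}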
+ */ +package org.springframework.cloud.dataflow.common.test.docker.junit5; + +import java.util.List; + +import org.junit.jupiter.api.extension.AfterAllCallback; +import org.junit.jupiter.api.extension.BeforeAllCallback; +import org.junit.jupiter.api.extension.ExtensionContext; + +import org.springframework.cloud.dataflow.common.test.docker.compose.DockerComposeRule; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerComposeFiles; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ProjectName; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerMachine; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.ClusterWait; +import org.springframework.cloud.dataflow.common.test.docker.compose.logging.LogCollector; + +public class LegacyDockerComposeExtension extends DockerComposeRule implements BeforeAllCallback, AfterAllCallback { + + private LegacyDockerComposeExtension(DockerComposeFiles files, List clusterWaits, + LogCollector logCollector, DockerMachine machine, boolean pullOnStartup, ProjectName projectName) { + super(files, clusterWaits, logCollector, machine, pullOnStartup, projectName); + } + + @Override + public void afterAll(ExtensionContext context) throws Exception { + after(); + } + + @Override + public void beforeAll(ExtensionContext context) throws Exception { + before(); + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder extends DockerComposeRule.Builder { + + @Override + public LegacyDockerComposeExtension build() { + return new LegacyDockerComposeExtension(files, clusterWaits, logCollector, machine, pullOnStartup, + projectName); + } + } +} diff --git a/spring-cloud-dataflow-shell-core/.jdk8 b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/resources/application.properties similarity index 100% rename from spring-cloud-dataflow-shell-core/.jdk8 rename to spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/resources/application.properties diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose1Tests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose1Tests.java new file mode 100644 index 0000000000..144dcaf0f5 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose1Tests.java @@ -0,0 +1,45 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
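// Illustrative sketch (not part of this changeset): LegacyDockerComposeExtension keeps the
// old rule-style API usable as a programmatic JUnit 5 extension registered on a static
// field. The compose file path is hypothetical.
import org.junit.jupiter.api.extension.RegisterExtension;

class LegacyStyleTests {

	@RegisterExtension
	static LegacyDockerComposeExtension docker = createExtension();

	private static LegacyDockerComposeExtension createExtension() {
		LegacyDockerComposeExtension.Builder builder = LegacyDockerComposeExtension.builder();
		builder.file("src/test/resources/my-compose.yml");
		return builder.build();
	}
}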
+ */ +package org.springframework.cloud.dataflow.common.test.docker.junit5; + +import java.io.IOException; + +import org.junit.jupiter.api.Test; + +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerExecutionException; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; + +@DockerCompose(locations = {"src/test/resources/docker-compose-1.yml"}) +class DockerCompose1Tests { + + @Test + void compose(DockerComposeInfo dockerComposeInfo) throws IOException, InterruptedException { + assertThat(dockerComposeInfo).isNotNull(); + assertThat(dockerComposeInfo.id("").getRule()).isNotNull(); + assertThat(dockerComposeInfo.id("").getRule().containers().container("testservice1")).isNotNull(); + + Throwable thrown = catchThrowable(() -> { + dockerComposeInfo.id("").getRule().containers().container("testservice2").state(); + }); + assertThat(thrown) + .isInstanceOf(DockerExecutionException.class) + .hasNoCause(); + assertThat(thrown).message() + .containsIgnoringCase("No such service: testservice2"); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose2Tests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose2Tests.java new file mode 100644 index 0000000000..fe0081fd15 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose2Tests.java @@ -0,0 +1,43 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.junit5; + +import java.io.IOException; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + +@DockerCompose(id = DockerCompose2Tests.CLUSTER1, locations = {"src/test/resources/docker-compose-1.yml"}) +@DockerCompose(id = DockerCompose2Tests.CLUSTER2, locations = {"src/test/resources/docker-compose-2.yml"}, start = false) +@ExtendWith(DockerComposeExtension.class) +public class DockerCompose2Tests { + + public final static String CLUSTER1 = "dc1"; + public final static String CLUSTER2 = "dc2"; + public final static String CLUSTER3 = "dc3"; + public final static String CLUSTER4 = "dc4"; + + @Test + @DockerCompose(id = DockerCompose2Tests.CLUSTER3, locations = {"src/test/resources/docker-compose-3.yml"}) + @DockerCompose(id = DockerCompose2Tests.CLUSTER4, locations = {"src/test/resources/docker-compose-4.yml"}, start = false) + void compose(DockerComposeInfo dockerComposeInfo) throws IOException, InterruptedException { + + dockerComposeInfo.id(CLUSTER2).start(); + Thread.sleep(1000); + dockerComposeInfo.id(CLUSTER4).start(); + Thread.sleep(1000); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose3Tests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose3Tests.java new file mode 100644 index 0000000000..a78fb664da --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose3Tests.java @@ -0,0 +1,42 @@ +/* + * Copyright 2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.junit5; + +import java.io.IOException; + +import org.junit.jupiter.api.Test; + +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerExecutionException; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; + +@DockerCompose(locations = {"classpath:org/springframework/cloud/dataflow/common/test/docker/junit5/docker-compose-cp1.yml"}) +class DockerCompose3Tests { + + @Test + void compose(DockerComposeInfo dockerComposeInfo) throws IOException, InterruptedException { + assertThat(dockerComposeInfo).isNotNull(); + assertThat(dockerComposeInfo.id("").getRule()).isNotNull(); + assertThat(dockerComposeInfo.id("").getRule().containers().container("testservice1")).isNotNull(); + + Throwable thrown = catchThrowable(() -> { + dockerComposeInfo.id("").getRule().containers().container("testservice2").state(); + }); + assertThat(thrown).isInstanceOf(DockerExecutionException.class).hasNoCause() + .message().containsIgnoringCase("No such service: testservice2"); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-1.yml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-1.yml new file mode 100644 index 0000000000..cb8dbff2d9 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-1.yml @@ -0,0 +1,4 @@ +services: + testservice1: + image: 'springcloud/openjdk:latest' + command: sh -c 'sleep 60' diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-2.yml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-2.yml new file mode 100644 index 0000000000..4500793c1f --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-2.yml @@ -0,0 +1,5 @@ +services: + testservice2: + image: 'springcloud/openjdk:latest' + command: sh -c 'sleep 60' + diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-3.yml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-3.yml new file mode 100644 index 0000000000..38da37eb91 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-3.yml @@ -0,0 +1,5 @@ +services: + testservice3: + image: 'springcloud/openjdk:latest' + command: sh -c 'sleep 60' + diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-4.yml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-4.yml new file mode 100644 index 0000000000..1605ea0e78 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-4.yml @@ -0,0 +1,4 @@ +services: + testservice4: + image: 'springcloud/openjdk:latest' + command: sh -c 'sleep 60' diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-5.yml 
b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-5.yml new file mode 100644 index 0000000000..c7e4357f6c --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-5.yml @@ -0,0 +1,4 @@ +services: + testservice5: + image: 'springcloud/openjdk:latest' + command: sh -c 'sleep 60' diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-6.yml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-6.yml new file mode 100644 index 0000000000..682a582af4 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-6.yml @@ -0,0 +1,5 @@ +services: + testservice6: + image: 'springcloud/openjdk:latest' + command: sh -c 'sleep 60' + diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/org/springframework/cloud/dataflow/common/test/docker/junit5/docker-compose-cp1.yml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/org/springframework/cloud/dataflow/common/test/docker/junit5/docker-compose-cp1.yml new file mode 100644 index 0000000000..cb8dbff2d9 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/org/springframework/cloud/dataflow/common/test/docker/junit5/docker-compose-cp1.yml @@ -0,0 +1,4 @@ +services: + testservice1: + image: 'springcloud/openjdk:latest' + command: sh -c 'sleep 60' diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/pom.xml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/pom.xml new file mode 100644 index 0000000000..efdf8e931d --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/pom.xml @@ -0,0 +1,60 @@ + + + 4.0.0 + + spring-cloud-dataflow-common-test-docker + jar + + Spring Cloud Dataflow Common Docker Test Support + Utilities to help using docker + + + org.springframework.cloud + spring-cloud-dataflow-common-parent + 3.0.0-SNAPSHOT + + + true + + + + org.springframework + spring-core + + + commons-io + commons-io + + + org.apache.commons + commons-lang3 + + + org.slf4j + slf4j-api + + + org.awaitility + awaitility + + + joda-time + joda-time + + + com.github.zafarkhaja + java-semver + + + org.springframework.boot + spring-boot-starter-test + test + + + junit + junit + test + + + diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/DockerComposeRule.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/DockerComposeRule.java new file mode 100644 index 0000000000..448a116b06 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/DockerComposeRule.java @@ -0,0 +1,296 @@ +/* + * Copyright 2018-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.joda.time.Duration; +import org.joda.time.ReadableDuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerComposeFiles; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ProjectName; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ShutdownStrategy; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Cluster; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerCache; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerMachine; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerPort; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.ClusterHealthCheck; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.ClusterWait; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.HealthCheck; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.ConflictingContainerRemovingDockerCompose; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DefaultDockerCompose; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Docker; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecArgument; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecOption; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecutable; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeRunArgument; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeRunOption; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerExecutable; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.RetryingDockerCompose; +import org.springframework.cloud.dataflow.common.test.docker.compose.logging.DoNothingLogCollector; +import org.springframework.cloud.dataflow.common.test.docker.compose.logging.FileLogCollector; +import org.springframework.cloud.dataflow.common.test.docker.compose.logging.LogCollector; +import org.springframework.cloud.dataflow.common.test.docker.compose.logging.LogDirectory; + +import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.ClusterHealthCheck.serviceHealthCheck; +import static 
org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.ClusterHealthCheck.transformingHealthCheck; + +public class DockerComposeRule { + + public static final Duration DEFAULT_TIMEOUT = Duration.standardMinutes(2); + public static final int DEFAULT_RETRY_ATTEMPTS = 2; + private ProjectName projectName; + + private static final Logger log = LoggerFactory.getLogger(DockerComposeRule.class); + + public DockerPort hostNetworkedPort(int port) { + return new DockerPort(machine().getIp(), port, port); + } + + private DockerComposeFiles files; + private List<ClusterWait> clusterWaits; + private LogCollector logCollector; + private DockerMachine machine; + private boolean pullOnStartup; + + protected DockerComposeRule() {} + + public DockerComposeRule(DockerComposeFiles files, List<ClusterWait> clusterWaits, LogCollector logCollector, + DockerMachine machine, boolean pullOnStartup, ProjectName projectName) { + super(); + this.files = files; + this.clusterWaits = clusterWaits; + this.logCollector = logCollector; + this.machine = machine; + this.pullOnStartup = pullOnStartup; + this.projectName = projectName != null ? projectName : ProjectName.random(); + } + + public DockerComposeFiles files() { + return files; + } + + public List<ClusterWait> clusterWaits() { + return clusterWaits; + } + + public DockerMachine machine() { + return machine != null ? machine : DockerMachine.localMachine().build(); + } + + public ProjectName projectName() { + return projectName; + } + + public DockerComposeExecutable dockerComposeExecutable() { + return DockerComposeExecutable.builder() + .dockerComposeFiles(files()) + .dockerConfiguration(machine()) + .projectName(projectName()) + .build(); + } + + public DockerExecutable dockerExecutable() { + return DockerExecutable.builder() + .dockerConfiguration(machine()) + .build(); + } + + public Docker docker() { + return new Docker(dockerExecutable()); + } + + public ShutdownStrategy shutdownStrategy() { + return ShutdownStrategy.KILL_DOWN; + } + + public DockerCompose dockerCompose() { + DockerCompose dockerCompose = new DefaultDockerCompose(dockerComposeExecutable(), machine()); + return new RetryingDockerCompose(retryAttempts(), dockerCompose); + } + + public Cluster containers() { + return Cluster.builder() + .ip(machine().getIp()) + .containerCache(new ContainerCache(docker(), dockerCompose())) + .build(); + } + + protected int retryAttempts() { + return DEFAULT_RETRY_ATTEMPTS; + } + + protected boolean removeConflictingContainersOnStartup() { + return true; + } + + protected boolean pullOnStartup() { + return pullOnStartup; + } + + protected ReadableDuration nativeServiceHealthCheckTimeout() { + return DEFAULT_TIMEOUT; + } + + protected LogCollector logCollector() { + return logCollector != null ?
logCollector : new DoNothingLogCollector(); + } + + public void before() throws IOException, InterruptedException { + log.debug("Starting docker-compose cluster"); + if (pullOnStartup()) { + dockerCompose().pull(); + } + + dockerCompose().build(); + + DockerCompose upDockerCompose = dockerCompose(); + if (removeConflictingContainersOnStartup()) { + upDockerCompose = new ConflictingContainerRemovingDockerCompose(upDockerCompose, docker()); + } + upDockerCompose.up(); + + logCollector().startCollecting(dockerCompose()); + log.debug("Waiting for services"); + new ClusterWait(ClusterHealthCheck.nativeHealthChecks(), nativeServiceHealthCheckTimeout()) + .waitUntilReady(containers()); + clusterWaits().forEach(clusterWait -> clusterWait.waitUntilReady(containers())); + log.debug("docker-compose cluster started"); + } + + public void after() { + try { + shutdownStrategy().shutdown(this.dockerCompose(), this.docker()); + logCollector().stopCollecting(); + } catch (IOException | InterruptedException e) { + throw new RuntimeException("Error cleaning up docker compose cluster", e); + } + } + + public String exec(DockerComposeExecOption options, String containerName, + DockerComposeExecArgument arguments) throws IOException, InterruptedException { + return dockerCompose().exec(options, containerName, arguments); + } + + public String run(DockerComposeRunOption options, String containerName, + DockerComposeRunArgument arguments) throws IOException, InterruptedException { + return dockerCompose().run(options, containerName, arguments); + } + + public static Builder<?> builder() { + return new Builder<>(); + } + + public static class Builder<T extends Builder<T>> { + + protected DockerComposeFiles files; + protected List<ClusterWait> clusterWaits = new ArrayList<>(); + protected LogCollector logCollector; + protected DockerMachine machine; + protected boolean pullOnStartup; + protected ProjectName projectName; + + public T files(DockerComposeFiles files) { + this.files = files; + return self(); + } + + public T file(String dockerComposeYmlFile) { + return files(DockerComposeFiles.from(dockerComposeYmlFile)); + } + + /** + * Save the output of docker logs to files, stored in the path directory. + * + * See {@link LogDirectory} for some useful utilities, for example: + * {@link LogDirectory#circleAwareLogDirectory}.
+ * + * @param path directory into which log files should be saved + * @return builder for chaining + */ + public T saveLogsTo(String path) { + return logCollector(FileLogCollector.fromPath(path)); + } + + public T logCollector(LogCollector logCollector) { + this.logCollector = logCollector; + return self(); + } + + public T waitingForService(String serviceName, HealthCheck<Container> healthCheck, ReadableDuration timeout) { + ClusterHealthCheck clusterHealthCheck = serviceHealthCheck(serviceName, healthCheck); + return addClusterWait(new ClusterWait(clusterHealthCheck, timeout)); + } + + private T addClusterWait(ClusterWait clusterWait) { + clusterWaits.add(clusterWait); + return self(); + } + + public T waitingForServices(List<String> services, HealthCheck<List<Container>> healthCheck) { + return waitingForServices(services, healthCheck, DEFAULT_TIMEOUT); + } + + public T waitingForServices(List<String> services, HealthCheck<List<Container>> healthCheck, ReadableDuration timeout) { + ClusterHealthCheck clusterHealthCheck = serviceHealthCheck(services, healthCheck); + return addClusterWait(new ClusterWait(clusterHealthCheck, timeout)); + } + + public T waitingForHostNetworkedPort(int port, HealthCheck<DockerPort> healthCheck) { + return waitingForHostNetworkedPort(port, healthCheck, DEFAULT_TIMEOUT); + } + + public T waitingForHostNetworkedPort(int port, HealthCheck<DockerPort> healthCheck, ReadableDuration timeout) { + ClusterHealthCheck clusterHealthCheck = transformingHealthCheck(cluster -> new DockerPort(cluster.ip(), port, port), healthCheck); + return addClusterWait(new ClusterWait(clusterHealthCheck, timeout)); + } + + public T clusterWaits(Iterable<? extends ClusterWait> elements) { + elements.forEach(e -> clusterWaits.add(e)); + return self(); + } + + public T machine(DockerMachine machine) { + this.machine = machine; + return self(); + } + + public T pullOnStartup(boolean pullOnStartup) { + this.pullOnStartup = pullOnStartup; + return self(); + } + + public T projectName(ProjectName projectName) { + this.projectName = projectName; + return self(); + } + + @SuppressWarnings("unchecked") + final T self() { + return (T) this; + } + + public DockerComposeRule build() { + return new DockerComposeRule(files, clusterWaits, logCollector, machine, pullOnStartup, projectName); + } + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/AdditionalEnvironmentValidator.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/AdditionalEnvironmentValidator.java new file mode 100644 index 0000000000..aad7b918e9 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/AdditionalEnvironmentValidator.java @@ -0,0 +1,47 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
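// Illustrative sketch (not part of this changeset): DockerComposeRule can also be driven
// programmatically outside JUnit through before()/after(). The compose file path and
// service name are hypothetical.
import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.HealthChecks.toHaveAllPortsOpen;

public final class RuleLifecycleExample {

	public static void main(String[] args) throws Exception {
		DockerComposeRule rule = DockerComposeRule.builder()
				.file("src/test/resources/my-compose.yml")
				.waitingForService("myservice", toHaveAllPortsOpen(), DockerComposeRule.DEFAULT_TIMEOUT)
				.saveLogsTo("build/test-docker-logs/example")
				.build();
		rule.before(); // pull (optionally), build, up, then wait for the health checks
		try {
			// interact with rule.containers() here
		}
		finally {
			rule.after(); // shut the cluster down and stop log collection
		}
	}
}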
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/AdditionalEnvironmentValidator.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/AdditionalEnvironmentValidator.java new file mode 100644 index 0000000000..aad7b918e9 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/AdditionalEnvironmentValidator.java @@ -0,0 +1,47 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import org.springframework.util.Assert; + +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_CERT_PATH; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_HOST; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_TLS_VERIFY; + +public final class AdditionalEnvironmentValidator { + + private static final Set<String> ILLEGAL_VARIABLES = new HashSet<>(Arrays.asList(DOCKER_TLS_VERIFY, DOCKER_HOST, DOCKER_CERT_PATH)); + + private AdditionalEnvironmentValidator() {} + + public static Map<String, String> validate(Map<String, String> additionalEnvironment) { + HashSet<String> invalidVariables = new HashSet<>(additionalEnvironment.keySet()); + invalidVariables.retainAll(ILLEGAL_VARIABLES); + + String errorMessage = invalidVariables.stream() + .collect(Collectors.joining(", ", + "The following variables: ", + " cannot exist in your additional environment variable block as they will interfere with Docker.")); + Assert.state(invalidVariables.isEmpty(), errorMessage); + return additionalEnvironment; + } +}
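A short sketch of the validator's behavior (map contents are illustrative):

    Map<String, String> extra = new HashMap<>();
    extra.put("SOME_VARIABLE", "some value");
    AdditionalEnvironmentValidator.validate(extra);    // passes, returns the same map

    extra.put("DOCKER_HOST", "tcp://other-host:2376");
    AdditionalEnvironmentValidator.validate(extra);    // throws IllegalStateException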
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonEnvironmentValidator.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonEnvironmentValidator.java new file mode 100644 index 0000000000..084a2c9334 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonEnvironmentValidator.java @@ -0,0 +1,56 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import org.springframework.util.Assert; + +import static java.util.stream.Collectors.joining; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_CERT_PATH; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_HOST; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_TLS_VERIFY; + +public class DaemonEnvironmentValidator implements EnvironmentValidator { + + private static final Set<String> ILLEGAL_VARIABLES = new HashSet<>(Arrays.asList(DOCKER_TLS_VERIFY, DOCKER_HOST, DOCKER_CERT_PATH)); + + private static final DaemonEnvironmentValidator INSTANCE = new DaemonEnvironmentValidator(); + + public static DaemonEnvironmentValidator instance() { + return INSTANCE; + } + + private DaemonEnvironmentValidator() {} + + @Override + public void validateEnvironmentVariables(Map<String, String> dockerEnvironment) { + Set<String> invalidVariables = ILLEGAL_VARIABLES.stream() + .filter(dockerEnvironment::containsKey) + .collect(Collectors.toSet()); + + String errorMessage = invalidVariables.stream() + .collect(joining(", ", + "These variables were set: ", + ". They cannot be set when connecting to a local docker daemon.")); + Assert.state(invalidVariables.isEmpty(), errorMessage); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonHostIpResolver.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonHostIpResolver.java new file mode 100644 index 0000000000..2917b70aae --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonHostIpResolver.java @@ -0,0 +1,26 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +public class DaemonHostIpResolver implements HostIpResolver { + + public static final String LOCALHOST = "127.0.0.1"; + + @Override + public String resolveIp(String dockerHost) { + return LOCALHOST; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerComposeFiles.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerComposeFiles.java new file mode 100644 index 0000000000..8427cc0b76 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerComposeFiles.java @@ -0,0 +1,95 @@ +/* + * Copyright 2018-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import java.io.File; +import java.net.URL; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; + +import org.springframework.util.Assert; + +import static java.util.stream.Collectors.joining; +import static java.util.stream.Collectors.toList; + +public class DockerComposeFiles { + + private final List<File> dockerComposeFiles; + + public DockerComposeFiles(List<File> dockerComposeFiles) { + this.dockerComposeFiles = dockerComposeFiles; + } + + public static DockerComposeFiles from(String... dockerComposeFilenames) { + List<File> dockerComposeFiles = Arrays.asList(dockerComposeFilenames).stream() + .map(fileName -> { + Path path = null; + if (fileName.startsWith("classpath:")) { + URL resourceUrl = ClassLoader.getSystemResource(fileName.substring(10)); + if (resourceUrl == null) { + throw new IllegalArgumentException("Can't find resource " + fileName); + } + try { + path = Paths.get(resourceUrl.toURI()); + } catch (Exception e) { + throw new IllegalArgumentException("Can't find resource " + fileName, e); + } + } else { + path = Paths.get(fileName); + } + return path; + }) + .map(path -> path.toFile()) + .collect(toList()); + validateAtLeastOneComposeFileSpecified(dockerComposeFiles); + validateComposeFilesExist(dockerComposeFiles); + return new DockerComposeFiles(dockerComposeFiles); + } + + public static DockerComposeFiles fromxx(String...
dockerComposeFilenames) { + List<File> dockerComposeFiles = Arrays.asList(dockerComposeFilenames).stream() + .map(File::new) + .collect(toList()); + validateAtLeastOneComposeFileSpecified(dockerComposeFiles); + validateComposeFilesExist(dockerComposeFiles); + return new DockerComposeFiles(dockerComposeFiles); + } + + public List<String> constructComposeFileCommand() { + return dockerComposeFiles.stream() + .map(File::getAbsolutePath) + .map(f -> Arrays.asList("--file", f)) + .flatMap(Collection::stream) + .collect(toList()); + } + + private static void validateAtLeastOneComposeFileSpecified(List<File> dockerComposeFiles) { + Assert.state(!dockerComposeFiles.isEmpty(), "A docker compose file must be specified."); + } + + private static void validateComposeFilesExist(List<File> dockerComposeFiles) { + List<File> missingFiles = dockerComposeFiles.stream() + .filter(f -> !f.exists()) + .collect(toList()); + String errorMessage = missingFiles.stream() + .map(File::getAbsolutePath) + .collect(joining(", ", "The following docker-compose files: ", " do not exist.")); + Assert.state(missingFiles.isEmpty(), errorMessage); + } +}
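A sketch of resolving compose files from the filesystem and the test classpath (paths are illustrative), and of the CLI arguments derived from them:

    DockerComposeFiles files = DockerComposeFiles.from(
            "src/test/resources/docker-compose.yml",
            "classpath:docker-compose-override.yml");
    List<String> args = files.constructComposeFileCommand();
    // e.g. [--file, /abs/path/docker-compose.yml, --file, /abs/path/docker-compose-override.yml]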
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerType.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerType.java new file mode 100644 index 0000000000..92038f3dab --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerType.java @@ -0,0 +1,55 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import java.util.Map; +import java.util.Optional; + +public enum DockerType implements HostIpResolver, EnvironmentValidator { + DAEMON(DaemonEnvironmentValidator.instance(), new DaemonHostIpResolver()), + REMOTE(RemoteEnvironmentValidator.instance(), new RemoteHostIpResolver()); + + private final EnvironmentValidator validator; + private final HostIpResolver resolver; + + DockerType(EnvironmentValidator validator, HostIpResolver resolver) { + this.validator = validator; + this.resolver = resolver; + } + + @Override + public void validateEnvironmentVariables(Map<String, String> dockerEnvironment) { + validator.validateEnvironmentVariables(dockerEnvironment); + } + + @Override + public String resolveIp(String dockerHost) { + return resolver.resolveIp(dockerHost); + } + + public static Optional<DockerType> getFirstValidDockerTypeForEnvironment(Map<String, String> environment) { + for (DockerType currType : DockerType.values()) { + try { + currType.validateEnvironmentVariables(environment); + return Optional.of(currType); + } catch (IllegalStateException e) { + // ignore and try next type + } + } + return Optional.empty(); + } + +}
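Because each type's validator throws IllegalStateException on a mismatch, the helper effectively selects DAEMON when DOCKER_HOST is unset and REMOTE when it is set, as in this sketch:

    Map<String, String> env = System.getenv();
    DockerType type = DockerType.getFirstValidDockerTypeForEnvironment(env)
            .orElse(DockerType.DAEMON);
    String hostIp = type.resolveIp(env.getOrDefault("DOCKER_HOST", ""));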
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/EnvironmentValidator.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/EnvironmentValidator.java new file mode 100644 index 0000000000..e2b7137e03 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/EnvironmentValidator.java @@ -0,0 +1,34 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import java.util.Map; + +public interface EnvironmentValidator { + + /** + * Validates that the entries in the provided map are valid for the current environment. + * The provided map represents the environment variables that should be used for the + * process, where the keys are the environment variable names and the values are the variable values. + * If the validator determines that the state represented by the map is invalid (either because + * required values are missing or forbidden values are present), the method should throw + * an exception. + * + * @param dockerEnvironment A map representing the docker environment + */ + void validateEnvironmentVariables(Map<String, String> dockerEnvironment); + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/EnvironmentVariables.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/EnvironmentVariables.java new file mode 100644 index 0000000000..141a667401 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/EnvironmentVariables.java @@ -0,0 +1,25 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +public interface EnvironmentVariables { + + String TCP_PROTOCOL = "tcp://"; + String DOCKER_CERT_PATH = "DOCKER_CERT_PATH"; + String DOCKER_HOST = "DOCKER_HOST"; + String DOCKER_TLS_VERIFY = "DOCKER_TLS_VERIFY"; + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/HostIpResolver.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/HostIpResolver.java new file mode 100644 index 0000000000..3136bf4388 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/HostIpResolver.java @@ -0,0 +1,22 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +public interface HostIpResolver { + + String resolveIp(String dockerHost); + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/PackageVisible.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/PackageVisible.java new file mode 100644 index 0000000000..1541717ea9 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/PackageVisible.java @@ -0,0 +1,22 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Target; + +@Target({ElementType.PACKAGE, ElementType.TYPE}) +@interface PackageVisible {} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/ProjectName.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/ProjectName.java new file mode 100644 index 0000000000..17bd270f03 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/ProjectName.java @@ -0,0 +1,78 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import java.util.Arrays; +import java.util.List; +import java.util.UUID; +import java.util.function.Predicate; +import java.util.regex.Pattern; + +import org.springframework.util.Assert; + +@PackageVisible +public class ProjectName { + + private String projectName; + + public ProjectName(String projectName) { + this.projectName = projectName; + validate(); + } + + protected String projectName() { + return projectName; + } + + protected void validate() { + Assert.state(projectName().trim().length() > 0, "ProjectName must not be blank."); + Assert.state(validCharacters(), + "ProjectName '" + projectName() + "' not allowed, please use lowercase letters and numbers only."); + } + + // Only allows strings that docker-compose-cli would not modify + // https://github.com/docker/compose/blob/85e2fb63b3309280a602f1f76d77d3a82e53b6c2/compose/cli/command.py#L84 + protected boolean validCharacters() { + Predicate<String> illegalCharacters = Pattern.compile("[^a-z0-9]").asPredicate(); + return !illegalCharacters.test(projectName()); + } + + public String asString() { + return projectName(); + } + + public List<String> constructComposeFileCommand() { + return Arrays.asList("--project-name", projectName()); + } + + public static ProjectName random() { + return ProjectName.of(UUID.randomUUID().toString().substring(0, 8)); + } + + /** + * A name consisting of lowercase letters and numbers only. + * + * @param name the name + * @return project name + */ + public static ProjectName fromString(String name) { + return ProjectName.of(name); + } + + private static ProjectName of(String name) { + return new ProjectName(name); + } +}
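A sketch of the naming rules (values are illustrative):

    ProjectName.random();                  // e.g. "1b9a7f42", the first 8 chars of a UUID
    ProjectName.fromString("dataflow1");   // valid: lowercase letters and digits only
    ProjectName.fromString("My_Project");  // throws IllegalStateException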
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteEnvironmentValidator.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteEnvironmentValidator.java new file mode 100644 index 0000000000..8013335c06 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteEnvironmentValidator.java @@ -0,0 +1,72 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +import static java.util.stream.Collectors.joining; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_CERT_PATH; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_HOST; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_TLS_VERIFY; + +public class RemoteEnvironmentValidator implements EnvironmentValidator { + + private static final Set<String> SECURE_VARIABLES = new HashSet<>(Arrays.asList(DOCKER_TLS_VERIFY, DOCKER_CERT_PATH)); + private static final RemoteEnvironmentValidator VALIDATOR = new RemoteEnvironmentValidator(); + + public static RemoteEnvironmentValidator instance() { + return VALIDATOR; + } + + private RemoteEnvironmentValidator() {} + + @Override + public void validateEnvironmentVariables(Map<String, String> dockerEnvironment) { + Collection<String> missingVariables = getMissingEnvVariables(dockerEnvironment); + String errorMessage = missingVariables.stream() + .collect(joining(", ", + "Missing required environment variables: ", + ". Please run `docker-machine env <machine-name>` and " + + "ensure they are set on the DockerComposition.")); + + Assert.state(missingVariables.isEmpty(), errorMessage); + } + + private static Collection<String> getMissingEnvVariables(Map<String, String> dockerEnvironment) { + Collection<String> requiredVariables = new HashSet<>(Arrays.asList(DOCKER_HOST)); + requiredVariables.addAll(secureVariablesRequired(dockerEnvironment)); + return requiredVariables.stream() + .filter(envVariable -> !StringUtils.hasText(dockerEnvironment.get(envVariable))) + .collect(Collectors.toSet()); + } + + private static Set<String> secureVariablesRequired(Map<String, String> dockerEnvironment) { + return certVerificationEnabled(dockerEnvironment) ? SECURE_VARIABLES : new HashSet<>(); + } + + private static boolean certVerificationEnabled(Map<String, String> dockerEnvironment) { + return dockerEnvironment.containsKey(DOCKER_TLS_VERIFY); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteHostIpResolver.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteHostIpResolver.java new file mode 100644 index 0000000000..c7b4e8c347 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteHostIpResolver.java @@ -0,0 +1,33 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import java.util.Optional; + +import org.apache.commons.lang3.StringUtils; + +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.TCP_PROTOCOL; + +public class RemoteHostIpResolver implements HostIpResolver { + + @Override + public String resolveIp(String dockerHost) { + return Optional.ofNullable(org.springframework.util.StringUtils.hasText(dockerHost) ? dockerHost : null) + .map(host -> StringUtils.substringAfter(host, TCP_PROTOCOL)) + .map(ipAndMaybePort -> StringUtils.substringBefore(ipAndMaybePort, ":")) + .orElseThrow(() -> new IllegalArgumentException("DOCKER_HOST cannot be blank/null")); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/ShutdownStrategy.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/ShutdownStrategy.java new file mode 100644 index 0000000000..6c5bce4c13 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/ShutdownStrategy.java @@ -0,0 +1,56 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import java.io.IOException; + +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Docker; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.GracefulShutdownStrategy; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.KillDownShutdownStrategy; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.SkipShutdownStrategy; + +/** + * How should a cluster of containers be shut down by the `after` method of + * DockerComposeRule. + */ +public interface ShutdownStrategy { + + /** + * Call docker-compose down, kill, then rm. Allows containers up to 10 seconds to shut down + * gracefully. + * + *

<p>With this strategy, you will need to take care not to accidentally write images + * that ignore their down signal, for instance by putting their run command in as a + * string (which is interpreted by a SIGTERM-ignoring bash) rather than an array of strings. + */ + ShutdownStrategy GRACEFUL = new GracefulShutdownStrategy(); + /** + * Call docker-compose kill then down. + */ + ShutdownStrategy KILL_DOWN = new KillDownShutdownStrategy(); + /** + * Skip shutdown, leaving containers running after tests finish executing. + * + *

<p>You can use this option to speed up repeated test execution locally by leaving + * images up between runs. Do not commit it! You will be potentially abandoning + * long-running processes and leaking resources on your CI platform! + */ + ShutdownStrategy SKIP = new SkipShutdownStrategy(); + + void shutdown(DockerCompose dockerCompose, Docker docker) throws IOException, InterruptedException; + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/Cluster.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/Cluster.java new file mode 100644 index 0000000000..69d09b4d08 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/Cluster.java @@ -0,0 +1,79 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import java.io.IOException; +import java.util.List; +import java.util.Set; + +import static java.util.stream.Collectors.toList; + +public class Cluster { + + private final String ip; + private final ContainerCache containerCache; + + public Cluster(String ip, ContainerCache containerCache) { + this.ip = ip; + this.containerCache = containerCache; + } + + public String ip() { + return ip; + } + + public ContainerCache containerCache() { + return containerCache; + } + + public Container container(String name) { + return containerCache().container(name); + } + + public List<Container> containers(List<String> containerNames) { + return containerNames.stream() + .map(this::container) + .collect(toList()); + } + + public Set<Container> allContainers() throws IOException, InterruptedException { + return containerCache().containers(); + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + + private String ip; + private ContainerCache containerCache; + + public Builder ip(String ip) { + this.ip = ip; + return this; + } + + public Builder containerCache(ContainerCache containerCache) { + this.containerCache = containerCache; + return this; + } + + public Cluster build() { + return new Cluster(ip, containerCache); + } + } +}
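A sketch of addressing services through a cluster; `docker`, `dockerCompose`, and `machine` are assumed to be instances from this module's execution and connection packages, and the "db" service name is illustrative:

    Cluster cluster = Cluster.builder()
            .ip(machine.getIp())
            .containerCache(new ContainerCache(docker, dockerCompose))
            .build();
    Container db = cluster.container("db");          // cached per name
    Set<Container> all = cluster.allContainers();    // one Container per compose service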
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/Container.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/Container.java new file mode 100644 index 0000000000..d1cb148a45 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/Container.java @@ -0,0 +1,179 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailure; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Docker; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; + +public class Container { + + private static final Logger log = LoggerFactory.getLogger(Container.class); + + private final String containerName; + private final Docker docker; + private final DockerCompose dockerCompose; + + private Supplier<Ports> portMappings = memoize(() -> this.getDockerPorts()); + + public static <T> Supplier<T> memoize(Supplier<T> original) { + return new Supplier<T>() { + Supplier<T> delegate = this::firstTime; + boolean initialized; + + public T get() { + return delegate.get(); + } + + private synchronized T firstTime() { + if (!initialized) { + T value = original.get(); + delegate = () -> value; + initialized = true; + } + return delegate.get(); + } + }; + } + + public Container(String containerName, Docker docker, DockerCompose dockerCompose) { + this.containerName = containerName; + this.docker = docker; + this.dockerCompose = dockerCompose; + } + + public String getContainerName() { + return containerName; + } + + public SuccessOrFailure portIsListeningOnHttpAndCheckStatus2xx(int internalPort, Function<DockerPort, String> urlFunction) { + return portIsListeningOnHttp(internalPort, urlFunction, true); + } + + public SuccessOrFailure portIsListeningOnHttp(int internalPort, Function<DockerPort, String> urlFunction) { + return portIsListeningOnHttp(internalPort, urlFunction, false); + } + + public SuccessOrFailure portIsListeningOnHttp(int internalPort, Function<DockerPort, String> urlFunction, boolean andCheckStatus) { + try { + DockerPort port = port(internalPort); + if (!port.isListeningNow()) { + return SuccessOrFailure.failure("Internal port " + internalPort + " is not listening in container " + containerName); + } + return port.isHttpRespondingSuccessfully(urlFunction, andCheckStatus) + .mapFailure(failureMessage -> internalPort + " does not have a http response from " + urlFunction.apply(port) + ":\n" + failureMessage); + } catch (Exception e) { + return SuccessOrFailure.fromException(e); + } + } + + public DockerPort portMappedExternallyTo(int externalPort) { + return portMappings.get() + .stream() + .filter(port -> port.getExternalPort() == externalPort) + .findFirst() + .orElseThrow(() -> new
IllegalArgumentException("No port mapped externally to '" + externalPort + "' for container '" + containerName + "'")); + } + + public DockerPort port(int internalPort) { + return portMappings.get() + .stream() + .filter(port -> port.getInternalPort() == internalPort) + .findFirst() + .orElseThrow(() -> new IllegalArgumentException("No internal port '" + internalPort + "' for container '" + containerName + "': " + portMappings)); + } + + public void start() throws IOException, InterruptedException { + dockerCompose.start(this); + portMappings = memoize(() -> this.getDockerPorts()); + } + + public void stop() throws IOException, InterruptedException { + dockerCompose.stop(this); + } + + public void kill() throws IOException, InterruptedException { + dockerCompose.kill(this); + } + + public State state() throws IOException, InterruptedException { + String id = dockerCompose.id(this).orElse(null); + if (id == null) { + return State.DOWN; + } + return docker.state(id); + } + + public void up() throws IOException, InterruptedException { + dockerCompose.up(this); + } + + public Ports ports() { + return portMappings.get(); + } + + private Ports getDockerPorts() { + try { + return dockerCompose.ports(containerName); + } catch (IOException | InterruptedException e) { + throw new RuntimeException(e); + } + } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + if (object == null || getClass() != object.getClass()) { + return false; + } + Container container = (Container) object; + return Objects.equals(containerName, container.containerName); + } + + @Override + public int hashCode() { + return Objects.hash(containerName); + } + + @Override + public String toString() { + return "Container{containerName='" + containerName + "'}"; + } + + public SuccessOrFailure areAllPortsOpen() { + List<Integer> unavailablePorts = portMappings.get().stream() + .filter(port -> !port.isListeningNow()) + .map(DockerPort::getInternalPort) + .collect(Collectors.toList()); + + boolean allPortsOpen = unavailablePorts.isEmpty(); + String failureMessage = "The following ports failed to open: " + unavailablePorts; + + return SuccessOrFailure.fromBoolean(allPortsOpen, failureMessage); + } +}
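The memoize() helper above is what backs portMappings: the first get() performs the docker-compose port lookup and later calls return the cached Ports until start() installs a fresh supplier. The same pattern in isolation (the printed message only illustrates that the body runs once):

    Supplier<String> lookup = Container.memoize(() -> {
        System.out.println("computing once");
        return "value";
    });
    lookup.get();   // prints "computing once" and returns "value"
    lookup.get();   // returns the cached "value" without recomputing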
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerCache.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerCache.java new file mode 100644 index 0000000000..8a247d98ae --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerCache.java @@ -0,0 +1,48 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Docker; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; + +import static java.util.stream.Collectors.toSet; + +public class ContainerCache { + + private final Map<String, Container> containers = new HashMap<>(); + private final Docker docker; + private final DockerCompose dockerCompose; + + public ContainerCache(Docker docker, DockerCompose dockerCompose) { + this.docker = docker; + this.dockerCompose = dockerCompose; + } + + public Container container(String containerName) { + containers.putIfAbsent(containerName, new Container(containerName, docker, dockerCompose)); + return containers.get(containerName); + } + + public Set<Container> containers() throws IOException, InterruptedException { + return dockerCompose.services().stream().map(this::container).collect(toSet()); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerName.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerName.java new file mode 100644 index 0000000000..c2044408eb --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerName.java @@ -0,0 +1,135 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import java.util.Arrays; + +import static java.util.stream.Collectors.joining; + +public class ContainerName { + + private String rawName; + private String semanticName; + + public ContainerName(String rawName, String semanticName) { + this.rawName = rawName; + this.semanticName = semanticName; + } + + public String rawName() { + return rawName; + } + + public String semanticName() { + return semanticName; + } + + @Override + public String toString() { + return semanticName(); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((rawName == null) ? 0 : rawName.hashCode()); + result = prime * result + ((semanticName == null) ?
0 : semanticName.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + ContainerName other = (ContainerName) obj; + if (rawName == null) { + if (other.rawName != null) + return false; + } else if (!rawName.equals(other.rawName)) + return false; + if (semanticName == null) { + if (other.semanticName != null) + return false; + } else if (!semanticName.equals(other.semanticName)) + return false; + return true; + } + + public static ContainerName fromPsLine(String psLine) { + String[] lineComponents = psLine.split(" "); + String rawName = lineComponents[0]; + + if (probablyCustomName(rawName)) { + return ContainerName.builder() + .rawName(rawName) + .semanticName(rawName) + .build(); + } + + String semanticName = withoutDirectory(withoutScaleNumber(rawName)); + return ContainerName.builder() + .rawName(rawName) + .semanticName(semanticName) + .build(); + } + + private static boolean probablyCustomName(String rawName) { + return !(rawName.split("_").length >= 3); + } + + private static String withoutDirectory(String rawName) { + return Arrays.stream(rawName.split("_")) + .skip(1) + .collect(joining("_")); + } + + public static String withoutScaleNumber(String rawName) { + String[] components = rawName.split("_"); + return Arrays.stream(components) + .limit(components.length - 1) + .collect(joining("_")); + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + private String rawName; + private String semanticName; + + public Builder rawName(String rawName) { + this.rawName = rawName; + return this; + } + + public Builder semanticName(String semanticName) { + this.semanticName = semanticName; + return this; + } + + public ContainerName build() { + return new ContainerName(rawName, semanticName); + } + + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerNames.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerNames.java new file mode 100644 index 0000000000..50a32fdecb --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerNames.java @@ -0,0 +1,48 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import java.util.Arrays; +import java.util.List; +import java.util.stream.Stream; + +import static java.util.Collections.emptyList; +import static java.util.stream.Collectors.toList; + +public class ContainerNames { + + private ContainerNames() {} + + public static List<ContainerName> parseFromDockerComposePs(String psOutput) { + String[] psHeadAndBody = psOutput.split("-+(\r|\n)+"); + if (psHeadAndBody.length < 2) { + return emptyList(); + } + + String psBody = psHeadAndBody[1]; + return psBodyLines(psBody) + .map(ContainerName::fromPsLine) + .collect(toList()); + } + + private static Stream<String> psBodyLines(String psBody) { + String[] lines = psBody.split("(\r|\n)+"); + return Arrays.stream(lines) + .map(String::trim) + .filter(line -> !line.isEmpty()); + } + +}
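A sketch of the `docker-compose ps` shape the parser expects - a header, a dashed separator, then one row per container (the output below is illustrative):

    String psOutput =
            "Name   Command   State   Ports\n"
            + "-------------------------------\n"
            + "dir_db_1   /run.sh   Up   5432/tcp\n";
    List<ContainerName> names = ContainerNames.parseFromDockerComposePs(psOutput);
    // names.get(0).rawName()      -> "dir_db_1"
    // names.get(0).semanticName() -> "db" (directory prefix and scale number stripped)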
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/DockerMachine.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/DockerMachine.java new file mode 100644 index 0000000000..aab910e793 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/DockerMachine.java @@ -0,0 +1,164 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.AdditionalEnvironmentValidator; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerType; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.RemoteHostIpResolver; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerConfiguration; + +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_CERT_PATH; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_HOST; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_TLS_VERIFY; + +public class DockerMachine implements DockerConfiguration { + + private static final Logger log = LoggerFactory.getLogger(DockerMachine.class); + private static final DockerType FALLBACK_DOCKER_TYPE = DockerType.DAEMON; + + private final String hostIp; + private final Map<String, String> environment; + + public DockerMachine(String hostIp, Map<String, String> environment) { + this.hostIp = hostIp; + this.environment = environment; + } + + public String getIp() { + return hostIp; + } + + @Override + public ProcessBuilder configuredDockerComposeProcess() { + ProcessBuilder process = new ProcessBuilder(); + augmentGivenEnvironment(process.environment()); + return process; + } + + private void augmentGivenEnvironment(Map<String, String> environmentToAugment) { + environmentToAugment.putAll(environment); + } + + public static LocalBuilder localMachine() { + Map<String, String> systemEnv = System.getenv(); + Optional<DockerType> dockerType = DockerType.getFirstValidDockerTypeForEnvironment(systemEnv); + if (!dockerType.isPresent()) { + log.debug( + "Failed to determine Docker type (daemon or remote) based on current environment. " + + "Proceeding with {} as the type.", FALLBACK_DOCKER_TYPE); + } + + return new LocalBuilder(dockerType.orElse(FALLBACK_DOCKER_TYPE), systemEnv); + } + + public static LocalBuilder localMachine(DockerType dockerType) { + return new LocalBuilder(dockerType, System.getenv()); + } + + public static class LocalBuilder { + + private final DockerType dockerType; + private final Map<String, String> systemEnvironment; + private Map<String, String> additionalEnvironment = new HashMap<>(); + + LocalBuilder(DockerType dockerType, Map<String, String> systemEnvironment) { + this.dockerType = dockerType; + this.systemEnvironment = new HashMap<>(systemEnvironment); + } + + public LocalBuilder withAdditionalEnvironmentVariable(String key, String value) { + additionalEnvironment.put(key, value); + return this; + } + + public LocalBuilder withEnvironment(Map<String, String> newEnvironment) { + this.additionalEnvironment = new HashMap<>(newEnvironment != null ?
newEnvironment : new HashMap<>()); + return this; + } + + public DockerMachine build() { + dockerType.validateEnvironmentVariables(systemEnvironment); + AdditionalEnvironmentValidator.validate(additionalEnvironment); + Map<String, String> combinedEnvironment = new HashMap<>(); + combinedEnvironment.putAll(systemEnvironment); + combinedEnvironment.putAll(additionalEnvironment); + + String dockerHost = systemEnvironment.getOrDefault(DOCKER_HOST, ""); + return new DockerMachine(dockerType.resolveIp(dockerHost), new HashMap<>(combinedEnvironment)); + } + } + + public static RemoteBuilder remoteMachine() { + return new RemoteBuilder(); + } + + public static class RemoteBuilder { + + private final Map<String, String> dockerEnvironment = new HashMap<>(); + private Map<String, String> additionalEnvironment = new HashMap<>(); + + private RemoteBuilder() {} + + public RemoteBuilder host(String hostname) { + dockerEnvironment.put(DOCKER_HOST, hostname); + return this; + } + + public RemoteBuilder withTLS(String certPath) { + dockerEnvironment.put(DOCKER_TLS_VERIFY, "1"); + dockerEnvironment.put(DOCKER_CERT_PATH, certPath); + return this; + } + + public RemoteBuilder withoutTLS() { + dockerEnvironment.remove(DOCKER_TLS_VERIFY); + dockerEnvironment.remove(DOCKER_CERT_PATH); + return this; + } + + public RemoteBuilder withAdditionalEnvironmentVariable(String key, String value) { + additionalEnvironment.put(key, value); + return this; + } + + public RemoteBuilder withEnvironment(Map<String, String> newEnvironment) { + this.additionalEnvironment = new HashMap<>(newEnvironment != null ? newEnvironment : new HashMap<>()); + return this; + } + + public DockerMachine build() { + DockerType.REMOTE.validateEnvironmentVariables(dockerEnvironment); + AdditionalEnvironmentValidator.validate(additionalEnvironment); + + String dockerHost = dockerEnvironment.getOrDefault(DOCKER_HOST, ""); + String hostIp = new RemoteHostIpResolver().resolveIp(dockerHost); + + Map<String, String> environment = new HashMap<>(); + environment.putAll(dockerEnvironment); + environment.putAll(additionalEnvironment); + return new DockerMachine(hostIp, environment); + } + + } + +}
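A sketch of both builders (the host address and certificate path are illustrative):

    DockerMachine local = DockerMachine.localMachine()
            .withAdditionalEnvironmentVariable("SOME_VARIABLE", "some value")
            .build();

    DockerMachine remote = DockerMachine.remoteMachine()
            .host("tcp://192.168.99.100:2376")
            .withTLS("/path/to/certs")
            .build();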
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/DockerPort.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/DockerPort.java new file mode 100644 index 0000000000..f31147d4b3 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/DockerPort.java @@ -0,0 +1,151 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.net.InetSocketAddress; +import java.net.MalformedURLException; +import java.net.Socket; +import java.net.SocketException; +import java.net.URI; +import java.net.URL; +import java.util.Objects; +import java.util.function.Function; + +import javax.net.ssl.SSLHandshakeException; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailure; + +public class DockerPort { + + private static final Logger log = LoggerFactory.getLogger(DockerPort.class); + + private final String ip; + private final PortMapping portMapping; + + public DockerPort(String ip, int externalPort, int internalPort) { + this(ip, new PortMapping(externalPort, internalPort)); + } + + public DockerPort(String ip, PortMapping portMapping) { + this.ip = ip; + this.portMapping = portMapping; + } + + public String getIp() { + return ip; + } + + public int getExternalPort() { + return portMapping.getExternalPort(); + } + + public int getInternalPort() { + return portMapping.getInternalPort(); + } + + public boolean isListeningNow() { + try (Socket socket = new Socket()) { + socket.connect(new InetSocketAddress(ip, getExternalPort()), 500); + log.trace("External Port '{}' on ip '{}' was open", getExternalPort(), ip); + return true; + } catch (IOException e) { + return false; + } + } + + public boolean isHttpResponding(Function<DockerPort, String> urlFunction, boolean andCheckStatus) { + return isHttpRespondingSuccessfully(urlFunction, andCheckStatus).succeeded(); + } + + public SuccessOrFailure isHttpRespondingSuccessfully(Function<DockerPort, String> urlFunction, boolean andCheckStatus) { + URL url; + try { + String urlString = urlFunction.apply(this); + log.trace("Trying to connect to {}", urlString); + url = URI.create(urlString).toURL(); + } catch (MalformedURLException e) { + throw new RuntimeException("Could not create URL for connecting to localhost", e); + } + try { + url.openConnection().connect(); + url.openStream().read(); + log.debug("Http connection acquired, assuming port active"); + return SuccessOrFailure.success(); + } catch (SocketException e) { + return SuccessOrFailure.failureWithCondensedException("Failed to acquire http connection, assuming port inactive", e); + } catch (FileNotFoundException e) { + return SuccessOrFailure.fromBoolean(!andCheckStatus, "Received 404, assuming port inactive: " + e.getMessage()); + } catch (SSLHandshakeException e) { + return SuccessOrFailure.failureWithCondensedException("Received bad SSL response, assuming port inactive", e); + } catch (IOException e) { + return SuccessOrFailure.failureWithCondensedException("Error acquiring http connection, assuming port open but inactive", e); + } + } + + /** + * Formats the docker port into a particular form.

+ * <p>Example: dockerPort.inFormat("https://$HOST:$EXTERNAL_PORT/api") + * + * <p>Available options are: + * <ul> + * <li>$HOST - the hostname/ip address of the docker port</li> + * <li>$EXTERNAL_PORT - the external version of the docker port</li> + * <li>$INTERNAL_PORT - the internal version of the docker port</li> + * </ul>
+ * + * @param format a format string using the substitutions listed above + * @return formattedDockerPort the details of the {@link DockerPort} in the specified format + */ + public String inFormat(String format) { + return format + .replaceAll("\\$HOST", getIp()) + .replaceAll("\\$EXTERNAL_PORT", String.valueOf(getExternalPort())) + .replaceAll("\\$INTERNAL_PORT", String.valueOf(getInternalPort())); + + } + + @Override + public int hashCode() { + return Objects.hash(ip, portMapping); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + DockerPort other = (DockerPort) obj; + return Objects.equals(ip, other.ip) + && Objects.equals(portMapping, other.portMapping); + } + + @Override + public String toString() { + return "DockerPort [ip=" + ip + ", portMapping=" + portMapping + "]"; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/PortMapping.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/PortMapping.java new file mode 100644 index 0000000000..c906212031 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/PortMapping.java @@ -0,0 +1,65 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
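A short sketch of the substitution behaviour documented above; the port numbers and JDBC URL are hypothetical:

```java
import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerPort;

public class InFormatExample {
	public static void main(String[] args) {
		// A container port published to the host: external 32768 -> internal 5432.
		DockerPort port = new DockerPort("127.0.0.1", 32768, 5432);

		// $HOST, $EXTERNAL_PORT and $INTERNAL_PORT are replaced as documented.
		String url = port.inFormat("jdbc:postgresql://$HOST:$EXTERNAL_PORT/testdb");
		System.out.println(url); // jdbc:postgresql://127.0.0.1:32768/testdb
	}
}
```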
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import java.util.Objects; + +public class PortMapping { + + private final int externalPort; + private final int internalPort; + + public PortMapping(int externalPort, int internalPort) { + this.externalPort = externalPort; + this.internalPort = internalPort; + } + + public int getExternalPort() { + return externalPort; + } + + public int getInternalPort() { + return internalPort; + } + + @Override + public int hashCode() { + return Objects.hash(externalPort, internalPort); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + PortMapping other = (PortMapping) obj; + return Objects.equals(externalPort, other.externalPort) + && Objects.equals(internalPort, other.internalPort); + } + + @Override + public String toString() { + return "PortMapping [externalPort=" + externalPort + ", internalPort=" + + internalPort + "]"; + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/Ports.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/Ports.java new file mode 100644 index 0000000000..90b6185120 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/Ports.java @@ -0,0 +1,92 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Stream; + +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +public class Ports { + + private static final Pattern PORT_PATTERN = Pattern.compile("((\\d+).(\\d+).(\\d+).(\\d+)):(\\d+)->(\\d+)/tcp"); + private static final int IP_ADDRESS = 1; + private static final int EXTERNAL_PORT = 6; + private static final int INTERNAL_PORT = 7; + + private static final String NO_IP_ADDRESS = "0.0.0.0"; + + private final List ports; + + public Ports(List ports) { + this.ports = ports; + } + + public Ports(DockerPort port) { + this(Collections.singletonList(port)); + } + + public Stream stream() { + return ports.stream(); + } + + public static Ports parseFromDockerComposePs(String psOutput, String dockerMachineIp) { + Assert.state(StringUtils.hasText(psOutput), "No container found"); + Matcher matcher = PORT_PATTERN.matcher(psOutput); + List ports = new ArrayList<>(); + while (matcher.find()) { + String matchedIpAddress = matcher.group(IP_ADDRESS); + String ip = matchedIpAddress.equals(NO_IP_ADDRESS) ? dockerMachineIp : matchedIpAddress; + int externalPort = Integer.parseInt(matcher.group(EXTERNAL_PORT)); + int internalPort = Integer.parseInt(matcher.group(INTERNAL_PORT)); + + ports.add(new DockerPort(ip, externalPort, internalPort)); + } + return new Ports(ports); + } + + @Override + public int hashCode() { + return Objects.hash(ports); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Ports other = (Ports) obj; + return Objects.equals(ports, other.ports); + } + + @Override + public String toString() { + return "Ports [ports=" + ports + "]"; + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/State.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/State.java new file mode 100644 index 0000000000..f247c949c6 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/State.java @@ -0,0 +1,47 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +public enum State { + DOWN, PAUSED, UNHEALTHY, HEALTHY; + + /** + * Returns true if the container is up, unpaused and healthy. 
+ * + * @return true if the container is up, unpaused and healthy + */ + public boolean isHealthy() { + return this == HEALTHY; + } + + /** + * Returns true if the container is up but not necessarily unpaused or healthy. + * + * @return true if the container is up but not necessarily unpaused or healthy + */ + public boolean isUp() { + return this != DOWN; + } + + /** + * Returns true if the container is paused. + * + * @return true if the container is paused + */ + public boolean isPaused() { + return this == PAUSED; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/Attempt.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/Attempt.java new file mode 100644 index 0000000000..315e93b761 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/Attempt.java @@ -0,0 +1,21 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting; + +@FunctionalInterface +public interface Attempt { + boolean attempt() throws Exception; +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ClusterHealthCheck.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ClusterHealthCheck.java new file mode 100644 index 0000000000..c2ac6c76af --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ClusterHealthCheck.java @@ -0,0 +1,76 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
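With Ports and State in place, here is a sketch of how a `docker-compose ps` line is turned into DockerPort instances; the ps line and machine IP are made up for illustration:

```java
import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerPort;
import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Ports;

public class PortsParsingExample {
	public static void main(String[] args) {
		// A hypothetical line in the format emitted by `docker-compose ps`;
		// bindings on 0.0.0.0 are rewritten to the docker machine IP.
		String psOutput = "db_1  docker-entrypoint.sh  Up  0.0.0.0:32768->5432/tcp";

		Ports ports = Ports.parseFromDockerComposePs(psOutput, "192.168.99.100");
		ports.stream()
				.map(DockerPort::toString)
				.forEach(System.out::println);
		// DockerPort [ip=192.168.99.100, portMapping=PortMapping [externalPort=32768, internalPort=5432]]
	}
}
```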
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting; + +import java.io.IOException; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; +import java.util.function.Function; + +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Cluster; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.State; + +import static java.util.stream.Collectors.joining; + +@FunctionalInterface +public interface ClusterHealthCheck { + static ClusterHealthCheck serviceHealthCheck(List containerNames, HealthCheck> delegate) { + return transformingHealthCheck(cluster -> cluster.containers(containerNames), delegate); + } + + static ClusterHealthCheck serviceHealthCheck(String containerName, HealthCheck containerCheck) { + return transformingHealthCheck(cluster -> cluster.container(containerName), containerCheck); + } + + static ClusterHealthCheck transformingHealthCheck(Function transform, HealthCheck healthCheck) { + return cluster -> { + T target = transform.apply(cluster); + return healthCheck.isHealthy(target); + }; + } + + /** + * Returns a check that the native "healthcheck" status of the docker containers is not unhealthy. + * + *
<p>
Does not wait for DOWN or PAUSED containers, or containers with no healthcheck defined. + * + * @return native health checks + */ + static ClusterHealthCheck nativeHealthChecks() { + return cluster -> { + Set unhealthyContainers = new LinkedHashSet<>(); + try { + for (Container container : cluster.allContainers()) { + State state = container.state(); + if (state == State.UNHEALTHY) { + unhealthyContainers.add(container.getContainerName()); + } + } + if (!unhealthyContainers.isEmpty()) { + return SuccessOrFailure.failure( + "The following containers are not healthy: " + unhealthyContainers.stream().collect(joining(", "))); + } + return SuccessOrFailure.success(); + } catch (IOException e) { + return SuccessOrFailure.fromException(e); + } + }; + } + + SuccessOrFailure isClusterHealthy(Cluster cluster) throws InterruptedException; +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ClusterWait.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ClusterWait.java new file mode 100644 index 0000000000..4fc068a6b1 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ClusterWait.java @@ -0,0 +1,74 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
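A sketch of composing the factory methods above; the service name "db" is hypothetical, and toHaveAllPortsOpen comes from the HealthChecks class later in this diff:

```java
import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.ClusterHealthCheck;
import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.HealthChecks;

public class ClusterHealthCheckExample {
	// Scopes a per-container check to a single (hypothetical) service name.
	// nativeHealthChecks() would instead consult the docker-inspect state of
	// every container in the cluster.
	static ClusterHealthCheck databaseIsUp() {
		return ClusterHealthCheck.serviceHealthCheck("db", HealthChecks.toHaveAllPortsOpen());
	}
}
```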
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting; + +import java.util.Optional; +import java.util.concurrent.Callable; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +import org.awaitility.Awaitility; +import org.awaitility.core.ConditionTimeoutException; +import org.joda.time.ReadableDuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Cluster; + +public class ClusterWait { + private static final Logger log = LoggerFactory.getLogger(ClusterWait.class); + private final ClusterHealthCheck clusterHealthCheck; + private final ReadableDuration timeout; + + public ClusterWait(ClusterHealthCheck clusterHealthCheck, ReadableDuration timeout) { + this.clusterHealthCheck = clusterHealthCheck; + this.timeout = timeout; + } + + public void waitUntilReady(Cluster cluster) { + final AtomicReference> lastSuccessOrFailure = new AtomicReference<>( + Optional.empty()); + + log.info("Waiting for cluster to be healthy"); + try { + Awaitility.await() + .pollInterval(50, TimeUnit.MILLISECONDS) + .atMost(timeout.getMillis(), TimeUnit.MILLISECONDS) + .until(weHaveSuccess(cluster, lastSuccessOrFailure)); + } catch (ConditionTimeoutException e) { + throw new IllegalStateException(serviceDidNotStartupExceptionMessage(lastSuccessOrFailure)); + } + } + + private Callable weHaveSuccess(Cluster cluster, + AtomicReference> lastSuccessOrFailure) { + return () -> { + SuccessOrFailure successOrFailure = clusterHealthCheck.isClusterHealthy(cluster); + lastSuccessOrFailure.set(Optional.of(successOrFailure)); + return successOrFailure.succeeded(); + }; + } + + private static String serviceDidNotStartupExceptionMessage( + AtomicReference> lastSuccessOrFailure) { + String healthcheckFailureMessage = lastSuccessOrFailure.get() + .flatMap(SuccessOrFailure::toOptionalFailureMessage) + .orElse("The healthcheck did not finish before the timeout"); + + return "The cluster failed to pass a startup check: " + healthcheckFailureMessage; + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/Exceptions.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/Exceptions.java new file mode 100644 index 0000000000..0b18084b08 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/Exceptions.java @@ -0,0 +1,29 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
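A sketch of driving the waiter above; it assumes a Cluster obtained elsewhere, since that class is not part of this excerpt:

```java
import org.joda.time.Duration;

import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Cluster;
import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.ClusterHealthCheck;
import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.ClusterWait;

public class ClusterWaitExample {
	// Polls every 50ms until the check passes or the timeout elapses, then
	// fails with the last recorded failure message.
	static void waitForCluster(Cluster cluster) {
		ClusterWait wait = new ClusterWait(
				ClusterHealthCheck.nativeHealthChecks(), Duration.standardMinutes(2));
		wait.waitUntilReady(cluster); // throws IllegalStateException on timeout
	}
}
```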
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting; + +import java.util.stream.Collectors; +import org.apache.commons.lang3.exception.ExceptionUtils; + +public enum Exceptions { + ; + + public static String condensedStacktraceFor(Throwable throwable) { + return ExceptionUtils.getThrowableList(throwable).stream() + .map(t -> t.getClass().getCanonicalName() + ": " + t.getMessage()) + .collect(Collectors.joining("\n")); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/HealthCheck.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/HealthCheck.java new file mode 100644 index 0000000000..ac2e5b14b0 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/HealthCheck.java @@ -0,0 +1,21 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting; + +@FunctionalInterface +public interface HealthCheck { + SuccessOrFailure isHealthy(T target); +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/HealthChecks.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/HealthChecks.java new file mode 100644 index 0000000000..ddaa06c031 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/HealthChecks.java @@ -0,0 +1,38 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
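A small sketch of what the condensed stacktrace looks like for a wrapped exception:

```java
import java.net.ConnectException;

import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.Exceptions;

public class CondensedExample {
	public static void main(String[] args) {
		Exception cause = new ConnectException("Connection refused");
		Exception wrapped = new RuntimeException("could not reach service", cause);

		// One "Class: message" line per throwable in the cause chain,
		// instead of a full stacktrace:
		System.out.println(Exceptions.condensedStacktraceFor(wrapped));
		// java.lang.RuntimeException: could not reach service
		// java.net.ConnectException: Connection refused
	}
}
```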
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting; + +import java.util.function.Function; + +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerPort; + +public final class HealthChecks { + + private HealthChecks() {} + + public static HealthCheck toRespondOverHttp(int internalPort, Function urlFunction) { + return container -> container.portIsListeningOnHttp(internalPort, urlFunction); + } + + public static HealthCheck toRespond2xxOverHttp(int internalPort, Function urlFunction) { + return container -> container.portIsListeningOnHttpAndCheckStatus2xx(internalPort, urlFunction); + } + + public static HealthCheck toHaveAllPortsOpen() { + return Container::areAllPortsOpen; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/SuccessOrFailure.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/SuccessOrFailure.java new file mode 100644 index 0000000000..9d376fa1fc --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/SuccessOrFailure.java @@ -0,0 +1,93 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
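A sketch of building a container check from these factories; port 8080 and the health endpoint are hypothetical:

```java
import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container;
import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.HealthCheck;
import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.HealthChecks;

public class HealthChecksExample {
	// Considers the container healthy once internal port 8080 answers 2xx
	// on a (hypothetical) health endpoint; the function maps the resolved
	// DockerPort to the URL to probe.
	static HealthCheck<Container> serverUp() {
		return HealthChecks.toRespond2xxOverHttp(8080,
				port -> port.inFormat("http://$HOST:$EXTERNAL_PORT/actuator/health"));
	}
}
```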
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting; + +import java.util.Optional; +import java.util.function.Function; +import org.apache.commons.lang3.exception.ExceptionUtils; + +public class SuccessOrFailure { + + private Optional optionalFailureMessage; + + public SuccessOrFailure(Optional optionalFailureMessage) { + this.optionalFailureMessage = optionalFailureMessage; + } + + public static SuccessOrFailure onResultOf(Attempt attempt) { + try { + return fromBoolean(attempt.attempt(), "Attempt to complete healthcheck failed"); + } catch (Exception e) { + return fromException(e); + } + } + + public SuccessOrFailure mapFailure(Function mapper) { + if (this.succeeded()) { + return this; + } else { + return failure(mapper.apply(failureMessage())); + } + } + + protected Optional optionalFailureMessage() { + return optionalFailureMessage; + } + + public static SuccessOrFailure success() { + return SuccessOrFailure.of(Optional.empty()); + } + + private static SuccessOrFailure of(Optional empty) { + return new SuccessOrFailure(empty); + } + + public static SuccessOrFailure failure(String message) { + return SuccessOrFailure.of(Optional.of(message)); + } + + public static SuccessOrFailure failureWithCondensedException(String message, Exception exception) { + return failure(message + ":\n" + Exceptions.condensedStacktraceFor(exception)); + } + + public static SuccessOrFailure fromBoolean(boolean succeeded, String possibleFailureMessage) { + if (succeeded) { + return success(); + } else { + return failure(possibleFailureMessage); + } + } + + public boolean failed() { + return optionalFailureMessage().isPresent(); + } + + public boolean succeeded() { + return !failed(); + } + + public String failureMessage() { + return optionalFailureMessage().get(); + } + + public Optional toOptionalFailureMessage() { + return optionalFailureMessage(); + } + + public static SuccessOrFailure fromException(Exception exception) { + return SuccessOrFailure.failure("Encountered an exception: " + ExceptionUtils.getStackTrace(exception)); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Command.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Command.java new file mode 100644 index 0000000000..d72d9a9df8 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Command.java @@ -0,0 +1,106 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
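A sketch of the SuccessOrFailure combinators above; pingDatabase is a hypothetical stand-in for a real probe:

```java
import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailure;

public class SuccessOrFailureExample {
	public static void main(String[] args) {
		// Wraps a throwing boolean check; an exception becomes a failure
		// carrying the full stacktrace.
		SuccessOrFailure result = SuccessOrFailure.onResultOf(() -> pingDatabase());

		// mapFailure rewrites the message only when the attempt failed.
		SuccessOrFailure annotated = result.mapFailure(msg -> "db not ready: " + msg);
		System.out.println(annotated.succeeded());
	}

	// Hypothetical probe used only for illustration.
	private static boolean pingDatabase() {
		return true;
	}
}
```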
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.util.Arrays; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.function.Consumer; + +import org.springframework.util.Assert; + +import static java.nio.charset.StandardCharsets.UTF_8; +import static java.util.concurrent.Executors.newSingleThreadExecutor; +import static java.util.stream.Collectors.joining; + +public class Command { + + public static final int HOURS_TO_WAIT_FOR_STD_OUT_TO_CLOSE = 12; + + public static final int MINUTES_TO_WAIT_AFTER_STD_OUT_CLOSES = 1; + + private final Executable executable; + + private final Consumer logConsumer; + + public Command(Executable executable, Consumer logConsumer) { + this.executable = executable; + this.logConsumer = logConsumer; + } + + public String execute(ErrorHandler errorHandler, boolean composeCommand, String... commands) throws IOException, InterruptedException { + ProcessResult result = run(composeCommand, commands); + + if (result.exitCode() != 0) { + errorHandler.handle(result.exitCode(), result.output(), executable.commandName(), commands); + } + + return result.output(); + } + + public static ErrorHandler throwingOnError() { + return (exitCode, output, commandName, commands) -> { + String message = + constructNonZeroExitErrorMessage(exitCode, commandName, commands) + "\nThe output was:\n" + output; + throw new DockerExecutionException(message); + }; + } + + private static String constructNonZeroExitErrorMessage(int exitCode, String commandName, String... commands) { + return "'" + commandName + " " + Arrays.stream(commands).collect(joining(" ")) + "' returned exit code " + + exitCode; + } + + private ProcessResult run(boolean composeCommand, String... 
commands) throws IOException, InterruptedException { + Process process = executable.execute(composeCommand, commands); + Assert.notNull(process, () -> "expected process from " + composeCommand + ":" + Arrays.asList(commands)); + ExecutorService exec = newSingleThreadExecutor(); + Future outputProcessing = exec + .submit(() -> processOutputFrom(process)); + + String output = waitForResultFrom(outputProcessing); + + process.waitFor(MINUTES_TO_WAIT_AFTER_STD_OUT_CLOSES, TimeUnit.MINUTES); + exec.shutdown(); + + return new ProcessResult(process.exitValue(), output); + } + + private String processOutputFrom(Process process) { + return asReader(process.getInputStream()).lines() + .peek(logConsumer) + .collect(joining(System.lineSeparator())); + } + + private static String waitForResultFrom(Future outputProcessing) { + try { + return outputProcessing.get(HOURS_TO_WAIT_FOR_STD_OUT_TO_CLOSE, TimeUnit.HOURS); + } catch (InterruptedException | ExecutionException | TimeoutException e) { + throw new RuntimeException(e); + } + } + + private static BufferedReader asReader(InputStream inputStream) { + return new BufferedReader(new InputStreamReader(inputStream, UTF_8)); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ConflictingContainerRemovingDockerCompose.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ConflictingContainerRemovingDockerCompose.java new file mode 100644 index 0000000000..30511b5118 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ConflictingContainerRemovingDockerCompose.java @@ -0,0 +1,90 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
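A sketch of how Command is wired, mirroring the usage in DefaultDockerCompose further down; the executable argument is whatever DockerComposeExecutable the caller has built:

```java
import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Command;
import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecutable;

public class CommandExample {
	// Each output line is streamed to the log consumer while the process runs,
	// and a non-zero exit code raises DockerExecutionException via the
	// throwingOnError() handler.
	static String composeUp(DockerComposeExecutable executable) throws Exception {
		Command command = new Command(executable, System.out::println);
		return command.execute(Command.throwingOnError(), true, "up", "-d");
	}
}
```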
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.util.Assert; + +public class ConflictingContainerRemovingDockerCompose extends DelegatingDockerCompose { + private static final Logger log = LoggerFactory.getLogger(ConflictingContainerRemovingDockerCompose.class); + private static final Pattern NAME_CONFLICT_PATTERN = Pattern.compile("name \"([^\"]*)\" is already in use"); + + private final Docker docker; + private final int retryAttempts; + + public ConflictingContainerRemovingDockerCompose(DockerCompose dockerCompose, Docker docker) { + this(dockerCompose, docker, 1); + } + + public ConflictingContainerRemovingDockerCompose(DockerCompose dockerCompose, Docker docker, int retryAttempts) { + super(dockerCompose); + + Assert.state(retryAttempts >= 1, "retryAttempts must be at least 1, was " + retryAttempts); + this.docker = docker; + this.retryAttempts = retryAttempts; + } + + @Override + public void up() throws IOException, InterruptedException { + for (int currRetryAttempt = 0; currRetryAttempt <= retryAttempts; currRetryAttempt++) { + try { + getDockerCompose().up(); + return; + } catch (DockerExecutionException e) { + Set conflictingContainerNames = getConflictingContainerNames(e.getMessage()); + if (conflictingContainerNames.isEmpty()) { + // failed due to reason other than conflicting containers, so re-throw + throw e; + } + + log.debug("docker-compose up failed due to container name conflicts (container names: {}). " + + "Removing containers and attempting docker-compose up again (attempt {}).", + conflictingContainerNames, currRetryAttempt + 1); + removeContainers(conflictingContainerNames); + } + } + + throw new DockerExecutionException("docker-compose up failed"); + } + + private void removeContainers(Collection containerNames) throws IOException, InterruptedException { + try { + docker.rm(containerNames); + } catch (DockerExecutionException e) { + // there are cases such as in CircleCI where 'docker rm' returns a non-0 exit code and "fails", + // but container is still effectively removed as far as conflict resolution is concerned. Because + // of this, be permissive and do not fail task even if 'rm' fails. + log.debug("docker rm failed, but continuing execution", e); + } + } + + Set getConflictingContainerNames(String output) { + HashSet set = new HashSet<>(); + Matcher matcher = NAME_CONFLICT_PATTERN.matcher(output); + while (matcher.find()) { + set.add(matcher.group(1)); + } + return set; + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DefaultDockerCompose.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DefaultDockerCompose.java new file mode 100644 index 0000000000..e7a762a656 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DefaultDockerCompose.java @@ -0,0 +1,261 @@ +/* + * Copyright 2018-2019 the original author or authors. 
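A sketch of applying the retry decorator above; the retry count of 3 is an arbitrary example value:

```java
import org.springframework.cloud.dataflow.common.test.docker.compose.execution.ConflictingContainerRemovingDockerCompose;
import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Docker;
import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose;

public class ConflictRetryExample {
	// Decorates a DockerCompose so that `up` removes stale containers whose
	// names conflict (e.g. leftovers from a killed build) and retries,
	// here up to 3 times.
	static DockerCompose withConflictRemoval(DockerCompose compose, Docker docker) {
		return new ConflictingContainerRemovingDockerCompose(compose, docker, 3);
	}
}
```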
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; +import java.io.OutputStream; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Optional; +import java.util.concurrent.TimeUnit; + +import com.github.zafarkhaja.semver.Version; +import org.apache.commons.io.IOUtils; +import org.awaitility.Awaitility; +import org.joda.time.Duration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerComposeFiles; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ProjectName; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerName; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerNames; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerMachine; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Ports; +import org.springframework.util.StringUtils; + +import static java.util.concurrent.TimeUnit.MILLISECONDS; +import static org.apache.commons.lang3.Validate.validState; +import static org.joda.time.Duration.standardMinutes; + +public class DefaultDockerCompose implements DockerCompose { + + public static final Version VERSION_1_7_0 = Version.parse("1.7.0"); + private static final Duration COMMAND_TIMEOUT = standardMinutes(2); + private static final Duration LOG_WAIT_TIMEOUT = standardMinutes(30); + private static final Logger log = LoggerFactory.getLogger(DefaultDockerCompose.class); + + private final Command command; + private final DockerMachine dockerMachine; + private final DockerComposeExecutable rawExecutable; + + + public DefaultDockerCompose(DockerComposeFiles dockerComposeFiles, DockerMachine dockerMachine, ProjectName projectName) { + this(DockerComposeExecutable.builder() + .dockerComposeFiles(dockerComposeFiles) + .dockerConfiguration(dockerMachine) + .projectName(projectName) + .build(), dockerMachine); + } + + public DefaultDockerCompose(DockerComposeExecutable rawExecutable, DockerMachine dockerMachine) { + this.rawExecutable = rawExecutable; + this.command = new Command(rawExecutable, log::debug); + this.dockerMachine = dockerMachine; + } + + @Override + public void pull() throws IOException, InterruptedException { + command.execute(Command.throwingOnError(), true, "pull"); + } + + @Override + public void build() throws IOException, InterruptedException { + command.execute(Command.throwingOnError(), true, "build"); + } + + @Override + public void up() throws IOException, InterruptedException { + command.execute(Command.throwingOnError(), true, "up", "-d"); + } + + @Override + public void down() throws IOException, InterruptedException { + 
command.execute(swallowingDownCommandDoesNotExist(), true, "down", "--volumes"); + } + + @Override + public void kill() throws IOException, InterruptedException { + command.execute(Command.throwingOnError(), true, "kill"); + } + + @Override + public void rm() throws IOException, InterruptedException { + command.execute(Command.throwingOnError(), true, "rm", "--force", "-v"); + } + + @Override + public void up(Container container) throws IOException, InterruptedException { + command.execute(Command.throwingOnError(), true, "up", "-d", container.getContainerName()); + } + + @Override + public void start(Container container) throws IOException, InterruptedException { + command.execute(Command.throwingOnError(), true, "start", container.getContainerName()); + } + + @Override + public void stop(Container container) throws IOException, InterruptedException { + command.execute(Command.throwingOnError(), true, "stop", container.getContainerName()); + } + + @Override + public void kill(Container container) throws IOException, InterruptedException { + command.execute(Command.throwingOnError(), true, "kill", container.getContainerName()); + } + + @Override + public String exec(DockerComposeExecOption dockerComposeExecOption, String containerName, + DockerComposeExecArgument dockerComposeExecArgument) throws IOException, InterruptedException { + verifyDockerComposeVersionAtLeast(VERSION_1_7_0, "You need at least docker-compose 1.7 to run docker-compose exec"); + String[] fullArgs = constructFullDockerComposeExecArguments(dockerComposeExecOption, containerName, dockerComposeExecArgument); + if (log.isDebugEnabled()) { + log.debug("exec:{}", StringUtils.collectionToDelimitedString(Arrays.asList(fullArgs), " ")); + } + return command.execute(Command.throwingOnError(), true, fullArgs); + } + + @Override + public String run(DockerComposeRunOption dockerComposeRunOption, String containerName, + DockerComposeRunArgument dockerComposeRunArgument) throws IOException, InterruptedException { + String[] fullArgs = constructFullDockerComposeRunArguments(dockerComposeRunOption, containerName, dockerComposeRunArgument); + return command.execute(Command.throwingOnError(), true, fullArgs); + } + + private void verifyDockerComposeVersionAtLeast(Version targetVersion, String message) throws IOException, InterruptedException { + validState(version().isHigherThanOrEquivalentTo(targetVersion), message); + } + + private Version version() throws IOException, InterruptedException { + String versionOutput = command.execute(Command.throwingOnError(), false, "-v"); + return DockerComposeVersion.parseFromDockerComposeVersion(versionOutput); + } + + private static String[] constructFullDockerComposeExecArguments(DockerComposeExecOption dockerComposeExecOption, + String containerName, DockerComposeExecArgument dockerComposeExecArgument) { + // The "-T" option here disables pseudo-TTY allocation, which is not useful here since we are not using + // terminal features here (e.g. we are not sending ^C to kill the executed process). + // Disabling pseudo-TTY allocation means this will work on OS's that don't support TTY (i.e. 
Windows) + List fullArgs = new ArrayList<>(); + fullArgs.add("exec"); + fullArgs.add("-T"); + fullArgs.addAll(dockerComposeExecOption.options()); + fullArgs.add(containerName); + fullArgs.addAll(dockerComposeExecArgument.arguments()); + return fullArgs.toArray(new String[fullArgs.size()]); + } + + private static String[] constructFullDockerComposeRunArguments(DockerComposeRunOption dockerComposeRunOption, + String containerName, DockerComposeRunArgument dockerComposeRunArgument) { + List fullArgs = new ArrayList<>(); + fullArgs.add("run"); + fullArgs.addAll(dockerComposeRunOption.options()); + fullArgs.add(containerName); + fullArgs.addAll(dockerComposeRunArgument.arguments()); + return fullArgs.toArray(new String[fullArgs.size()]); + } + + @Override + public List ps() throws IOException, InterruptedException { + String psOutput = command.execute(Command.throwingOnError(), true, "ps"); + return ContainerNames.parseFromDockerComposePs(psOutput); + } + + @Override + public Optional id(Container container) throws IOException, InterruptedException { + return id(container.getContainerName()); + } + + @Override + public String config() throws IOException, InterruptedException { + return command.execute(Command.throwingOnError(), true, "config"); + } + + @Override + public List services() throws IOException, InterruptedException { + String servicesOutput = command.execute(Command.throwingOnError(), true, "config", "--services"); + return Arrays.asList(servicesOutput.split("(\r|\n)+")); + } + + /** + * Blocks until all logs collected from the container. + * @return Whether the docker container terminated prior to log collection ending + */ + @Override + public boolean writeLogs(String container, OutputStream output) throws IOException { + try { + Awaitility.await() + .pollInterval(50, TimeUnit.MILLISECONDS) + .atMost(LOG_WAIT_TIMEOUT.getMillis(), TimeUnit.MILLISECONDS) + .until(() -> exists(container)); + Process executedProcess = followLogs(container); + IOUtils.copy(executedProcess.getInputStream(), output); + executedProcess.waitFor(COMMAND_TIMEOUT.getMillis(), MILLISECONDS); + } catch (InterruptedException e) { + return false; + } + return true; + } + + private boolean exists(final String containerName) throws IOException, InterruptedException { + return id(containerName).orElse(null) != null; + } + + private Optional id(String containerName) throws IOException, InterruptedException { + String id = command.execute(Command.throwingOnError(), true, "ps", "-q", containerName); + if (id.isEmpty()) { + return Optional.empty(); + } + return Optional.of(id); + } + + private Process followLogs(String container) throws IOException, InterruptedException { + if (version().isHigherThanOrEquivalentTo(VERSION_1_7_0)) { + return rawExecutable.execute(true, "logs", "--no-color", "--follow", container); + } + + return rawExecutable.execute(true, "logs", "--no-color", container); + } + + @Override + public Ports ports(String service) throws IOException, InterruptedException { + return Ports.parseFromDockerComposePs(psOutput(service), dockerMachine.getIp()); + } + + private static ErrorHandler swallowingDownCommandDoesNotExist() { + return (exitCode, output, commandName, commands) -> { + if (downCommandWasPresent(output)) { + Command.throwingOnError().handle(exitCode, output, commandName, commands); + } + + log.warn("It looks like `docker-compose down` didn't work."); + log.warn("This probably means your version of docker-compose doesn't support the `down` command"); + log.warn("Updating to version 1.6+ of 
docker-compose is likely to fix this issue."); + }; + } + + private static boolean downCommandWasPresent(String output) { + return !output.contains("No such command"); + } + + private String psOutput(String service) throws IOException, InterruptedException { + String psOutput = command.execute(Command.throwingOnError(), true, "ps", service); + validState(StringUtils.hasText(psOutput), "No container with name '" + service + "' found"); + return psOutput; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DelegatingDockerCompose.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DelegatingDockerCompose.java new file mode 100644 index 0000000000..122ca51357 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DelegatingDockerCompose.java @@ -0,0 +1,130 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; +import java.io.OutputStream; +import java.util.List; +import java.util.Optional; + +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerName; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Ports; + +abstract class DelegatingDockerCompose implements DockerCompose { + private final DockerCompose dockerCompose; + + protected DelegatingDockerCompose(DockerCompose dockerCompose) { + this.dockerCompose = dockerCompose; + } + + @Override + public void pull() throws IOException, InterruptedException { + dockerCompose.pull(); + } + + @Override + public void build() throws IOException, InterruptedException { + dockerCompose.build(); + } + + @Override + public void up() throws IOException, InterruptedException { + dockerCompose.up(); + } + + @Override + public void down() throws IOException, InterruptedException { + dockerCompose.down(); + } + + @Override + public void kill() throws IOException, InterruptedException { + dockerCompose.kill(); + } + + @Override + public void rm() throws IOException, InterruptedException { + dockerCompose.rm(); + } + + @Override + public void up(Container container) throws IOException, InterruptedException { + dockerCompose.up(container); + } + + @Override + public void start(Container container) throws IOException, InterruptedException { + dockerCompose.start(container); + } + + @Override + public void stop(Container container) throws IOException, InterruptedException { + dockerCompose.stop(container); + } + + @Override + public void kill(Container container) throws 
IOException, InterruptedException { + dockerCompose.kill(container); + } + + @Override + public String exec(DockerComposeExecOption dockerComposeExecOption, String containerName, + DockerComposeExecArgument dockerComposeExecArgument) throws IOException, InterruptedException { + return dockerCompose.exec(dockerComposeExecOption, containerName, dockerComposeExecArgument); + } + + @Override + public String run(DockerComposeRunOption dockerComposeRunOption, String containerName, + DockerComposeRunArgument dockerComposeRunArgument) throws IOException, InterruptedException { + return dockerCompose.run(dockerComposeRunOption, containerName, dockerComposeRunArgument); + } + + @Override + public List ps() throws IOException, InterruptedException { + return dockerCompose.ps(); + } + + @Override + public Optional id(Container container) throws IOException, InterruptedException { + return dockerCompose.id(container); + } + + @Override + public String config() throws IOException, InterruptedException { + return dockerCompose.config(); + } + + @Override + public List services() throws IOException, InterruptedException { + return dockerCompose.services(); + } + + @Override + public boolean writeLogs(String container, OutputStream output) throws IOException { + return dockerCompose.writeLogs(container, output); + } + + @Override + public Ports ports(String service) throws IOException, InterruptedException { + return dockerCompose.ports(service); + } + + protected final DockerCompose getDockerCompose() { + return dockerCompose; + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Docker.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Docker.java new file mode 100644 index 0000000000..49dea25820 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Docker.java @@ -0,0 +1,94 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import com.github.zafarkhaja.semver.Version; +import org.apache.commons.lang3.SystemUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerMachine; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.State; +import org.springframework.util.Assert; + +public class Docker { + + private static final Logger log = LoggerFactory.getLogger(Docker.class); + + // Without java escape characters: ^(\d+)\.(\d+)\.(\d+)(?:-.*)?$ + private static final Pattern VERSION_PATTERN = Pattern.compile("^Docker version (\\d+)\\.(\\d+)\\.(\\d+)(?:-.*)?$"); + private static final String HEALTH_STATUS_FORMAT = "--format=" + "{{if not .State.Running}}DOWN" + + "{{else if .State.Paused}}PAUSED" + "{{else if index .State \"Health\"}}" + + "{{if eq .State.Health.Status \"healthy\"}}HEALTHY" + "{{else}}UNHEALTHY{{end}}" + + "{{else}}HEALTHY{{end}}"; + private static final String HEALTH_STATUS_FORMAT_WINDOWS = HEALTH_STATUS_FORMAT.replaceAll("\"", "`\""); + + public static Version version() throws IOException, InterruptedException { + return new Docker(DockerExecutable.builder().dockerConfiguration(DockerMachine.localMachine().build()).build()) + .configuredVersion(); + } + + public Version configuredVersion() throws IOException, InterruptedException { + String versionString = command.execute(Command.throwingOnError(), false, "-v"); + Matcher matcher = VERSION_PATTERN.matcher(versionString); + Assert.state(matcher.matches(), "Unexpected output of docker -v: " + versionString); + return Version.of(Integer.parseInt(matcher.group(1)), Integer.parseInt(matcher.group(2)), + Integer.parseInt(matcher.group(3))); + } + + private final Command command; + + public Docker(DockerExecutable rawExecutable) { + this.command = new Command(rawExecutable, log::trace); + } + + public State state(String containerId) throws IOException, InterruptedException { + String formatString = SystemUtils.IS_OS_WINDOWS ? HEALTH_STATUS_FORMAT_WINDOWS : HEALTH_STATUS_FORMAT; + String stateString = command.execute(Command.throwingOnError(), false,"inspect", formatString, containerId); + return State.valueOf(stateString); + } + + public void rm(Collection containerNames) throws IOException, InterruptedException { + rm(containerNames.toArray(new String[containerNames.size()])); + } + + public void rm(String... 
containerNames) throws IOException, InterruptedException { + List commands = new ArrayList<>(); + commands.add("rm"); + commands.add("-f"); + if (containerNames != null) { + for (String containerName : containerNames) { + commands.add(containerName); + } + } + command.execute(Command.throwingOnError(), false, commands.toArray(new String[0])); + } + + public String listNetworks() throws IOException, InterruptedException { + return command.execute(Command.throwingOnError(), false, "network", "ls"); + } + + public String pruneNetworks() throws IOException, InterruptedException { + return command.execute(Command.throwingOnError(), false,"network", "prune", "--force"); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerCommandLocations.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerCommandLocations.java new file mode 100644 index 0000000000..dea8a12f93 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerCommandLocations.java @@ -0,0 +1,47 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.File; +import java.util.List; +import java.util.Optional; +import java.util.function.Predicate; + +import static java.util.Arrays.asList; + +public class DockerCommandLocations { + private static final Predicate IS_NOT_NULL = path -> path != null; + private static final Predicate FILE_EXISTS = path -> new File(path).exists(); + + private final List possiblePaths; + + public DockerCommandLocations(String... possiblePaths) { + this.possiblePaths = asList(possiblePaths); + } + + public Optional preferredLocation() { + + return possiblePaths.stream() + .filter(IS_NOT_NULL) + .filter(FILE_EXISTS) + .findFirst(); + } + + @Override + public String toString() { + return "DockerCommandLocations{possiblePaths=" + possiblePaths + "}"; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerCompose.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerCompose.java new file mode 100644 index 0000000000..1d9f1d41ab --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerCompose.java @@ -0,0 +1,51 @@ +/* + * Copyright 2018-2019 the original author or authors. 
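A sketch of the path-probing behaviour above; the candidate paths are typical install locations, shown only for illustration:

```java
import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCommandLocations;

public class LocationsExample {
	public static void main(String[] args) {
		// Candidates are filtered for null and for file existence; the first
		// surviving path wins. A null entry (unset env var) is tolerated.
		DockerCommandLocations locations = new DockerCommandLocations(
				System.getenv("DOCKER_COMPOSE_LOCATION"), // may be null
				"/usr/local/bin/docker-compose",
				"/usr/bin/docker-compose");

		System.out.println(locations.preferredLocation().orElse("docker-compose"));
	}
}
```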
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; +import java.io.OutputStream; +import java.util.List; +import java.util.Optional; + +import com.github.zafarkhaja.semver.Version; + +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerName; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Ports; + +public interface DockerCompose { + static Version version() throws IOException, InterruptedException { + return DockerComposeExecutable.version(); + } + void pull() throws IOException, InterruptedException; + void build() throws IOException, InterruptedException; + void up() throws IOException, InterruptedException; + void down() throws IOException, InterruptedException; + void kill() throws IOException, InterruptedException; + void rm() throws IOException, InterruptedException; + void up(Container container) throws IOException, InterruptedException; + void start(Container container) throws IOException, InterruptedException; + void stop(Container container) throws IOException, InterruptedException; + void kill(Container container) throws IOException, InterruptedException; + String exec(DockerComposeExecOption dockerComposeExecOption, String containerName, DockerComposeExecArgument dockerComposeExecArgument) throws IOException, InterruptedException; + String run(DockerComposeRunOption dockerComposeRunOption, String containerName, DockerComposeRunArgument dockerComposeRunArgument) throws IOException, InterruptedException; + List<ContainerName> ps() throws IOException, InterruptedException; + Optional<String> id(Container container) throws IOException, InterruptedException; + String config() throws IOException, InterruptedException; + List<String> services() throws IOException, InterruptedException; + boolean writeLogs(String container, OutputStream output) throws IOException; + Ports ports(String service) throws IOException, InterruptedException; +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecArgument.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecArgument.java new file mode 100644 index 0000000000..b24369ca9a --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecArgument.java @@ -0,0 +1,66 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.util.Arrays; +import java.util.List; +
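+/** + * Arguments passed to {@code docker-compose exec} after the service name; for example, + * {@code DockerComposeExecArgument.arguments("ls", "-la")} would run {@code ls -la} inside + * the target container (an illustrative use of the factory methods below). + */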
+public class DockerComposeExecArgument { + + private List<String> arguments; + + public DockerComposeExecArgument(List<String> arguments) { + this.arguments = arguments; + } + + public List<String> arguments() { + return arguments; + } + + public static DockerComposeExecArgument of(List<String> arguments) { + return new DockerComposeExecArgument(arguments); + } + + public static DockerComposeExecArgument arguments(String... arguments) { + return DockerComposeExecArgument.of(Arrays.asList(arguments)); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((arguments == null) ? 0 : arguments.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + DockerComposeExecArgument other = (DockerComposeExecArgument) obj; + if (arguments == null) { + if (other.arguments != null) + return false; + } else if (!arguments.equals(other.arguments)) + return false; + return true; + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecOption.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecOption.java new file mode 100644 index 0000000000..61940d6b58 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecOption.java @@ -0,0 +1,71 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +public class DockerComposeExecOption { + + private List<String> options; + + public DockerComposeExecOption(List<String> options) { + this.options = options; + } + + public List<String> options() { + return options; + } + + public static DockerComposeExecOption options(String...
options) { + return DockerComposeExecOption.of(Arrays.asList(options)); + } + + private static DockerComposeExecOption of(List<String> asList) { + return new DockerComposeExecOption(asList); + } + + public static DockerComposeExecOption noOptions() { + return DockerComposeExecOption.of(Collections.emptyList()); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((options == null) ? 0 : options.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + DockerComposeExecOption other = (DockerComposeExecOption) obj; + if (options == null) { + if (other.options != null) + return false; + } else if (!options.equals(other.options)) + return false; + return true; + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecutable.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecutable.java new file mode 100644 index 0000000000..459a8b59bd --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecutable.java @@ -0,0 +1,167 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import com.github.zafarkhaja.semver.Version; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerComposeFiles; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ProjectName; + +public class DockerComposeExecutable implements Executable { + + private static final Logger log = LoggerFactory.getLogger(DockerComposeExecutable.class); + + private static final DockerCommandLocations DOCKER_COMPOSE_LOCATIONS = new DockerCommandLocations( + System.getenv("DOCKER_COMPOSE_LOCATION"), + "/usr/local/bin/docker-compose", + "/usr/bin/docker-compose", + "/usr/local/bin/docker", + "/usr/bin/docker" + ); + + private static String defaultDockerComposePath() { + String pathToUse = DOCKER_COMPOSE_LOCATIONS.preferredLocation() + .orElseThrow(() -> new IllegalStateException( + "Could not find docker-compose or docker, looked in: " + DOCKER_COMPOSE_LOCATIONS)); + + log.debug("Using docker-compose found at " + pathToUse); + return pathToUse; + } + + static Version version() throws IOException, InterruptedException { + Command dockerCompose = new Command(new Executable() { + + @Override + public String commandName() { + return defaultDockerComposePath(); + } + + @Override + public Process execute(boolean composeCommand, String... commands) throws IOException { + List<String> args = new ArrayList<>(); + String dockerComposePath = defaultDockerComposePath(); + args.add(dockerComposePath); + if (dockerComposePath.endsWith("/docker")) { + args.add("compose"); + } + args.addAll(Arrays.asList(commands)); + log.debug("execute:{}", args); + return new ProcessBuilder(args).redirectErrorStream(true).start(); + } + }, log::debug); + + String versionOutput = dockerCompose.execute(Command.throwingOnError(), false, "-v"); + return DockerComposeVersion.parseFromDockerComposeVersion(versionOutput); + } + + private DockerComposeFiles dockerComposeFiles; + + private DockerConfiguration dockerConfiguration; + + private ProjectName projectName = ProjectName.random(); + + public DockerComposeExecutable(DockerComposeFiles dockerComposeFiles, DockerConfiguration dockerConfiguration, ProjectName projectName) { + this.dockerComposeFiles = dockerComposeFiles; + this.dockerConfiguration = dockerConfiguration; + if (projectName != null) { + this.projectName = projectName; + } + } + + public DockerComposeFiles dockerComposeFiles() { + return dockerComposeFiles; + } + + public DockerConfiguration dockerConfiguration() { + return dockerConfiguration; + } + + public ProjectName projectName() { + return projectName; + } + + @Override + public final String commandName() { + return defaultDockerComposePath().endsWith("/docker") ? "docker" : "docker-compose"; + } + + protected String dockerComposePath() { + return defaultDockerComposePath(); + } + + @Override + public Process execute(boolean composeCommand, String... commands) throws IOException { + DockerForMacHostsIssue.issueWarning(); + + List<String> args = new ArrayList<>(); + args.add(dockerComposePath()); + if (composeCommand && commandName().equalsIgnoreCase("docker")) { + args.add("compose"); + } + // When the only argument is a single option (e.g. "-v"), skip the project and compose-file arguments.
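+ // Illustrative assembled command for execute(true, "up"), assuming project name "dataflow" and + // file "docker-compose.yml": [docker, compose, --project-name, dataflow, --file, docker-compose.yml, up] + // (hypothetical values; the exact flags come from ProjectName and DockerComposeFiles)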
+ if (commands.length != 1 || !commands[0].startsWith("-")) { + args.addAll(projectName().constructComposeFileCommand()); + args.addAll(dockerComposeFiles().constructComposeFileCommand()); + } + args.addAll(Arrays.asList(commands)); + + log.debug("execute:{}", args); + return dockerConfiguration().configuredDockerComposeProcess() + .command(args) + .redirectErrorStream(true) + .start(); + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + private DockerComposeFiles dockerComposeFiles; + + private DockerConfiguration dockerConfiguration; + + private ProjectName projectName; + + public Builder dockerComposeFiles(DockerComposeFiles dockerComposeFiles) { + this.dockerComposeFiles = dockerComposeFiles; + return this; + } + + public Builder dockerConfiguration(DockerConfiguration dockerConfiguration) { + this.dockerConfiguration = dockerConfiguration; + return this; + } + + public Builder projectName(ProjectName projectName) { + this.projectName = projectName; + return this; + } + + public DockerComposeExecutable build() { + return new DockerComposeExecutable(dockerComposeFiles, dockerConfiguration, projectName); + } + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeRunArgument.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeRunArgument.java new file mode 100644 index 0000000000..41f12899e6 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeRunArgument.java @@ -0,0 +1,66 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.util.Arrays; +import java.util.List; + +public class DockerComposeRunArgument { + + private List<String> arguments; + + public DockerComposeRunArgument(List<String> arguments) { + this.arguments = arguments; + } + + public List<String> arguments() { + return arguments; + } + + public static DockerComposeRunArgument arguments(String... arguments) { + return DockerComposeRunArgument.of(Arrays.asList(arguments)); + } + + private static DockerComposeRunArgument of(List<String> asList) { + return new DockerComposeRunArgument(asList); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((arguments == null) ?
0 : arguments.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + DockerComposeRunArgument other = (DockerComposeRunArgument) obj; + if (arguments == null) { + if (other.arguments != null) + return false; + } else if (!arguments.equals(other.arguments)) + return false; + return true; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeRunOption.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeRunOption.java new file mode 100644 index 0000000000..31280c2f8a --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeRunOption.java @@ -0,0 +1,67 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.util.Arrays; +import java.util.List; + +public class DockerComposeRunOption { + + private List<String> options; + + public DockerComposeRunOption(List<String> options) { + this.options = options; + } + + public List<String> options() { + return options; + } + + public static DockerComposeRunOption options(String... options) { + return DockerComposeRunOption.of(Arrays.asList(options)); + } + + private static DockerComposeRunOption of(List<String> asList) { + return new DockerComposeRunOption(asList); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((options == null) ? 0 : options.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + DockerComposeRunOption other = (DockerComposeRunOption) obj; + if (options == null) { + if (other.options != null) + return false; + } else if (!options.equals(other.options)) + return false; + return true; + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeVersion.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeVersion.java new file mode 100644 index 0000000000..53ef1c4fc5 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeVersion.java @@ -0,0 +1,59 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import com.github.zafarkhaja.semver.Version; + +import org.springframework.util.StringUtils; + +public final class DockerComposeVersion { + + private DockerComposeVersion() { + } + + // docker-compose version format is like 1.7.0rc1, which can't be parsed by java-semver + // here we only pass 1.7.0 to java-semver
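+ // e.g. "docker-compose version 1.7.0rc1, build abc1234" -> 1.7.0, and "Docker Compose version v2.24.5" -> 2.24.5 + // (illustrative version strings; only the leading numeric x.y.z portion is handed to java-semver)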
+ public static Version parseFromDockerComposeVersion(String versionOutput) { + String[] splitOnSeparator = versionOutput.split(" "); + String version = null; + for (String value : splitOnSeparator) { + if (value.isEmpty()) { + continue; + } + if (Character.isDigit(value.charAt(0))) { + version = value; + break; + } else if (value.charAt(0) == 'v' && value.length() > 1 && Character.isDigit(value.charAt(1))) { + version = value.substring(1); + } + if (StringUtils.hasLength(version)) { + break; + } + } + if (!StringUtils.hasText(version)) { + throw new RuntimeException("Unknown version: " + versionOutput); + } + StringBuilder builder = new StringBuilder(); + for (int i = 0; i < version.length(); i++) { + if ((version.charAt(i) >= '0' && version.charAt(i) <= '9') || version.charAt(i) == '.') { + builder.append(version.charAt(i)); + } else { + return Version.parse(builder.toString()); + } + } + return Version.parse(builder.toString()); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerConfiguration.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerConfiguration.java new file mode 100644 index 0000000000..24bd548fee --- /dev/null +++
b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerConfiguration.java @@ -0,0 +1,20 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +public interface DockerConfiguration { + ProcessBuilder configuredDockerComposeProcess(); +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerExecutable.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerExecutable.java new file mode 100644 index 0000000000..54534d4600 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerExecutable.java @@ -0,0 +1,92 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class DockerExecutable implements Executable { + private static final Logger log = LoggerFactory.getLogger(DockerExecutable.class); + + private static final DockerCommandLocations DOCKER_LOCATIONS = new DockerCommandLocations( + System.getenv("DOCKER_LOCATION"), + "/usr/local/bin/docker", + "/usr/bin/docker" + ); + + private DockerConfiguration dockerConfiguration; + + public DockerExecutable(DockerConfiguration dockerConfiguration) { + this.dockerConfiguration = dockerConfiguration; + } + + public DockerConfiguration dockerConfiguration() { + return dockerConfiguration; + } + + @Override + public final String commandName() { + return "docker"; + } + + protected String dockerPath() { + String pathToUse = DOCKER_LOCATIONS.preferredLocation() + .orElseThrow(() -> new IllegalStateException( + "Could not find docker, looked in: " + DOCKER_LOCATIONS)); + + log.debug("Using docker found at " + pathToUse); + + return pathToUse; + } + + @Override + public Process execute(boolean composeCommand, String... 
commands) throws IOException { + List<String> args = new ArrayList<>(); + args.add(dockerPath()); + if (composeCommand) { + args.add("compose"); + } + args.addAll(Arrays.asList(commands)); + + return dockerConfiguration().configuredDockerComposeProcess() + .command(args) + .redirectErrorStream(true) + .start(); + } + + public static DockerExecutable.Builder builder() { + return new Builder(); + } + + public static class Builder { + + private DockerConfiguration dockerConfiguration; + + public Builder dockerConfiguration(DockerConfiguration dockerConfiguration) { + this.dockerConfiguration = dockerConfiguration; + return this; + } + + public DockerExecutable build() { + return new DockerExecutable(dockerConfiguration); + } + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerExecutionException.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerExecutionException.java new file mode 100644 index 0000000000..b765fb3636 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerExecutionException.java @@ -0,0 +1,25 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +public class DockerExecutionException extends RuntimeException { + public DockerExecutionException() { + } + + public DockerExecutionException(String message) { + super(message); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerForMacHostsIssue.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerForMacHostsIssue.java new file mode 100644 index 0000000000..1c2fa4a3a7 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerForMacHostsIssue.java @@ -0,0 +1,68 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.File; +import java.io.IOException; + +import org.springframework.util.FileCopyUtils; + +/** + * Check whether Mac OS X users have pointed localunixsocket to localhost. + * + * <p> + * docker-compose takes an order of magnitude longer to run commands without this tip! + * + * @see <a href="https://github.com/docker/compose/issues/3419#issuecomment-221793401">Docker Compose Issue #3419</a> + */ +public class DockerForMacHostsIssue { + + private static final String REDIRECT_LINE = "127.0.0.1 localunixsocket\n"; + private static final String WARNING_MESSAGE = "\n\n **** WARNING: Your tests may be slow ****\n" + + "Please add the following line to /etc/hosts:\n " + + REDIRECT_LINE + + "\nFor more information, see https://github.com/docker/compose/issues/3419#issuecomment-221793401\n\n"; + private static volatile boolean checkPerformed = false; + + @SuppressWarnings("checkstyle:BanSystemErr") + public static void issueWarning() { + if (!checkPerformed) { + if (onMacOsX() && !localunixsocketRedirectedInEtcHosts()) { + System.err.print(WARNING_MESSAGE); + } + } + checkPerformed = true; + } + + private static boolean onMacOsX() { + return System.getProperty("os.name", "generic").equals("Mac OS X"); + } + + private static boolean localunixsocketRedirectedInEtcHosts() { + try { + byte[] bytes = FileCopyUtils.copyToByteArray(new File("/etc/hosts")); + String content = new String(bytes); + return content.contains(REDIRECT_LINE); + } catch (IOException e) { + return true; // Better to be silent than issue false warnings + } + } + + public static void main(String[] args) { + issueWarning(); + } + + private DockerForMacHostsIssue() {} +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ErrorHandler.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ErrorHandler.java new file mode 100644 index 0000000000..cd5269fe85 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ErrorHandler.java @@ -0,0 +1,21 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +@FunctionalInterface +public interface ErrorHandler { + void handle(int exitCode, String output, String commandName, String... commands); +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Executable.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Executable.java new file mode 100644 index 0000000000..0c44c551c6 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Executable.java @@ -0,0 +1,26 @@ +/* + * Copyright 2018-2019 the original author or authors.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; + +public interface Executable { + + String commandName(); + + Process execute(boolean composeCommand, String... commands) throws IOException; + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/GracefulShutdownStrategy.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/GracefulShutdownStrategy.java new file mode 100644 index 0000000000..739def945c --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/GracefulShutdownStrategy.java @@ -0,0 +1,40 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ShutdownStrategy; + +/** + * Send SIGTERM to containers first, allowing them up to 10 seconds to + * terminate before killing and rm-ing them. 
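+ * + * <p> + * Illustrative use (assuming {@code dockerCompose} and {@code docker} instances already exist): + * <pre>{@code + * new GracefulShutdownStrategy().shutdown(dockerCompose, docker); + * }</pre>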
+ */ +public class GracefulShutdownStrategy implements ShutdownStrategy { + + private static final Logger log = LoggerFactory.getLogger(GracefulShutdownStrategy.class); + + @Override + public void shutdown(DockerCompose dockerCompose, Docker docker) throws IOException, InterruptedException { + log.debug("Shutting down docker-compose cluster"); + dockerCompose.down(); + dockerCompose.kill(); + dockerCompose.rm(); + docker.pruneNetworks(); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/KillDownShutdownStrategy.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/KillDownShutdownStrategy.java new file mode 100644 index 0000000000..3af89427f8 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/KillDownShutdownStrategy.java @@ -0,0 +1,45 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ShutdownStrategy; + +/** + * Shuts down fast but cleanly by issuing a kill (fast shutdown) followed by a down (thorough cleanup). + * + * <p> + * "down" would be ideal as a single command if it didn't first execute an impotent SIGTERM, which + * many Docker images simply ignore due to being run by bash as process 1. We don't need a graceful + * shutdown period anyway since the tests are done and we're destroying the docker image. + */ +public class KillDownShutdownStrategy implements ShutdownStrategy { + + private static final Logger log = LoggerFactory.getLogger(KillDownShutdownStrategy.class); + + @Override + public void shutdown(DockerCompose dockerCompose, Docker docker) + throws IOException, InterruptedException { + log.debug("Killing docker-compose cluster"); + dockerCompose.kill(); + log.debug("Downing docker-compose cluster"); + dockerCompose.down(); + log.debug("docker-compose cluster killed"); + docker.pruneNetworks(); + log.debug("Networks pruned"); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ProcessResult.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ProcessResult.java new file mode 100644 index 0000000000..15051f5d0c --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ProcessResult.java @@ -0,0 +1,34 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +public class ProcessResult { + private final int exitCode; + private final String output; + + public ProcessResult(int exitCode, String output) { + this.exitCode = exitCode; + this.output = output; + } + + public int exitCode() { + return exitCode; + } + + public String output() { + return output; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Retryer.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Retryer.java new file mode 100644 index 0000000000..817df8b7b3 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/Retryer.java @@ -0,0 +1,58 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; +import org.joda.time.Duration; +import org.joda.time.ReadableDuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +
+public class Retryer { + private static final Logger log = LoggerFactory.getLogger(Retryer.class); + public static final ReadableDuration STANDARD_DELAY = Duration.standardSeconds(5); + + public interface RetryableDockerOperation<T> { + T call() throws IOException, InterruptedException; + } + + private final int retryAttempts; + private final ReadableDuration delay; + + public Retryer(int retryAttempts, ReadableDuration delay) { + this.retryAttempts = retryAttempts; + this.delay = delay; + } + + public <T> T runWithRetries(RetryableDockerOperation<T> operation) throws IOException, InterruptedException { + DockerExecutionException lastExecutionException = null; + for (int i = 0; i <= retryAttempts; i++) { + try { + return operation.call(); + } catch (DockerExecutionException e) { + lastExecutionException = e; + log.warn("Caught exception: {}", e.getMessage()); + if (i < retryAttempts) { + log.warn("Retrying after {}", delay); + Thread.sleep(delay.getMillis()); + } + } + } + + log.error("Exhausted all retry attempts. Tried {} times.", retryAttempts); + throw lastExecutionException; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/RetryingDockerCompose.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/RetryingDockerCompose.java new file mode 100644 index 0000000000..64b0a62cb4 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/RetryingDockerCompose.java @@ -0,0 +1,47 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; +import java.util.List; + +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerName; + +public class RetryingDockerCompose extends DelegatingDockerCompose { + private final Retryer retryer; + + public RetryingDockerCompose(int retryAttempts, DockerCompose dockerCompose) { + this(new Retryer(retryAttempts, Retryer.STANDARD_DELAY), dockerCompose); + } + + public RetryingDockerCompose(Retryer retryer, DockerCompose dockerCompose) { + super(dockerCompose); + this.retryer = retryer; + } + + @Override + public void up() throws IOException, InterruptedException { + retryer.runWithRetries(() -> { + super.up(); + return null; + }); + } + + @Override + public List<ContainerName> ps() throws IOException, InterruptedException { + return retryer.runWithRetries(super::ps); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/SkipShutdownStrategy.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/SkipShutdownStrategy.java new file mode 100644 index 0000000000..d61e34a861 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/SkipShutdownStrategy.java @@ -0,0 +1,38 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ShutdownStrategy; + +public class SkipShutdownStrategy implements ShutdownStrategy { + + private static final Logger log = LoggerFactory.getLogger(SkipShutdownStrategy.class); + + @Override + public void shutdown(DockerCompose dockerCompose, Docker docker) { + log.warn("\n" + + "******************************************************************************************\n" + + "* docker-compose-rule has been configured to skip docker-compose shutdown: *\n" + + "* this means the containers will be left running after tests finish executing. *\n" + + "* If you see this message when running on CI it means you are potentially abandoning *\n" + + "* long running processes and leaking resources.
*\n" + + "******************************************************************************************"); + } + + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/DoNothingLogCollector.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/DoNothingLogCollector.java new file mode 100644 index 0000000000..24e78e7dd9 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/DoNothingLogCollector.java @@ -0,0 +1,32 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.logging; + +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; + +public class DoNothingLogCollector implements LogCollector { + + @Override + public void startCollecting(DockerCompose dockerCompose) { + + } + + @Override + public void stopCollecting() { + + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/FileLogCollector.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/FileLogCollector.java new file mode 100644 index 0000000000..03249e88ac --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/FileLogCollector.java @@ -0,0 +1,100 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.logging; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.file.FileAlreadyExistsException; +import java.nio.file.Files; +import java.util.List; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; +import org.apache.commons.lang3.Validate; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; +import org.springframework.util.Assert; + +public class FileLogCollector implements LogCollector { + + private static final Logger log = LoggerFactory.getLogger(FileLogCollector.class); + + private static final long STOP_TIMEOUT_IN_MILLIS = 50; + + private final File logDirectory; + + private ExecutorService executor = null; + + public FileLogCollector(File logDirectory) { + Assert.state(!logDirectory.isFile(), "Log directory cannot be a file"); + if (!logDirectory.exists()) { + Validate.isTrue(logDirectory.mkdirs(), "Error making log directory: " + logDirectory.getAbsolutePath()); + } + this.logDirectory = logDirectory; + } + + public static LogCollector fromPath(String path) { + return new FileLogCollector(new File(path)); + } + + @Override + public synchronized void startCollecting(DockerCompose dockerCompose) throws IOException, InterruptedException { + if (executor != null) { + throw new RuntimeException("Cannot start collecting the same logs twice"); + } + + List<String> serviceNames = dockerCompose.services(); + if (serviceNames.isEmpty()) { + return; + } + executor = Executors.newFixedThreadPool(serviceNames.size()); + serviceNames.stream().forEachOrdered(service -> this.collectLogs(service, dockerCompose)); + } + + private void collectLogs(String container, DockerCompose dockerCompose) { + executor.submit(() -> { + File outputFile = new File(logDirectory, container + ".log"); + try { + Files.createFile(outputFile.toPath()); + } catch (final FileAlreadyExistsException e) { + // ignore + } catch (final IOException e) { + throw new RuntimeException("Error creating log file", e); + } + log.info("Writing logs for container '{}' to '{}'", container, outputFile.getAbsolutePath()); + try (FileOutputStream outputStream = new FileOutputStream(outputFile)) { + dockerCompose.writeLogs(container, outputStream); + } catch (IOException e) { + throw new RuntimeException("Error reading log", e); + } + }); + } + + @Override + public synchronized void stopCollecting() throws InterruptedException { + if (executor == null) { + return; + } + if (!executor.awaitTermination(STOP_TIMEOUT_IN_MILLIS, TimeUnit.MILLISECONDS)) { + log.warn("docker containers were still running when log collection stopped"); + executor.shutdownNow(); + } + executor = null; + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/LogCollector.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/LogCollector.java new file mode 100644 index 0000000000..c6d5c7b8ba --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/LogCollector.java @@ -0,0 +1,28 @@ +/* + * Copyright 2018-2019 the original author or authors.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.logging; + +import java.io.IOException; + +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; + +public interface LogCollector { + + void startCollecting(DockerCompose dockerCompose) throws IOException, InterruptedException; + + void stopCollecting() throws InterruptedException; + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/LogDirectory.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/LogDirectory.java new file mode 100644 index 0000000000..17485a851c --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/LogDirectory.java @@ -0,0 +1,53 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.logging; + +import java.util.Optional; + +public class LogDirectory { + + private LogDirectory() {} + + /** + * For tests running on CircleCI, save logs into $CIRCLE_ARTIFACTS/dockerLogs/<testClassName>. + * This ensures partial logs can be recovered if the build is cancelled or times out, and + * also avoids needless copying. + * + * Otherwise, save logs from local runs to a folder inside $project/build/dockerLogs named + * after the test class. + * + * @param testClass the JUnit test class whose name will appear on the log folder + * @return log directory + */ + public static String circleAwareLogDirectory(Class<?> testClass) { + return circleAwareLogDirectory(testClass.getSimpleName()); + } + + public static String circleAwareLogDirectory(String logDirectoryName) { + String artifactRoot = Optional.ofNullable(System.getenv("CIRCLE_ARTIFACTS")).orElse("build"); + return artifactRoot + "/dockerLogs/" + logDirectoryName; + } + + /** + * Save logs into a new folder, $project/build/dockerLogs/<testClassName>.
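+ * + * <p> + * For example, {@code gradleDockerLogsDirectory(MyIntegrationTest.class)} would return + * {@code "build/dockerLogs/MyIntegrationTest"} (where {@code MyIntegrationTest} is a hypothetical test class).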
+ * + * @param testClass the JUnit test class whose name will appear on the log folder + * @return log directory + */ + public static String gradleDockerLogsDirectory(Class<?> testClass) { + return "build/dockerLogs/" + testClass.getSimpleName(); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/AvailablePortMatcher.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/AvailablePortMatcher.java new file mode 100644 index 0000000000..1409ae5d37 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/AvailablePortMatcher.java @@ -0,0 +1,51 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.matchers; + +import java.util.Collection; +import java.util.stream.Collectors; +import org.hamcrest.Description; +import org.hamcrest.TypeSafeMatcher; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerPort; + +public class AvailablePortMatcher extends TypeSafeMatcher<Collection<DockerPort>> { + + @Override + public void describeTo(Description description) { + description.appendText("No ports to be unavailable"); + } + + @Override + protected boolean matchesSafely(Collection<DockerPort> unavailablePorts) { + return unavailablePorts.isEmpty(); + } + + @Override + protected void describeMismatchSafely(Collection<DockerPort> unavailablePorts, Description mismatchDescription) { + mismatchDescription.appendValueList("These ports were unavailable:\n", "\n", ".", buildClosedPortsErrorMessage(unavailablePorts)); + } + + private static Collection<String> buildClosedPortsErrorMessage(Collection<DockerPort> unavailablePorts) { + return unavailablePorts.stream() + .map(port -> "For host with ip address: " + port.getIp() + " external port '" + port.getExternalPort() + "' mapped to internal port '" + port.getInternalPort() + "' was unavailable") + .collect(Collectors.toList()); + } + + public static AvailablePortMatcher areAvailable() { + return new AvailablePortMatcher(); + } + +} diff --git a/spring-cloud-dataflow-shell/.jdk8 b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/resources/application.properties similarity index 100% rename from spring-cloud-dataflow-shell/.jdk8 rename to spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/resources/application.properties diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/TestContainerNames.java
b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/TestContainerNames.java new file mode 100644 index 0000000000..2bb20047a9 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/TestContainerNames.java @@ -0,0 +1,42 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose; + +import static java.util.stream.Collectors.toList; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerName; + +public class TestContainerNames { + + private TestContainerNames() {} + + public static List<ContainerName> of(String... semanticNames) { + return Arrays.stream(semanticNames) + .map(TestContainerNames::testContainerName) + .collect(toList()); + } + + private static ContainerName testContainerName(String testName) { + return ContainerName.builder() + .semanticName(testName) + .rawName("123456_" + testName + "_1") + .build(); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/AdditionalEnvironmentValidatorTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/AdditionalEnvironmentValidatorTests.java new file mode 100644 index 0000000000..a9de581835 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/AdditionalEnvironmentValidatorTests.java @@ -0,0 +1,51 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.HashMap; +import java.util.Map; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.AdditionalEnvironmentValidator; + +public class AdditionalEnvironmentValidatorTests { + + @Rule + public ExpectedException exception = ExpectedException.none(); + + @Test + public void throw_exception_when_additional_environment_variables_contain_docker_variables() { + Map<String, String> variables = new HashMap<>(); + variables.put("DOCKER_HOST", "tcp://some-host:2376"); + variables.put("SOME_VARIABLE", "Some Value"); + exception.expect(IllegalStateException.class); + exception.expectMessage("The following variables"); + exception.expectMessage("DOCKER_HOST"); + exception.expectMessage("cannot exist in your additional environment"); + AdditionalEnvironmentValidator.validate(variables); + } + + @Test + public void validate_arbitrary_environment_variables() { + Map<String, String> variables = new HashMap<>(); + variables.put("SOME_VARIABLE", "Some Value"); + + assertThat(AdditionalEnvironmentValidator.validate(variables)).isEqualTo(variables); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonEnvironmentValidatorTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonEnvironmentValidatorTests.java new file mode 100644 index 0000000000..16ecf3a3d9 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonEnvironmentValidatorTests.java @@ -0,0 +1,59 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_CERT_PATH; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_HOST; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_TLS_VERIFY; + +import java.util.HashMap; +import java.util.Map; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DaemonEnvironmentValidator; + +public class DaemonEnvironmentValidatorTests { + + @Rule + public ExpectedException exception = ExpectedException.none(); + + @Test + public void validate_successfully_when_docker_environment_does_not_contain_docker_variables() { + Map<String, String> variables = new HashMap<>(); + variables.put("SOME_VARIABLE", "SOME_VALUE"); + variables.put("ANOTHER_VARIABLE", "ANOTHER_VALUE"); + + DaemonEnvironmentValidator.instance().validateEnvironmentVariables(variables); + } + + @Test + public void throw_exception_when_docker_environment_contains_illegal_docker_variables() { + Map<String, String> variables = new HashMap<>(); + variables.put(DOCKER_HOST, "tcp://192.168.99.100:2376"); + variables.put(DOCKER_TLS_VERIFY, "1"); + variables.put(DOCKER_CERT_PATH, "/path/to/certs"); + + exception.expect(IllegalStateException.class); + exception.expectMessage("These variables were set:"); + exception.expectMessage(DOCKER_HOST); + exception.expectMessage(DOCKER_CERT_PATH); + exception.expectMessage(DOCKER_TLS_VERIFY); + exception.expectMessage("They cannot be set when connecting to a local docker daemon"); + DaemonEnvironmentValidator.instance().validateEnvironmentVariables(variables); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonHostIpResolverTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonHostIpResolverTests.java new file mode 100644 index 0000000000..9933d71ab5 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonHostIpResolverTests.java @@ -0,0 +1,41 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DaemonHostIpResolver.LOCALHOST; + +import org.junit.Test; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DaemonHostIpResolver; + +public class DaemonHostIpResolverTests { + + @Test + public void return_local_host_with_null() { + assertThat(new DaemonHostIpResolver().resolveIp(null)).isEqualTo(LOCALHOST); + } + + @Test + public void return_local_host_with_blank() { + assertThat(new DaemonHostIpResolver().resolveIp("")).isEqualTo(LOCALHOST); + } + + @Test + public void return_local_host_with_arbitrary() { + assertThat(new DaemonHostIpResolver().resolveIp("arbitrary")).isEqualTo(LOCALHOST); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerComposeFilesTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerComposeFilesTests.java new file mode 100644 index 0000000000..49f4a15d61 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerComposeFilesTests.java @@ -0,0 +1,127 @@ +/* + * Copyright 2018-2020 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import java.io.File; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; + +import org.assertj.core.api.Condition; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.junit.rules.TemporaryFolder; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +public class DockerComposeFilesTests { + + @Rule + public final TemporaryFolder tempFolder = new TemporaryFolder(); + + @Rule + public final ExpectedException exception = ExpectedException.none(); + + @Test + public void throw_exception_when_compose_file_is_not_specified() { + exception.expect(IllegalStateException.class); + exception.expectMessage("A docker compose file must be specified."); + DockerComposeFiles.from(); + } + + @Test + public void throw_exception_when_compose_file_does_not_exist() { + exception.expect(IllegalStateException.class); + exception.expectMessage("The following docker-compose files:"); + exception.expectMessage("does-not-exist.yaml"); + exception.expectMessage("do not exist."); + DockerComposeFiles.from("does-not-exist.yaml"); + } + + @Test + public void + throw_correct_exception_when_there_is_a_single_missing_compose_file_with_an_existing_compose_file() + throws Exception { + + assertThatThrownBy(() -> { + File composeFile = tempFolder.newFile("docker-compose.yaml"); + DockerComposeFiles.from("does-not-exist.yaml", composeFile.getAbsolutePath()); + }).isInstanceOf(IllegalStateException.class) + .hasMessageContaining("The following docker-compose files:") + .hasMessageContaining("does-not-exist.yaml") + .hasMessageContaining("do not exist.") + .hasMessageNotContaining("docker-compose.yaml"); + } + + @Test + public void generate_docker_compose_file_command_correctly_for_single_compose_file() throws Exception { + File composeFile = tempFolder.newFile("docker-compose.yaml"); + DockerComposeFiles dockerComposeFiles = DockerComposeFiles.from(composeFile.getAbsolutePath()); + assertThat(dockerComposeFiles.constructComposeFileCommand()).containsExactly("--file", + composeFile.getAbsolutePath()); + } + + @Test + public void generate_docker_compose_file_command_correctly_for_multiple_compose_files() throws Exception { + File composeFile1 = tempFolder.newFile("docker-compose1.yaml"); + File composeFile2 = tempFolder.newFile("docker-compose2.yaml"); + DockerComposeFiles dockerComposeFiles = DockerComposeFiles.from(composeFile1.getAbsolutePath(), composeFile2.getAbsolutePath()); + assertThat(dockerComposeFiles.constructComposeFileCommand()).containsExactly("--file", + composeFile1.getAbsolutePath(), "--file", composeFile2.getAbsolutePath()); + } + + @Test + public void testFromClasspathExist() { + DockerComposeFiles dockerComposeFiles = DockerComposeFiles.from("classpath:docker-compose-cp1.yaml", + "classpath:org/springframework/cloud/dataflow/common/test/docker/compose/docker-compose-cp2.yaml"); + assertThat(dockerComposeFiles.constructComposeFileCommand()).has(matchAll(is("--file"), + containsString("docker-compose-cp1.yaml"), + is("--file"), + containsString("docker-compose-cp2.yaml") + )); + } + + @Test + public void testFromClasspathDoesNotExist() { + exception.expect(IllegalArgumentException.class); + exception.expectMessage("Can't find resource classpath:does-not-exist.yaml"); + DockerComposeFiles.from("classpath:does-not-exist.yaml"); + } + + private static Condition<String> is(String value) { + return new Condition<>(s -> s.equals(value), "equals:" + value); + } + + private static Condition<String> containsString(String value) { + return new Condition<>(s -> s.contains(value), "contains:" + value); + } + private static Condition<List<String>> matchAll(Condition<String>... conditions) { + return new Condition<>(list -> { + assertThat(list).hasSize(conditions.length); + Iterator<String> iterList = list.iterator(); + Iterator<Condition<String>> conditionIterator = Arrays.asList(conditions).iterator(); + while(iterList.hasNext() && conditionIterator.hasNext()) { + Condition<String> condition = conditionIterator.next(); + if(!condition.matches(iterList.next())) { + throw new AssertionError(condition.description()); + } + } + return true; + }, "matches all"); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerTypeTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerTypeTests.java new file mode 100644 index 0000000000..72b95ba088 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerTypeTests.java @@ -0,0 +1,54 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; + +import org.junit.Test; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_CERT_PATH; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_HOST; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_TLS_VERIFY; + +public class DockerTypeTests { + + @Test + public void return_remote_as_first_valid_type_if_environment_is_illegal_for_daemon() { + + Map<String, String> variables = new HashMap<>(); + variables.put(DOCKER_HOST, "tcp://192.168.99.100:2376"); + variables.put(DOCKER_TLS_VERIFY, "1"); + variables.put(DOCKER_CERT_PATH, "/path/to/certs"); + assertThat(DockerType.getFirstValidDockerTypeForEnvironment(variables)).isEqualTo(Optional.of(DockerType.REMOTE)); + } + + @Test + public void return_daemon_as_first_valid_type_if_environment_is_illegal_for_remote() { + Map<String, String> variables = new HashMap<>(); + assertThat(DockerType.getFirstValidDockerTypeForEnvironment(variables)).isEqualTo(Optional.of(DockerType.DAEMON)); + } + + @Test + public void return_absent_as_first_valid_type_if_environment_is_illegal_for_all() { + Map<String, String> variables = new HashMap<>(); + variables.put(DOCKER_TLS_VERIFY, "1"); + assertThat(DockerType.getFirstValidDockerTypeForEnvironment(variables)).isEqualTo(Optional.empty()); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/MockDockerEnvironment.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/MockDockerEnvironment.java new file mode 100644 index 0000000000..3ce6ee1098 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/MockDockerEnvironment.java @@ -0,0 +1,94 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; + +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerPort; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Ports; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailure; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; + +public class MockDockerEnvironment { + + private final DockerCompose dockerComposeProcess; + + public MockDockerEnvironment(DockerCompose dockerComposeProcess) { + this.dockerComposeProcess = dockerComposeProcess; + } + + public DockerPort availableService(String service, String ip, int externalPortNumber, int internalPortNumber) throws Exception { + DockerPort port = port(service, ip, externalPortNumber, internalPortNumber); + doReturn(true).when(port).isListeningNow(); + return port; + } + + public DockerPort unavailableService(String service, String ip, int externalPortNumber, int internalPortNumber) throws Exception { + DockerPort port = port(service, ip, externalPortNumber, internalPortNumber); + doReturn(false).when(port).isListeningNow(); + return port; + } + + public DockerPort availableHttpService(String service, String ip, int externalPortNumber, int internalPortNumber) throws Exception { + DockerPort port = availableService(service, ip, externalPortNumber, internalPortNumber); + doReturn(true).when(port).isHttpResponding(any(), eq(false)); + doReturn(SuccessOrFailure.success()).when(port).isHttpRespondingSuccessfully(any(), eq(false)); + return port; + } + + public DockerPort unavailableHttpService(String service, String ip, int externalPortNumber, int internalPortNumber) throws Exception { + DockerPort port = availableService(service, ip, externalPortNumber, internalPortNumber); + doReturn(false).when(port).isHttpResponding(any(), eq(false)); + return port; + } + + public DockerPort port(String service, String ip, int externalPortNumber, int internalPortNumber) throws IOException, InterruptedException { + DockerPort port = dockerPortSpy(ip, externalPortNumber, internalPortNumber); + when(dockerComposeProcess.ports(service)).thenReturn(new Ports(port)); + return port; + } + + public void ephemeralPort(String service, String ip, int internalPortNumber) throws IOException, InterruptedException { + AtomicInteger currentExternalPort = new AtomicInteger(33700); + when(dockerComposeProcess.ports(service)).then(a -> { + DockerPort port = dockerPortSpy(ip, currentExternalPort.incrementAndGet(), internalPortNumber); + return new Ports(port); + }); + } + + public void ports(String service, String ip, Integer... 
portNumbers) throws IOException, InterruptedException { + List<DockerPort> ports = Arrays.asList(portNumbers) + .stream() + .map(portNumber -> dockerPortSpy(ip, portNumber, portNumber)) + .collect(Collectors.toList()); + when(dockerComposeProcess.ports(service)).thenReturn(new Ports(ports)); + } + + private static DockerPort dockerPortSpy(String ip, int externalPortNumber, int internalPortNumber) { + DockerPort port = new DockerPort(ip, externalPortNumber, internalPortNumber); + return spy(port); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/ProjectNameTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/ProjectNameTests.java new file mode 100644 index 0000000000..52665f1888 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/ProjectNameTests.java @@ -0,0 +1,85 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import java.util.List; + +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import static org.assertj.core.api.Assertions.assertThat; + +public class ProjectNameTests { + + @Rule + public final ExpectedException exception = ExpectedException.none(); + + @Test + public void use_project_name_prefix_in_construct_compose_command() { + List<String> command = ProjectName.random().constructComposeFileCommand(); + + assertThat(command).hasSize(2); + assertThat(command.get(0)).isEqualTo("--project-name"); + } + + @Test + public void produce_different_names_on_successive_calls_to_random() { + List<String> firstCommand = ProjectName.random().constructComposeFileCommand(); + List<String> secondCommand = ProjectName.random().constructComposeFileCommand(); + + assertThat(firstCommand).isNotEqualTo(secondCommand); + } + + @Test + public void have_eight_characters_long_random() { + String randomName = ProjectName.random().constructComposeFileCommand().get(1); + assertThat(randomName).hasSize(8); + } + + @Test + public void should_pass_name_to_command_in_from_string_factory() { + List<String> command = ProjectName.fromString("projectname").constructComposeFileCommand(); + assertThat(command).containsExactly("--project-name", "projectname"); + } + + @Test + public void should_disallow_names_in_from_string_factory() { + List<String> command = ProjectName.fromString("projectname").constructComposeFileCommand(); + assertThat(command).containsExactly("--project-name", "projectname"); + } + + @Test + public void reject_blanks_in_from_string() { + exception.expect(IllegalStateException.class); + exception.expectMessage("ProjectName must not be blank."); + ProjectName.fromString(" 
"); + } + + @Test + public void match_validation_behavior_of_docker_compose_cli() { + exception.expect(IllegalStateException.class); + exception.expectMessage("ProjectName 'Crazy#Proj ect!Name' not allowed, please use lowercase letters and numbers only."); + ProjectName.fromString("Crazy#Proj ect!Name"); + } + + @Test + public void should_return_the_project_name_when_asString_called() { + String projectName = ProjectName.fromString("projectname").asString(); + assertThat(projectName).isEqualTo("projectname"); + } +} + diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteEnvironmentValidatorTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteEnvironmentValidatorTests.java new file mode 100644 index 0000000000..b924e33176 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteEnvironmentValidatorTests.java @@ -0,0 +1,76 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_CERT_PATH; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_HOST; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_TLS_VERIFY; + +import java.util.HashMap; +import java.util.Map; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.RemoteEnvironmentValidator; + +public class RemoteEnvironmentValidatorTests { + + @Rule + public ExpectedException exception = ExpectedException.none(); + + @Test + public void throw_exception_if_docker_host_is_not_set() { + Map<String, String> variables = new HashMap<>(); + variables.put("SOME_VARIABLE", "SOME_VALUE"); + + exception.expect(IllegalStateException.class); + exception.expectMessage("Missing required environment variables: "); + exception.expectMessage(DOCKER_HOST); + RemoteEnvironmentValidator.instance().validateEnvironmentVariables(variables); + } + + @Test + public void throw_exception_if_docker_cert_path_is_missing_and_tls_is_on() { + Map<String, String> variables = new HashMap<>(); + variables.put(DOCKER_HOST, "tcp://192.168.99.100:2376"); + variables.put(DOCKER_TLS_VERIFY, "1"); + + exception.expect(IllegalStateException.class); + exception.expectMessage("Missing required environment variables: "); + exception.expectMessage(DOCKER_CERT_PATH); + RemoteEnvironmentValidator.instance().validateEnvironmentVariables(variables); + } + + @Test + public void validate_environment_with_all_valid_variables_set_without_tls() { + Map<String, String> variables = new HashMap<>(); + variables.put(DOCKER_HOST, "tcp://192.168.99.100:2376"); + variables.put("SOME_VARIABLE", "SOME_VALUE"); + + RemoteEnvironmentValidator.instance().validateEnvironmentVariables(variables); + } + + @Test + public void validate_environment_with_all_valid_variables_set_with_tls() { + Map<String, String> variables = new HashMap<>(); + variables.put(DOCKER_HOST, "tcp://192.168.99.100:2376"); + variables.put(DOCKER_TLS_VERIFY, "1"); + variables.put(DOCKER_CERT_PATH, "/path/to/certs"); + variables.put("SOME_VARIABLE", "SOME_VALUE"); + + RemoteEnvironmentValidator.instance().validateEnvironmentVariables(variables); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteHostIpResolverTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteHostIpResolverTests.java new file mode 100644 index 0000000000..56c115ab46 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteHostIpResolverTests.java @@ -0,0 +1,60 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.configuration; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.TCP_PROTOCOL; + +import org.hamcrest.Matchers; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.RemoteHostIpResolver; + +public class RemoteHostIpResolverTests { + + private static final String IP = "192.168.99.100"; + private static final int PORT = 2376; + + @Rule + public ExpectedException exception = ExpectedException.none(); + + @Test + public void result_in_error_blank_when_resolving_invalid_docker_host() { + exception.expect(IllegalArgumentException.class); + exception.expectMessage("DOCKER_HOST cannot be blank/null"); + new RemoteHostIpResolver().resolveIp(""); + } + + @Test + public void result_in_error_null_when_resolving_invalid_docker_host() { + exception.expect(IllegalArgumentException.class); + exception.expectMessage("DOCKER_HOST cannot be blank/null"); + new RemoteHostIpResolver().resolveIp(null); + } + + @Test + public void resolve_docker_host_with_port() { + String dockerHost = String.format("%s%s:%d", TCP_PROTOCOL, IP, PORT); + assertThat(new RemoteHostIpResolver().resolveIp(dockerHost)).isEqualTo(IP); + } + + @Test + public void resolve_docker_host_without_port() { + String dockerHost = String.format("%s%s", TCP_PROTOCOL, IP); + assertThat(new RemoteHostIpResolver().resolveIp(dockerHost)).isEqualTo(IP); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerCacheTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerCacheTests.java new file mode 100644 index 0000000000..35a5dc829c --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerCacheTests.java @@ -0,0 +1,47 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import org.junit.Test; + +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Docker; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; + +public class ContainerCacheTests { + + private static final String CONTAINER_NAME = "container"; + + private final Docker docker = mock(Docker.class); + private final DockerCompose dockerCompose = mock(DockerCompose.class); + private final ContainerCache containers = new ContainerCache(docker, dockerCompose); + + @Test + public void return_a_container_with_the_specified_name_when_getting_a_new_container() { + Container container = containers.container(CONTAINER_NAME); + assertThat(container).isEqualTo(new Container(CONTAINER_NAME, docker, dockerCompose)); + } + + @Test + public void return_the_same_object_when_getting_a_container_twice() { + Container container = containers.container(CONTAINER_NAME); + Container sameContainer = containers.container(CONTAINER_NAME); + assertThat(container).isSameAs(sameContainer); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerIntegrationTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerIntegrationTests.java new file mode 100644 index 0000000000..26405219e9 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerIntegrationTests.java @@ -0,0 +1,89 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import java.io.IOException; +import java.util.concurrent.TimeUnit; + +import org.awaitility.core.ConditionFactory; +import org.junit.Test; + +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerComposeFiles; +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ProjectName; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DefaultDockerCompose; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Docker; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerExecutable; + +import static org.awaitility.Awaitility.await; +import static org.hamcrest.Matchers.equalTo; +import static org.junit.Assert.assertEquals; +import static org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecArgument.arguments; +import static org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecOption.noOptions; + +public class ContainerIntegrationTests { + + private static final ConditionFactory wait = await().atMost(10, TimeUnit.SECONDS); + + private final DockerMachine dockerMachine = DockerMachine.localMachine().build(); + private final Docker docker = new Docker(DockerExecutable.builder() + .dockerConfiguration(dockerMachine) + .build()); + + @Test + public void testStateChanges_withoutHealthCheck() throws IOException, InterruptedException { + DockerCompose dockerCompose = new DefaultDockerCompose( + DockerComposeFiles.from("src/test/resources/no-healthcheck.yaml"), + dockerMachine, + ProjectName.random()); + + // The noHealthcheck service has no healthcheck specified; it should be immediately healthy + Container container = new Container("noHealthcheck", docker, dockerCompose); + assertEquals(State.DOWN, container.state()); + container.up(); + assertEquals(State.HEALTHY, container.state()); + container.kill(); + assertEquals(State.DOWN, container.state()); + } + + /** + * This test is not currently enabled in Circle as it does not provide a sufficiently recent version of docker-compose. 
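+ * Running it locally needs docker 1.12+ and docker-compose 1.10+ for native healthchecks, per the commented-out version assumptions in the test body.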
+ * + * @see Issue #156 + */ + @Test + public void testStateChanges_withHealthCheck() throws IOException, InterruptedException { +// assumeThat("docker version", Docker.version(), new GreaterOrEqual<>(Version.forIntegers(1, 12, 0))); +// assumeThat("docker-compose version", DockerCompose.version(), new GreaterOrEqual<>(Version.forIntegers(1, 10, 0))); + + DockerCompose dockerCompose = new DefaultDockerCompose( + DockerComposeFiles.from("src/test/resources/native-healthcheck.yaml"), + dockerMachine, + ProjectName.random()); + + // The withHealthcheck service's healthcheck checks every 100ms whether the file "healthy" exists + Container container = new Container("withHealthcheck", docker, dockerCompose); + assertEquals(State.DOWN, container.state()); + container.up(); + assertEquals(State.UNHEALTHY, container.state()); + dockerCompose.exec(noOptions(), "withHealthcheck", arguments("touch", "healthy")); + wait.until(container::state, equalTo(State.HEALTHY)); + dockerCompose.exec(noOptions(), "withHealthcheck", arguments("rm", "healthy")); + wait.until(container::state, equalTo(State.UNHEALTHY)); + container.kill(); + assertEquals(State.DOWN, container.state()); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerNameTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerNameTests.java new file mode 100644 index 0000000000..de34845325 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerNameTests.java @@ -0,0 +1,99 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import java.util.List; + +import org.junit.Test; + +import static java.util.Collections.emptyList; +import static org.assertj.core.api.Assertions.assertThat; + +public class ContainerNameTests { + + @Test + public void parse_a_semantic_and_raw_name_correctly_from_a_single_line() { + ContainerName actual = ContainerName.fromPsLine("dir_db_1 other line contents"); + + ContainerName expected = ContainerName.builder() + .rawName("dir_db_1") + .semanticName("db") + .build(); + + assertThat(actual).isEqualTo(expected); + } + + @Test + public void can_handle_custom_container_names() { + ContainerName name = ContainerName.fromPsLine("test-1.container.name /docker-entrypoint.sh postgres Up 5432/tcp"); + + ContainerName expected = ContainerName.builder() + .rawName("test-1.container.name") + .semanticName("test-1.container.name") + .build(); + + assertThat(name).isEqualTo(expected); + } + + @Test + public void result_in_no_container_names_when_ps_output_is_empty() { + List<ContainerName> names = ContainerNames.parseFromDockerComposePs("\n----\n"); + assertThat(names).isEqualTo(emptyList()); + } + + @Test + public void result_in_a_single_container_name_when_ps_output_has_a_single_container() { + List<ContainerName> names = ContainerNames.parseFromDockerComposePs("\n----\ndir_db_1 other line contents"); + assertThat(names).containsExactly(containerName("dir", "db", "1")); + } + + @Test + public void allow_windows_newline_characters() { + List<ContainerName> names = ContainerNames.parseFromDockerComposePs("\r\n----\r\ndir_db_1 other line contents"); + assertThat(names).containsExactly(containerName("dir", "db", "1")); + } + + @Test + public void allow_containers_with_underscores_in_their_name() { + List<ContainerName> names = ContainerNames.parseFromDockerComposePs("\n----\ndir_left_right_1 other line contents"); + assertThat(names).containsExactly(containerName("dir", "left_right", "1")); + } + + @Test + public void result_in_two_container_names_when_ps_output_has_two_containers() { + List<ContainerName> names = ContainerNames.parseFromDockerComposePs("\n----\ndir_db_1 other line contents\ndir_db2_1 other stuff"); + assertThat(names).containsExactly(containerName("dir", "db", "1"), containerName("dir", "db2", "1")); + } + + @Test + public void ignore_an_empty_line_in_ps_output() { + List<ContainerName> names = ContainerNames.parseFromDockerComposePs("\n----\ndir_db_1 other line contents\n\n"); + assertThat(names).containsExactly(containerName("dir", "db", "1")); + } + + @Test + public void ignore_a_line_with_only_spaces_in_ps_output() { + List<ContainerName> names = ContainerNames.parseFromDockerComposePs("\n----\ndir_db_1 other line contents\n \n"); + assertThat(names).containsExactly(containerName("dir", "db", "1")); + } + + private static ContainerName containerName(String project, String semantic, String number) { + return ContainerName.builder() + .rawName(project + "_" + semantic + "_" + number) + .semanticName(semantic) + .build(); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerTests.java new file mode 100644 index 0000000000..9b52fae436 --- /dev/null +++
b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/ContainerTests.java @@ -0,0 +1,152 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import java.io.IOException; + +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.MockDockerEnvironment; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Docker; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailureMatchers.failureWithMessage; +import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailureMatchers.successful; +// @checkstyle:on + +public class ContainerTests { + + private static final String IP = "127.0.0.1"; + + @Rule + public ExpectedException exception = ExpectedException.none(); + + private final Docker docker = mock(Docker.class); + private final DockerCompose dockerCompose = mock(DockerCompose.class); + private final MockDockerEnvironment env = new MockDockerEnvironment(dockerCompose); + private final Container container = new Container("service", docker, dockerCompose); + + @Test + public void return_port_for_container_when_external_port_number_given() throws Exception { + DockerPort expected = env.availableService("service", IP, 5433, 5432); + DockerPort port = container.portMappedExternallyTo(5433); + assertThat(port).isEqualTo(expected); + } + + @Test + public void return_port_for_container_when_internal_port_number_given() throws Exception { + DockerPort expected = env.availableService("service", IP, 5433, 5432); + DockerPort port = container.port(5432); + assertThat(port).isEqualTo(expected); + } + + @Test + public void call_docker_ports_once_when_two_ports_are_requested() throws Exception { + env.ports("service", IP, 8080, 8081); + container.port(8080); + container.port(8081); + verify(dockerCompose, times(1)).ports("service"); + } + + @Test + public void return_updated_external_port_on_restart() throws IOException, InterruptedException { + int internalPort = 5432; + env.ephemeralPort("service", IP, internalPort); + + DockerPort port = container.port(internalPort); + int prePort = port.getExternalPort(); + + DockerPort samePort = 
container.port(internalPort); + assertThat(prePort).isEqualTo(samePort.getExternalPort()); + + container.stop(); + container.start(); + + DockerPort updatedPort = container.port(internalPort); + assertThat(prePort, not(is(updatedPort.getExternalPort()))); + } + + @Test + public void throw_illegal_argument_exception_when_a_port_for_an_unknown_external_port_is_requested() + throws Exception { + // Service must have ports, otherwise we end up with an exception telling you the service is not listening at all + env.availableService("service", IP, 5400, 5400); + exception.expect(IllegalArgumentException.class); + exception.expectMessage("No port mapped externally to '5432' for container 'service'"); + container.portMappedExternallyTo(5432); + } + + @Test + public void throw_illegal_argument_exception_when_a_port_for_an_unknown_internal_port_is_requested() + throws Exception { + env.availableService("service", IP, 5400, 5400); + exception.expect(IllegalArgumentException.class); + exception.expectMessage("No internal port '5432' for container 'service'"); + container.port(5432); + } + + @Test + public void have_all_ports_open_if_all_exposed_ports_are_open() throws Exception { + env.availableHttpService("service", IP, 1234, 1234); + + assertThat(container.areAllPortsOpen(), successful()); + } + + @Test + public void not_have_all_ports_open_if_has_at_least_one_closed_port_and_report_the_name_of_the_port() throws Exception { + int unavailablePort = 4321; + String unavailablePortString = Integer.toString(unavailablePort); + + env.availableService("service", IP, 1234, 1234); + env.unavailableService("service", IP, unavailablePort, unavailablePort); + + assertThat(container.areAllPortsOpen(), failureWithMessage(containsString(unavailablePortString))); + } + + @Test + public void be_listening_on_http_when_the_port_is() throws Exception { + env.availableHttpService("service", IP, 1234, 2345); + + assertThat( + container.portIsListeningOnHttp(2345, port -> "http://some.url/" + port), + successful()); + } + + @Test + public void not_be_listening_on_http_when_the_port_is_not_and_reports_the_port_number_and_url() throws Exception { + int unavailablePort = 1234; + String unavailablePortString = Integer.toString(unavailablePort); + + env.unavailableHttpService("service", IP, unavailablePort, unavailablePort); + + assertThat( + container.portIsListeningOnHttp(unavailablePort, port -> "http://some.url/" + port.getInternalPort()), + failureWithMessage( + containsString("http://some.url/" + unavailablePortString) + )); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/DockerPortFormattingTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/DockerPortFormattingTests.java new file mode 100644 index 0000000000..350b72009d --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/DockerPortFormattingTests.java @@ -0,0 +1,45 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import org.junit.Test; + +import static org.assertj.core.api.Assertions.assertThat; + +public class DockerPortFormattingTests { + private final DockerPort dockerPort = new DockerPort("hostname", 1234, 4321); + + @Test public void + have_no_effect_on_a_string_with_no_substitutions() { + assertThat(dockerPort.inFormat("no substitutions")).isEqualTo("no substitutions"); + } + + @Test public void + allow_building_an_externally_accessible_address() { + assertThat(dockerPort.inFormat("http://$HOST:$EXTERNAL_PORT/api")).isEqualTo("http://hostname:1234/api"); + } + + @Test public void + allow_building_an_address_with_an_internal_port() { + assertThat(dockerPort.inFormat("http://localhost:$INTERNAL_PORT/api")).isEqualTo("http://localhost:4321/api"); + } + + @Test public void + allow_multiple_copies_of_each_substitution() { + assertThat(dockerPort.inFormat("$HOST,$HOST,$INTERNAL_PORT,$INTERNAL_PORT,$EXTERNAL_PORT,$EXTERNAL_PORT")).isEqualTo("hostname,hostname,4321,4321,1234,1234"); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/LocalBuilderTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/LocalBuilderTests.java new file mode 100644 index 0000000000..b7d25fbcc3 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/LocalBuilderTests.java @@ -0,0 +1,231 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerMachine.LocalBuilder; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.not; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DaemonHostIpResolver.LOCALHOST; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerType.DAEMON; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerType.REMOTE; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_CERT_PATH; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_HOST; +import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_TLS_VERIFY; +import static org.springframework.cloud.dataflow.common.test.docker.compose.matchers.DockerMachineEnvironmentMatcher.containsEnvironment; + +public class LocalBuilderTests { + + @Rule + public ExpectedException exception = ExpectedException.none(); + + @Test + public void override_previous_environment_when_additional_environment_set_twice_daemon() { + Map<String, String> environment1 = new HashMap<>(); + environment1.put("ENV_1", "VAL_1"); + Map<String, String> environment2 = new HashMap<>(); + environment2.put("ENV_2", "VAL_2"); + DockerMachine localMachine = new LocalBuilder(DAEMON, new HashMap<>()).withEnvironment(environment1) + .withEnvironment(environment2) + .build(); + assertThat(localMachine, not(containsEnvironment(environment1))); + assertThat(localMachine, containsEnvironment(environment2)); + } + + @Test + public void be_union_of_additional_environment_and_individual_environment_when_both_set_daemon() { + Map<String, String> environment = new HashMap<>(); + environment.put("ENV_1", "VAL_1"); + environment.put("ENV_2", "VAL_2"); + DockerMachine localMachine = new LocalBuilder(DAEMON, new HashMap<>()).withEnvironment(environment) + .withAdditionalEnvironmentVariable("ENV_3", "VAL_3") + .build(); + assertThat(localMachine, containsEnvironment(environment)); + Map<String, String> environment2 = new HashMap<>(); + environment2.put("ENV_3", "VAL_3"); + assertThat(localMachine, containsEnvironment(environment2)); + } + + @Test + public void override_previous_environment_with_additional_environment_set_twice_remote() { + Map<String, String> dockerVariables = new HashMap<>(); + dockerVariables.put(DOCKER_HOST, "tcp://192.168.99.100:2376"); + Map<String, String> environment1 = new HashMap<>(); + environment1.put("ENV_1", "VAL_1"); + Map<String, String> environment2 = new HashMap<>(); + environment2.put("ENV_2", "VAL_2"); + DockerMachine localMachine = new LocalBuilder(REMOTE, dockerVariables).withEnvironment(environment1) + .withEnvironment(environment2) + .build(); + assertThat(localMachine, not(containsEnvironment(environment1))); + assertThat(localMachine, containsEnvironment(environment2)); + } + + @Test + public void be_union_of_additional_environment_and_individual_environment_when_both_set_remote() { + Map<String, String> dockerVariables = new HashMap<>(); + dockerVariables.put(DOCKER_HOST, "tcp://192.168.99.100:2376"); + Map<String, String> environment = new HashMap<>(); + environment.put("ENV_1", "VAL_1"); +
environment.put("ENV_2", "VAL_2");
+ DockerMachine localMachine = new LocalBuilder(REMOTE, dockerVariables).withEnvironment(environment)
+ .withAdditionalEnvironmentVariable("ENV_3", "VAL_3")
+ .build();
+ assertThat(localMachine, containsEnvironment(environment));
+ Map<String, String> environment2 = new HashMap<>();
+ environment2.put("ENV_3", "VAL_3");
+ assertThat(localMachine, containsEnvironment(environment2));
+ }
+
+ @Test
+ public void get_variable_overriden_with_additional_environment() {
+ Map<String, String> environment = new HashMap<>();
+ environment.put("ENV_1", "VAL_1");
+ environment.put("ENV_2", "VAL_2");
+ DockerMachine localMachine = new LocalBuilder(DAEMON, new HashMap<>()).withEnvironment(environment)
+ .withAdditionalEnvironmentVariable("ENV_2", "DIFFERENT_VALUE")
+ .build();
+
+ Map<String, String> expected = new HashMap<>();
+ expected.put("ENV_1", "VAL_1");
+ expected.put("ENV_2", "DIFFERENT_VALUE");
+ assertThat(localMachine, not(containsEnvironment(environment)));
+ assertThat(localMachine, containsEnvironment(expected));
+ }
+
+ @Test
+ public void override_system_environment_with_additional_environment() {
+ Map<String, String> systemEnv = new HashMap<>();
+ systemEnv.put("ENV_1", "VAL_1");
+ Map<String, String> overrideEnv = new HashMap<>();
+ overrideEnv.put("ENV_1", "DIFFERENT_VALUE");
+ DockerMachine localMachine = new LocalBuilder(DAEMON, systemEnv)
+ .withEnvironment(overrideEnv)
+ .build();
+
+ assertThat(localMachine, not(containsEnvironment(systemEnv)));
+ assertThat(localMachine, containsEnvironment(overrideEnv));
+ }
+
+ @Test
+ public void have_invalid_variables_daemon() {
+ Map<String, String> invalidDockerVariables = new HashMap<>();
+ invalidDockerVariables.put(DOCKER_HOST, "tcp://192.168.99.100:2376");
+ invalidDockerVariables.put(DOCKER_TLS_VERIFY, "1");
+ invalidDockerVariables.put(DOCKER_CERT_PATH, "/path/to/certs");
+
+ exception.expect(IllegalStateException.class);
+ exception.expectMessage("These variables were set");
+ exception.expectMessage(DOCKER_HOST);
+ exception.expectMessage(DOCKER_CERT_PATH);
+ exception.expectMessage(DOCKER_TLS_VERIFY);
+ exception.expectMessage("They cannot be set when connecting to a local docker daemon");
+
+ new LocalBuilder(DAEMON, invalidDockerVariables).build();
+ }
+
+ @Test
+ public void have_invalid_additional_variables_daemon() {
+ exception.expect(IllegalStateException.class);
+ exception.expectMessage("The following variables");
+ exception.expectMessage(DOCKER_HOST);
+ exception.expectMessage("cannot exist in your additional environment variable block");
+
+ new LocalBuilder(DAEMON, new HashMap<>()).withAdditionalEnvironmentVariable(DOCKER_HOST, "tcp://192.168.99.100:2376")
+ .build();
+ }
+
+ @Test
+ public void have_invalid_additional_variables_remote() {
+ Map<String, String> dockerVariables = new HashMap<>();
+ dockerVariables.put(DOCKER_HOST, "tcp://192.168.99.100:2376");
+ dockerVariables.put(DOCKER_TLS_VERIFY, "1");
+ dockerVariables.put(DOCKER_CERT_PATH, "/path/to/certs");
+
+ exception.expect(IllegalStateException.class);
+ exception.expectMessage("The following variables");
+ exception.expectMessage(DOCKER_HOST);
+ exception.expectMessage("cannot exist in your additional environment variable block");
+
+ new LocalBuilder(REMOTE, dockerVariables).withAdditionalEnvironmentVariable(DOCKER_HOST, "tcp://192.168.99.101:2376")
+ .build();
+ }
+
+ @Test
+ public void return_localhost_as_ip_daemon() {
+ DockerMachine localMachine = new LocalBuilder(DAEMON, new HashMap<>()).build();
+ assertThat(localMachine.getIp()).isEqualTo(LOCALHOST);
+ }
+
+ @Test
+ public void return_docker_host_as_ip_remote() {
+ Map<String, String>
dockerVariables = new HashMap<>();
+ dockerVariables.put(DOCKER_HOST, "tcp://192.168.99.100:2376");
+ dockerVariables.put(DOCKER_TLS_VERIFY, "1");
+ dockerVariables.put(DOCKER_CERT_PATH, "/path/to/certs");
+
+ DockerMachine localMachine = new LocalBuilder(REMOTE, dockerVariables).build();
+ assertThat(localMachine.getIp()).isEqualTo("192.168.99.100");
+ }
+
+ @Test
+ public void have_missing_docker_host_remote() {
+ exception.expect(IllegalStateException.class);
+ exception.expectMessage("Missing required environment variables: ");
+ exception.expectMessage(DOCKER_HOST);
+ new LocalBuilder(REMOTE, new HashMap<>()).build();
+ }
+
+ @Test
+ public void build_without_tls_remote() {
+ Map<String, String> dockerVariables = new HashMap<>();
+ dockerVariables.put(DOCKER_HOST, "tcp://192.168.99.100:2376");
+
+ DockerMachine localMachine = new LocalBuilder(REMOTE, dockerVariables).build();
+ assertThat(localMachine, containsEnvironment(dockerVariables));
+ }
+
+ @Test
+ public void have_missing_cert_path_remote() {
+ Map<String, String> dockerVariables = new HashMap<>();
+ dockerVariables.put(DOCKER_HOST, "tcp://192.168.99.100:2376");
+ dockerVariables.put(DOCKER_TLS_VERIFY, "1");
+
+ exception.expect(IllegalStateException.class);
+ exception.expectMessage("Missing required environment variables: ");
+ exception.expectMessage(DOCKER_CERT_PATH);
+ new LocalBuilder(REMOTE, dockerVariables).build();
+ }
+
+ @Test
+ public void build_with_tls_remote() {
+ Map<String, String> dockerVariables = new HashMap<>();
+ dockerVariables.put(DOCKER_HOST, "tcp://192.168.99.100:2376");
+ dockerVariables.put(DOCKER_TLS_VERIFY, "1");
+ dockerVariables.put(DOCKER_CERT_PATH, "/path/to/certs");
+
+ DockerMachine localMachine = new LocalBuilder(REMOTE, dockerVariables).build();
+ assertThat(localMachine, containsEnvironment(dockerVariables));
+ }
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/PortsTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/PortsTests.java
new file mode 100644
index 0000000000..a00c05c04b
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/PortsTests.java
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection; + +import java.util.Arrays; + +import org.junit.Test; + +import static java.util.Collections.emptyList; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + + +public class PortsTests { + + private static final String LOCALHOST_IP = "127.0.0.1"; + + @Test + public void result_in_no_ports_when_there_are_no_ports_in_ps_output() { + String psOutput = "------"; + Ports ports = Ports.parseFromDockerComposePs(psOutput, null); + Ports expected = new Ports(emptyList()); + assertThat(ports).isEqualTo(expected); + } + + @Test + public void result_in_single_port_when_there_is_single_tcp_port_mapping() { + String psOutput = "0.0.0.0:5432->5432/tcp"; + Ports ports = Ports.parseFromDockerComposePs(psOutput, LOCALHOST_IP); + Ports expected = new Ports(Arrays.asList(new DockerPort(LOCALHOST_IP, 5432, 5432))); + assertThat(ports).isEqualTo(expected); + } + + @Test + public void + result_in_single_port_with_ip_other_than_localhost_when_there_is_single_tcp_port_mapping() { + String psOutput = "10.0.1.2:1234->2345/tcp"; + Ports ports = Ports.parseFromDockerComposePs(psOutput, LOCALHOST_IP); + Ports expected = new Ports(Arrays.asList(new DockerPort("10.0.1.2", 1234, 2345))); + assertThat(ports).isEqualTo(expected); + } + + @Test + public void result_in_two_ports_when_there_are_two_tcp_port_mappings() { + String psOutput = "0.0.0.0:5432->5432/tcp, 0.0.0.0:5433->5432/tcp"; + Ports ports = Ports.parseFromDockerComposePs(psOutput, LOCALHOST_IP); + Ports expected = new Ports(Arrays.asList(new DockerPort(LOCALHOST_IP, 5432, 5432), + new DockerPort(LOCALHOST_IP, 5433, 5432))); + assertThat(ports).isEqualTo(expected); + } + + @Test + public void result_in_no_ports_when_there_is_a_non_mapped_exposed_port() { + String psOutput = "5432/tcp"; + Ports ports = Ports.parseFromDockerComposePs(psOutput, LOCALHOST_IP); + Ports expected = new Ports(emptyList()); + assertThat(ports).isEqualTo(expected); + } + + @Test + public void parse_actual_docker_compose_output() { + String psOutput = + " Name Command State Ports \n" + + "-------------------------------------------------------------------------------------------------------------------------------------------------\n" + + "postgres_postgres_1 /bin/sh -c /usr/local/bin/ ... 
Up 0.0.0.0:8880->8880/tcp, 8881/tcp, 8882/tcp, 8883/tcp, 8884/tcp, 8885/tcp, 8886/tcp \n" + + ""; + Ports ports = Ports.parseFromDockerComposePs(psOutput, LOCALHOST_IP); + Ports expected = new Ports(Arrays.asList(new DockerPort(LOCALHOST_IP, 8880, 8880))); + assertThat(ports).isEqualTo(expected); + } + + @Test + public void throw_illegal_state_exception_when_no_running_container_found_for_service() { + assertThatThrownBy(() -> Ports.parseFromDockerComposePs("", ""), + "Expected Ports.parseFromDockerComposePs to throw, but it didn't") + .hasMessageContaining("No container found") + .isInstanceOf(IllegalStateException.class); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/RemoteBuilderTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/RemoteBuilderTests.java new file mode 100644 index 0000000000..01625c1d27 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/RemoteBuilderTests.java @@ -0,0 +1,91 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.connection;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_CERT_PATH;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_HOST;
+
+public class RemoteBuilderTests {
+
+ @Rule
+ public ExpectedException exception = ExpectedException.none();
+
+ @Test
+ public void throw_exception_when_building_a_docker_machine_without_a_host() {
+ exception.expect(IllegalStateException.class);
+ exception.expectMessage("Missing required environment variables");
+ exception.expectMessage("DOCKER_HOST");
+ DockerMachine.remoteMachine()
+ .withoutTLS()
+ .build();
+ }
+
+ @Test
+ public void have_no_tls_environment_variables_when_a_docker_machine_is_built_without_tls() {
+ DockerMachine dockerMachine = DockerMachine.remoteMachine()
+ .host("tcp://192.168.99.100")
+ .withoutTLS()
+ .build();
+
+ Map<String, String> expected = new HashMap<>();
+ expected.put(DOCKER_HOST, "tcp://192.168.99.100");
+
+ validateEnvironmentConfiguredDirectly(dockerMachine, expected);
+ }
+
+ @Test
+ public void have_tls_environment_variables_set_when_a_docker_machine_is_built_with_tls() {
+ DockerMachine dockerMachine = DockerMachine.remoteMachine()
+ .host("tcp://192.168.99.100")
+ .withTLS("/path/to/certs")
+ .build();
+
+ Map<String, String> expected = new HashMap<>();
+ expected.put(DOCKER_HOST, "tcp://192.168.99.100");
+ expected.put(DOCKER_CERT_PATH, "/path/to/certs");
+ validateEnvironmentConfiguredDirectly(dockerMachine, expected);
+ }
+
+ @Test
+ public void build_a_docker_machine_with_additional_environment_variables() {
+ DockerMachine dockerMachine = DockerMachine.remoteMachine()
+ .host("tcp://192.168.99.100")
+ .withoutTLS()
+ .withAdditionalEnvironmentVariable("SOME_VARIABLE", "SOME_VALUE")
+ .build();
+
+ Map<String, String> expected = new HashMap<>();
+ expected.put(DOCKER_HOST, "tcp://192.168.99.100");
+ expected.put("SOME_VARIABLE", "SOME_VALUE");
+ validateEnvironmentConfiguredDirectly(dockerMachine, expected);
+ }
+
+ private static void validateEnvironmentConfiguredDirectly(DockerMachine dockerMachine, Map<String, String> expectedEnvironment) {
+ ProcessBuilder process = dockerMachine.configuredDockerComposeProcess();
+
+ Map<String, String> environment = process.environment();
+ expectedEnvironment.forEach((var, val) -> assertThat(environment).containsEntry(var, val));
+ }
+
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ClusterWaitTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ClusterWaitTests.java
new file mode 100644
index 0000000000..2b1a2f62e2
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ClusterWaitTests.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting; + +import org.joda.time.Duration; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Cluster; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerCache; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailure.failure; +import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailure.success; + +public class ClusterWaitTests { + + private static final Duration DURATION = Duration.standardSeconds(1); + private static final String IP = "192.168.100.100"; + + private final ContainerCache containerCache = mock(ContainerCache.class); + private final ClusterHealthCheck clusterHealthCheck = mock(ClusterHealthCheck.class); + + private final Cluster cluster = Cluster.builder() + .containerCache(containerCache) + .ip(IP) + .build(); + + @Rule public ExpectedException exception = ExpectedException.none(); + + + @Test public void + return_when_a_cluster_is_ready() throws InterruptedException { + when(clusterHealthCheck.isClusterHealthy(cluster)).thenReturn(success()); + ClusterWait wait = new ClusterWait(clusterHealthCheck, DURATION); + wait.waitUntilReady(cluster); + } + + @Test public void + check_until_a_cluster_is_ready() throws InterruptedException { + when(clusterHealthCheck.isClusterHealthy(cluster)).thenReturn(failure("failure!"), success()); + ClusterWait wait = new ClusterWait(clusterHealthCheck, DURATION); + wait.waitUntilReady(cluster); + verify(clusterHealthCheck, times(2)).isClusterHealthy(cluster); + } + + @Test(timeout = 2000L) public void + timeout_if_the_cluster_is_not_healthy() throws InterruptedException { + when(clusterHealthCheck.isClusterHealthy(cluster)).thenReturn(failure("failure!")); + + exception.expect(IllegalStateException.class); + exception.expectMessage("failure!"); + + ClusterWait wait = new ClusterWait(clusterHealthCheck, DURATION); + + wait.waitUntilReady(cluster); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ExceptionsTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ExceptionsTests.java new file mode 100644 index 0000000000..994951c8e4 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/ExceptionsTests.java @@ -0,0 +1,30 @@ +/* + * Copyright 2018-2019 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting; + +import org.junit.Test; + +import static org.assertj.core.api.Assertions.assertThat; + +public class ExceptionsTests { + @Test + public void print_out_a_condensed_version_of_the_stacktrace() { + RuntimeException exception = new RuntimeException("foo", new IllegalStateException("bar", new UnsupportedOperationException("baz"))); + assertThat(Exceptions.condensedStacktraceFor(exception)).isEqualTo("java.lang.RuntimeException: foo\n" + + "java.lang.IllegalStateException: bar\n" + + "java.lang.UnsupportedOperationException: baz"); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/HttpHealthCheckTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/HttpHealthCheckTests.java new file mode 100644 index 0000000000..60b8cfdb81 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/HttpHealthCheckTests.java @@ -0,0 +1,62 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting;
+
+import java.util.function.Function;
+
+import org.junit.Test;
+
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerPort;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailureMatchers.failure;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailureMatchers.successful;
+
+public class HttpHealthCheckTests {
+ private static final Function<DockerPort, String> URL_FUNCTION = port -> null;
+ public static final int PORT = 1234;
+ private final Container container = mock(Container.class);
+
+ @Test
+ public void be_healthy_when_the_port_is_listening_over_http() {
+ whenTheContainerIsListeningOnHttpTo(PORT, URL_FUNCTION);
+
+ assertThat(
+ HealthChecks.toRespondOverHttp(PORT, URL_FUNCTION).isHealthy(container),
+ successful());
+ }
+
+ @Test
+ public void be_unhealthy_when_all_ports_are_not_listening() {
+ whenTheContainerIsNotListeningOnHttpTo(PORT, URL_FUNCTION);
+
+ assertThat(
+ HealthChecks.toRespondOverHttp(PORT, URL_FUNCTION).isHealthy(container),
+ failure());
+ }
+
+ private void whenTheContainerIsListeningOnHttpTo(int port, Function<DockerPort, String> urlFunction) {
+ when(container.portIsListeningOnHttp(port, urlFunction)).thenReturn(SuccessOrFailure.success());
+ }
+
+ private void whenTheContainerIsNotListeningOnHttpTo(int port, Function<DockerPort, String> urlFunction) {
+ when(container.portIsListeningOnHttp(port, urlFunction)).thenReturn(SuccessOrFailure.failure("not listening"));
+ }
+
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/PortsHealthCheckTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/PortsHealthCheckTests.java
new file mode 100644
index 0000000000..f1c951c43b
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/PortsHealthCheckTests.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting;
+
+import org.junit.Test;
+
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailureMatchers.failure;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailureMatchers.successful;
+
+public class PortsHealthCheckTests {
+ private final HealthCheck<Container> healthCheck = HealthChecks.toHaveAllPortsOpen();
+ private final Container container = mock(Container.class);
+
+ @Test
+ public void be_healthy_when_all_ports_are_listening() {
+ whenTheContainerHasAllPortsOpen();
+
+ assertThat(healthCheck.isHealthy(container), successful());
+ }
+
+ @Test
+ public void be_unhealthy_when_all_ports_are_not_listening() {
+ whenTheContainerDoesNotHaveAllPortsOpen();
+
+ assertThat(healthCheck.isHealthy(container), failure());
+ }
+
+ private void whenTheContainerDoesNotHaveAllPortsOpen() {
+ when(container.areAllPortsOpen()).thenReturn(SuccessOrFailure.failure("not all ports open"));
+ }
+
+ private void whenTheContainerHasAllPortsOpen() {
+ when(container.areAllPortsOpen()).thenReturn(SuccessOrFailure.success());
+ }
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/SuccessOrFailureMatchers.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/SuccessOrFailureMatchers.java
new file mode 100644
index 0000000000..6aa53c2678
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/SuccessOrFailureMatchers.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting;
+
+import org.hamcrest.Description;
+import org.hamcrest.FeatureMatcher;
+import org.hamcrest.Matcher;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import static org.hamcrest.Matchers.anything;
+import static org.hamcrest.Matchers.equalTo;
+
+public class SuccessOrFailureMatchers {
+
+ public static class Successful extends TypeSafeDiagnosingMatcher<SuccessOrFailure> {
+ @Override
+ protected boolean matchesSafely(SuccessOrFailure item, Description mismatchDescription) {
+ if (item.failed()) {
+ mismatchDescription.appendValue(item);
+ }
+
+ return item.succeeded();
+ }
+
+ @Override
+ public void describeTo(Description description) {
+ description.appendText("is successful");
+ }
+ }
+
+ public static Matcher<SuccessOrFailure> successful() {
+ return new Successful();
+ }
+
+ public static class Failure extends FeatureMatcher<SuccessOrFailure, String> {
+ public Failure(Matcher<? super String> subMatcher) {
+ super(subMatcher, "failure message of", "failure message");
+ }
+
+ @Override
+ protected String featureValueOf(SuccessOrFailure actual) {
+ return actual.failureMessage();
+ }
+
+ @Override
+ protected boolean matchesSafely(SuccessOrFailure actual, Description mismatch) {
+ if (actual.succeeded()) {
+ mismatch.appendValue(actual);
+ return false;
+ }
+
+ return super.matchesSafely(actual, mismatch);
+ }
+ }
+
+ public static Matcher<SuccessOrFailure> failure() {
+ return new Failure(anything());
+ }
+
+ public static Matcher<SuccessOrFailure> failureWithMessage(Matcher<? super String> messageMatcher) {
+ return new Failure(messageMatcher);
+ }
+
+ public static Matcher<SuccessOrFailure> failureWithMessage(String message) {
+ return new Failure(equalTo(message));
+ }
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/SuccessOrFailureTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/SuccessOrFailureTests.java
new file mode 100644
index 0000000000..ffdfb7bebb
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/connection/waiting/SuccessOrFailureTests.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting; + +import org.junit.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailureMatchers.failure; +import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailureMatchers.failureWithMessage; +import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.SuccessOrFailureMatchers.successful; + +public class SuccessOrFailureTests { + @Test + public void not_have_failed_if_actually_a_success() { + assertThat(SuccessOrFailure.success(), successful()); + } + + @Test + public void have_failed_if_actually_a_failure() { + assertThat(SuccessOrFailure.failure("oops"), failure()); + } + + @Test + public void return_the_failure_message_if_set() { + assertThat(SuccessOrFailure.failure("oops"), failureWithMessage("oops")); + } + + @Test + public void fail_from_an_exception() { + Exception exception = new RuntimeException("oh no"); + assertThat(SuccessOrFailure.fromException(exception), + failureWithMessage(containsString("oh no"))); + } + + @Test + public void succeed_on_a_lambda_that_returns_true() { + SuccessOrFailure successFromLambda = SuccessOrFailure.onResultOf(() -> true); + assertThat(successFromLambda, successful()); + } + + @Test + public void fail_on_a_lambda_that_throws_an_exception() { + SuccessOrFailure failureFromLambda = SuccessOrFailure.onResultOf(() -> { + throw new IllegalArgumentException("oh no"); + }); + + assertThat(failureFromLambda, failureWithMessage(containsString("oh no"))); + } + + @Test + public void fail_on_a_lambda_that_returns_false() { + SuccessOrFailure failureFromLambda = SuccessOrFailure.onResultOf(() -> false); + + assertThat(failureFromLambda, failureWithMessage("Attempt to complete healthcheck failed")); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/CommandTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/CommandTests.java new file mode 100644 index 0000000000..81e24f9fdc --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/CommandTests.java @@ -0,0 +1,113 @@ +/* + * Copyright 2018-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.execution;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+import java.util.function.Consumer;
+import java.util.stream.Collectors;
+
+import org.apache.commons.io.IOUtils;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyBoolean;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
+import static org.mockito.Mockito.when;
+
+class CommandTests {
+
+ private Process executedProcess = mock(Process.class);
+ private DockerComposeExecutable dockerComposeExecutable = mock(DockerComposeExecutable.class);
+ private ErrorHandler errorHandler = mock(ErrorHandler.class);
+ private Command dockerComposeCommand;
+ private final List<String> consumedLogLines = new ArrayList<>();
+ private final Consumer<String> logConsumer = s -> consumedLogLines.add(s);
+
+ @BeforeEach
+ void prepareForTest() throws IOException {
+ when(dockerComposeExecutable.commandName()).thenReturn("docker-compose");
+ when(dockerComposeExecutable.execute(anyBoolean(), any(String[].class))).thenReturn(executedProcess);
+ dockerComposeCommand = new Command(dockerComposeExecutable, logConsumer);
+ givenTheUnderlyingProcessHasOutput("");
+ givenTheUnderlyingProcessTerminatesWithAnExitCodeOf(0);
+ }
+
+ @Test
+ void invokeErrorHandlerWhenExitCodeOfTheExecutedProcessIsNonZero() throws Exception {
+ int expectedExitCode = 1;
+ givenTheUnderlyingProcessTerminatesWithAnExitCodeOf(expectedExitCode);
+ dockerComposeCommand.execute(errorHandler, true, "rm", "-f");
+ verify(errorHandler).handle(expectedExitCode, "", "docker-compose", "rm", "-f");
+ }
+
+ @Test
+ void notInvokeErrorHandlerWhenExitCodeOfTheExecutedProcessIsZero() throws Exception {
+ dockerComposeCommand.execute(errorHandler, true, "rm", "-f");
+ verifyNoMoreInteractions(errorHandler);
+ }
+
+ @Test
+ void returnOutputWhenExitCodeOfTheExecutedProcessIsNonZero() throws Exception {
+ String expectedOutput = "test output";
+ givenTheUnderlyingProcessTerminatesWithAnExitCodeOf(1);
+ givenTheUnderlyingProcessHasOutput(expectedOutput);
+ String commandOutput = dockerComposeCommand.execute(errorHandler, true, "rm", "-f");
+ assertThat(commandOutput).isEqualTo(expectedOutput);
+ }
+
+ @Test
+ void returnOutputWhenExitCodeOfTheExecutedProcessIsZero() throws Exception {
+ String expectedOutput = "test output";
+ givenTheUnderlyingProcessHasOutput(expectedOutput);
+ String commandOutput = dockerComposeCommand.execute(errorHandler, true, "rm", "-f");
+ assertThat(commandOutput).isEqualTo(expectedOutput);
+ }
+
+ @Test
+ void giveTheOutputToTheSpecifiedConsumerAsItIsAvailable() throws Exception {
+ givenTheUnderlyingProcessHasOutput("line 1\nline 2");
+ dockerComposeCommand.execute(errorHandler, true, "rm", "-f");
+ assertThat(consumedLogLines).containsExactly("line 1", "line 2");
+ }
+
+ @Disabled("flaky test: https://circleci.com/gh/palantir/docker-compose-rule/378, 370, 367, 366")
+ @Test
+ void notCreateLongLivedThreadsAfterExecution() throws Exception {
+ Set<Thread> preEntries = Thread.getAllStackTraces().keySet().stream().filter(Thread::isAlive).collect(Collectors.toSet());
+ int preThreadCount = preEntries.size();
+ dockerComposeCommand.execute(errorHandler, 
true, "rm", "-f"); + Set postEntries = Thread.getAllStackTraces().keySet().stream().filter(Thread::isAlive).collect(Collectors.toSet()); + int postThreadCount = postEntries.size(); + assertThat(postThreadCount).as(()-> "command thread pool has exited with extra threads:" + postEntries.removeAll(preEntries)).isEqualTo(preThreadCount); + } + + private void givenTheUnderlyingProcessHasOutput(String output) { + when(executedProcess.getInputStream()).thenReturn(IOUtils.toInputStream(output)); + } + + private void givenTheUnderlyingProcessTerminatesWithAnExitCodeOf(int exitCode) { + when(executedProcess.exitValue()).thenReturn(exitCode); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ConflictingContainerRemovingDockerComposeTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ConflictingContainerRemovingDockerComposeTests.java new file mode 100644 index 0000000000..c81dad8c4e --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/ConflictingContainerRemovingDockerComposeTests.java @@ -0,0 +1,153 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.anySet; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; + +public class ConflictingContainerRemovingDockerComposeTests { + private final DockerCompose dockerCompose = mock(DockerCompose.class); + private final Docker docker = mock(Docker.class); + + @Rule + public ExpectedException exception = ExpectedException.none(); + + @Test + public void require_retry_attempts_to_be_at_least_1() { + exception.expect(IllegalStateException.class); + exception.expectMessage("retryAttempts must be at least 1, was 0"); + new ConflictingContainerRemovingDockerCompose(dockerCompose, docker, 0); + } + + @Test + public void call_up_only_once_if_successful() throws IOException, InterruptedException { + ConflictingContainerRemovingDockerCompose conflictingContainerRemovingDockerCompose = new ConflictingContainerRemovingDockerCompose( + dockerCompose, docker); + conflictingContainerRemovingDockerCompose.up(); + + verify(dockerCompose, times(1)).up(); + verifyNoMoreInteractions(docker); + } + + @Test + public void call_rm_and_retry_up_if_conflicting_containers_exist() throws IOException, InterruptedException { + String conflictingContainer = "conflictingContainer"; + doThrow(new DockerExecutionException("The name \"" + conflictingContainer + "\" is already in use")).doNothing() + .when(dockerCompose).up(); + + ConflictingContainerRemovingDockerCompose conflictingContainerRemovingDockerCompose = new ConflictingContainerRemovingDockerCompose( + dockerCompose, docker); + conflictingContainerRemovingDockerCompose.up(); + + verify(dockerCompose, times(2)).up(); + verify(docker).rm(new HashSet<>(Arrays.asList(conflictingContainer))); + } + + @Test + public void retry_specified_number_of_times() throws IOException, InterruptedException { + String conflictingContainer = "conflictingContainer"; + DockerExecutionException dockerException = new DockerExecutionException( + "The name \"" + conflictingContainer + "\" is already in use"); + doThrow(dockerException).doThrow(dockerException).doNothing().when(dockerCompose).up(); + + ConflictingContainerRemovingDockerCompose conflictingContainerRemovingDockerCompose = new ConflictingContainerRemovingDockerCompose( + dockerCompose, docker, 3); + conflictingContainerRemovingDockerCompose.up(); + + verify(dockerCompose, times(3)).up(); + verify(docker, times(2)).rm(new HashSet<>(Arrays.asList(conflictingContainer))); + } + + @Test + public void ignore_docker_execution_exceptions_in_rm() throws IOException, InterruptedException { + String conflictingContainer = "conflictingContainer"; + doThrow(new DockerExecutionException("The name \"" + conflictingContainer + "\" is already in use")).doNothing() + .when(dockerCompose).up(); + doThrow(DockerExecutionException.class).when(docker).rm(anySet()); + + ConflictingContainerRemovingDockerCompose conflictingContainerRemovingDockerCompose = new ConflictingContainerRemovingDockerCompose( + dockerCompose, docker); + conflictingContainerRemovingDockerCompose.up(); + + verify(dockerCompose, 
times(2)).up();
+ verify(docker).rm(new HashSet<>(Arrays.asList(conflictingContainer)));
+ }
+
+ @Test
+ public void fail_on_non_docker_execution_exceptions_in_rm() throws IOException, InterruptedException {
+ String conflictingContainer = "conflictingContainer";
+ doThrow(new DockerExecutionException("The name \"" + conflictingContainer + "\" is already in use")).doNothing()
+ .when(dockerCompose).up();
+ doThrow(RuntimeException.class).when(docker).rm(anySet());
+
+ exception.expect(RuntimeException.class);
+ ConflictingContainerRemovingDockerCompose conflictingContainerRemovingDockerCompose = new ConflictingContainerRemovingDockerCompose(
+ dockerCompose, docker);
+ conflictingContainerRemovingDockerCompose.up();
+ }
+
+ @Test
+ public void throw_exception_if_retry_attempts_exceeded() throws IOException, InterruptedException {
+ String conflictingContainer = "conflictingContainer";
+ doThrow(new DockerExecutionException("The name \"" + conflictingContainer + "\" is already in use"))
+ .when(dockerCompose).up();
+
+ exception.expect(DockerExecutionException.class);
+ exception.expectMessage("docker-compose up failed");
+ ConflictingContainerRemovingDockerCompose conflictingContainerRemovingDockerCompose = new ConflictingContainerRemovingDockerCompose(
+ dockerCompose, docker);
+ conflictingContainerRemovingDockerCompose.up();
+ }
+
+ @Test
+ public void parse_container_names_from_error_message() {
+ String conflictingContainer = "conflictingContainer";
+
+ ConflictingContainerRemovingDockerCompose conflictingContainerRemovingDockerCompose = new ConflictingContainerRemovingDockerCompose(
+ dockerCompose, docker);
+ Set<String> conflictingContainerNames = conflictingContainerRemovingDockerCompose
+ .getConflictingContainerNames("The name \"" + conflictingContainer + "\" is already in use");
+
+ assertEquals(new HashSet<>(Arrays.asList(conflictingContainer)), conflictingContainerNames);
+ }
+
+ @Test
+ public void parse_container_names_from_error_message_since_v13() {
+ String conflictingContainer = "conflictingContainer";
+
+ ConflictingContainerRemovingDockerCompose conflictingContainerRemovingDockerCompose = new ConflictingContainerRemovingDockerCompose(
+ dockerCompose, docker);
+ Set<String> conflictingContainerNames = conflictingContainerRemovingDockerCompose
+ .getConflictingContainerNames("The container name \"" + conflictingContainer + "\" is already in use");
+
+ assertEquals(new HashSet<>(Arrays.asList(conflictingContainer)), conflictingContainerNames);
+ }
+
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerCommandLocationsTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerCommandLocationsTests.java
new file mode 100644
index 0000000000..3ea227eb48
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerCommandLocationsTests.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; + +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +import static java.util.Optional.empty; +import static org.assertj.core.api.Assertions.assertThat; + +public class DockerCommandLocationsTests { + private static final String badLocation = "file/that/does/not/exist"; + private static final String otherBadLocation = "another/file/that/does/not/exist"; + + @Rule public TemporaryFolder folder = new TemporaryFolder(); + + private String goodLocation; + + @Before + public void before() throws IOException { + goodLocation = folder.newFile("docker-compose.yml").getAbsolutePath(); + } + + @Test public void + provide_the_first_docker_command_location_if_it_exists() { + DockerCommandLocations dockerCommandLocations = new DockerCommandLocations( + badLocation, + goodLocation, + otherBadLocation); + + assertThat(dockerCommandLocations.preferredLocation()).contains(goodLocation); + } + + @Test public void + skip_paths_from_environment_variables_that_are_unset() { + DockerCommandLocations dockerCommandLocations = new DockerCommandLocations( + System.getenv("AN_UNSET_DOCKER_COMPOSE_PATH"), + goodLocation); + + assertThat(dockerCommandLocations.preferredLocation()).contains(goodLocation); + } + + @Test public void + have_no_preferred_path_when_all_possible_paths_are_all_invalid() { + DockerCommandLocations dockerCommandLocations = new DockerCommandLocations( + badLocation); + + assertThat(dockerCommandLocations.preferredLocation()).isEqualTo(empty()); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecOptionTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecOptionTests.java new file mode 100644 index 0000000000..4a3f01679e --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeExecOptionTests.java @@ -0,0 +1,30 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import org.junit.Test; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecOption.noOptions; + +public class DockerComposeExecOptionTests { + + @Test public void + be_constructable_with_no_args() { + DockerComposeExecOption option = noOptions(); + assertThat(option.options()).isEmpty(); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeTests.java new file mode 100644 index 0000000000..70caf10a69 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeTests.java @@ -0,0 +1,252 @@ +/* + * Copyright 2018-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.util.List; + +import org.apache.commons.io.IOUtils; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerName; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerMachine; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerPort; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Ports; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatExceptionOfType; +import static org.assertj.core.api.Assertions.assertThatIllegalStateException; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecArgument.arguments; +import static org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecOption.options; + +class DockerComposeTests { + + private DockerComposeExecutable executor = mock(DockerComposeExecutable.class); + private DockerMachine dockerMachine = 
mock(DockerMachine.class);
+ private DockerCompose compose = new DefaultDockerCompose(executor, dockerMachine);
+ private Process executedProcess = mock(Process.class);
+ private Container container = mock(Container.class);
+
+ @BeforeEach
+ void prepareForTest() throws IOException {
+ when(dockerMachine.getIp()).thenReturn("0.0.0.0");
+ when(executor.commandName()).thenReturn("docker-compose");
+ when(executor.execute(anyBoolean(), any(String[].class))).thenReturn(executedProcess);
+ when(executedProcess.getInputStream()).thenReturn(IOUtils.toInputStream("0.0.0.0:7000->7000/tcp"));
+ when(executedProcess.exitValue()).thenReturn(0);
+ when(container.getContainerName()).thenReturn("my-container");
+ }
+
+ @Test
+ void callDockerComposeUpWithDaemonFlagOnUp() throws Exception {
+ compose.up();
+ verify(executor).execute(true, "up", "-d");
+ }
+
+ @Test
+ void callDockerComposeRmWithForceAndVolumeFlagsOnRm() throws Exception {
+ compose.rm();
+ verify(executor).execute(true, "rm", "--force", "-v");
+ }
+
+ @Test
+ void callDockerComposeStopOnStop() throws Exception {
+ compose.stop(container);
+ verify(executor).execute(true, "stop", "my-container");
+ }
+
+ @Test
+ void callDockerComposeStartOnStart() throws Exception {
+ compose.start(container);
+ verify(executor).execute(true, "start", "my-container");
+ }
+
+ @Test
+ void parseAndReturnsContainerNamesOnPs() throws Exception {
+ when(executedProcess.getInputStream()).thenReturn(IOUtils.toInputStream("ps\n----\ndir_db_1"));
+ List<ContainerName> containerNames = compose.ps();
+ verify(executor).execute(true, "ps");
+ assertThat(containerNames).containsExactly(ContainerName.builder().semanticName("db").rawName("dir_db_1").build());
+ }
+
+ @Test
+ void callDockerComposeWithNoColourFlagOnLogs() throws IOException {
+ when(executedProcess.getInputStream()).thenReturn(
+ IOUtils.toInputStream("id"),
+ IOUtils.toInputStream("docker-compose version 1.5.6, build 1ad8866"),
+ IOUtils.toInputStream("logs"));
+ ByteArrayOutputStream output = new ByteArrayOutputStream();
+ compose.writeLogs("db", output);
+ verify(executor).execute(true, "logs", "--no-color", "db");
+ assertThat(new String(output.toByteArray(), StandardCharsets.UTF_8)).isEqualTo("logs");
+ }
+
+ @Test
+ void callDockerComposeWithNoContainerOnLogs() throws IOException {
+ reset(executor);
+ Process mockIdProcess = mock(Process.class); // 'ps -q db' is polled until an id appears: three empty reads, then "id"
+ when(mockIdProcess.exitValue()).thenReturn(0);
+ InputStream emptyStream = IOUtils.toInputStream("");
+ when(mockIdProcess.getInputStream()).thenReturn(emptyStream, emptyStream, emptyStream, IOUtils.toInputStream("id"));
+ when(executor.execute(true, "ps", "-q", "db")).thenReturn(mockIdProcess);
+ Process mockVersionProcess = mock(Process.class);
+ when(mockVersionProcess.exitValue()).thenReturn(0);
+ when(mockVersionProcess.getInputStream()).thenReturn(IOUtils.toInputStream("docker-compose version 1.5.6, build 1ad8866"));
+ when(executor.execute(false, "-v")).thenReturn(mockVersionProcess);
+ when(executor.execute(true, "logs", "--no-color", "db")).thenReturn(executedProcess);
+ when(executedProcess.getInputStream()).thenReturn(IOUtils.toInputStream("logs"));
+ ByteArrayOutputStream output = new ByteArrayOutputStream();
+ compose.writeLogs("db", output);
+ verify(executor, times(4)).execute(true, "ps", "-q", "db");
+ verify(executor).execute(true, "logs", "--no-color", "db");
+ assertThat(new String(output.toByteArray(), StandardCharsets.UTF_8)).isEqualTo("logs");
+ }
+
+ @Test
+ void callDockerComposeWithTheFollowFlagWhenVersionIsAtLeast_1_7_0_OnLogs() throws IOException 
{
+ when(executedProcess.getInputStream()).thenReturn(
+ IOUtils.toInputStream("id"),
+ IOUtils.toInputStream("docker-compose version 1.7.0, build 1ad8866"),
+ IOUtils.toInputStream("logs"));
+ ByteArrayOutputStream output = new ByteArrayOutputStream();
+ compose.writeLogs("db", output);
+ verify(executor).execute(true, "logs", "--no-color", "--follow", "db");
+ assertThat(new String(output.toByteArray(), StandardCharsets.UTF_8)).isEqualTo("logs");
+ }
+
+ @Test
+ void throwExceptionWhenKillExitsWithANonZeroExitCode() {
+ when(executedProcess.exitValue()).thenReturn(1);
+ assertThatExceptionOfType(DockerExecutionException.class)
+ .isThrownBy(() -> compose.kill())
+ .withMessageStartingWith("'docker-compose kill' returned exit code 1");
+ }
+
+ @Test
+ void notThrowExceptionWhenDownFailsBecauseTheCommandDoesNotExist() throws Exception {
+ when(executedProcess.exitValue()).thenReturn(1);
+ when(executedProcess.getInputStream()).thenReturn(IOUtils.toInputStream("No such command: down"));
+ compose.down();
+ }
+
+ @Test
+ void throwExceptionWhenDownFailsForAReasonOtherThanTheCommandNotBeingPresent() {
+ when(executedProcess.exitValue()).thenReturn(1);
+ when(executedProcess.getInputStream()).thenReturn(IOUtils.toInputStream(""));
+ assertThatExceptionOfType(DockerExecutionException.class)
+ .isThrownBy(() -> compose.down())
+ .withMessageStartingWith("'docker-compose down --volumes' returned exit code 1");
+ }
+
+ @Test
+ void useTheRemoveVolumesFlagWhenDownExists() throws Exception {
+ compose.down();
+ verify(executor).execute(true, "down", "--volumes");
+ }
+
+ @Test
+ void parseThePsOutputOnPorts() throws Exception {
+ Ports ports = compose.ports("db");
+ verify(executor).execute(true, "ps", "db");
+ assertThat(ports).isEqualTo(new Ports(new DockerPort("0.0.0.0", 7000, 7000)));
+ }
+
+ @Test
+ void throwIllegalStateExceptionWhereThereIsNoContainerFoundForPorts() {
+ when(executedProcess.getInputStream()).thenReturn(IOUtils.toInputStream(""));
+ assertThatIllegalStateException()
+ .isThrownBy(() -> compose.ports("db"))
+ .withMessage("No container with name 'db' found");
+ }
+
+ @Test
+ void failOnDockerComposeExecCommandIfVersionIsNotAtLeast_1_7_0() {
+ when(executedProcess.getInputStream()).thenReturn(IOUtils.toInputStream("docker-compose version 1.5.6, build 1ad8866"));
+ assertThatIllegalStateException()
+ .isThrownBy(() -> compose.exec(options("-d"), "container_1", arguments("ls")))
+ .withMessage("You need at least docker-compose 1.7 to run docker-compose exec");
+ }
+
+ @Test
+ void passConcatenatedArgumentsToExecutorOnDockerComposeExec() throws Exception {
+ when(executedProcess.getInputStream()).thenReturn(IOUtils.toInputStream("docker-compose version 1.7.0rc1, build 1ad8866"));
+ compose.exec(options("-d"), "container_1", arguments("ls"));
+ verify(executor, times(1)).execute(true, "exec", "-T", "-d", "container_1", "ls");
+ }
+
+ @Test
+ void passConcatenatedArgumentsToExecutorOnDockerComposeRun() throws Exception {
+ compose.run(DockerComposeRunOption.options("-d"), "container_1", DockerComposeRunArgument.arguments("ls"));
+ verify(executor, times(1)).execute(true, "run", "-d", "container_1", "ls");
+ }
+
+ @Test
+ void returnTheOutputFromTheExecutedProcessOnDockerComposeExec() throws Exception {
+ String lsString = "-rw-r--r-- 1 user 1318458867 11326 Mar 9 17:47 LICENSE\n"
+ + "-rw-r--r-- 1 user 1318458867 12570 May 12 14:51 README.md";
+ String versionString = "docker-compose version 1.7.0rc1, build 1ad8866";
+
+ Process mockVersionProcess = mock(Process.class);
+ 
when(mockVersionProcess.exitValue()).thenReturn(0); + when(mockVersionProcess.getInputStream()).thenReturn(IOUtils.toInputStream(versionString)); + + Process mockLs = mock(Process.class); + when(mockLs.exitValue()).thenReturn(0); + when(mockLs.getInputStream()).thenReturn(IOUtils.toInputStream(lsString, StandardCharsets.UTF_8)); + + DockerComposeExecutable processExecutor = mock(DockerComposeExecutable.class); + when(processExecutor.execute(true, "exec", "-T", "container_1", "ls", "-l")).thenReturn(mockLs); + when(processExecutor.execute(false, "-v")).thenReturn(mockVersionProcess); + + DockerCompose processCompose = new DefaultDockerCompose(processExecutor, dockerMachine); + + assertThat(processCompose.exec(options(), "container_1", arguments("ls", "-l"))).isEqualTo(lsString); + } + + @Test + void returnTheOutputFromTheExecutedProcessOnDockerComposeRun() throws Exception { + String lsString = String.format("-rw-r--r-- 1 user 1318458867 11326 Mar 9 17:47 LICENSE%n" + + "-rw-r--r-- 1 user 1318458867 12570 May 12 14:51 README.md"); + DockerComposeExecutable processExecutor = mock(DockerComposeExecutable.class); + addProcessToExecutor(processExecutor, processWithOutput(lsString), "run", "-it", "container_1", "ls", "-l"); + DockerCompose processCompose = new DefaultDockerCompose(processExecutor, dockerMachine); + assertThat(processCompose.run(DockerComposeRunOption.options("-it"), "container_1", DockerComposeRunArgument.arguments("ls", "-l"))).isEqualTo(lsString); + } + + private static void addProcessToExecutor(DockerComposeExecutable dockerComposeExecutable, Process process, String... commands) throws Exception { + when(dockerComposeExecutable.execute(true,commands)).thenReturn(process); + } + + private static Process processWithOutput(String output) { + Process mockedProcess = mock(Process.class); + when(mockedProcess.getInputStream()).thenReturn(IOUtils.toInputStream(output)); + when(mockedProcess.exitValue()).thenReturn(0); + return mockedProcess; + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeVersionTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeVersionTests.java new file mode 100644 index 0000000000..bf37cb4550 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerComposeVersionTests.java @@ -0,0 +1,48 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import com.github.zafarkhaja.semver.Version; +import org.junit.Test; + +import static org.assertj.core.api.Assertions.assertThat; + + +public class DockerComposeVersionTests { + + @Test + public void compare_major_versions_first() { + assertThat(Version.parse("2.1.0").compareTo(Version.parse("1.2.1"))).isGreaterThan(0); + } + + @Test + public void compare_minor_versions_when_major_versions_are_the_same() { + assertThat(Version.parse("2.1.7").compareTo(Version.parse("2.3.2"))).isLessThan(0); + } + + @Test + public void return_equals_for_the_same_version_strings() { + assertThat(Version.parse("2.1.2").compareTo(Version.parse("2.1.2"))).isEqualTo(0); + } + + @Test + public void remove_non_digits_when_passing_version_string() { + assertThat(DockerComposeVersion.parseFromDockerComposeVersion("docker-compose version 1.7.0rc1, build 1ad8866")).isEqualTo(Version.parse("1.7.0")); + } + + @Test + public void check_for_docker_version() { + assertThat(DockerComposeVersion.parseFromDockerComposeVersion("Docker version 26.1.1, build 1ad8866")).isEqualTo(Version.parse("26.1.1")); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerTests.java new file mode 100644 index 0000000000..904ce8679d --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/DockerTests.java @@ -0,0 +1,82 @@ +/* + * Copyright 2018-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; + +import com.github.zafarkhaja.semver.Version; +import org.apache.commons.io.IOUtils; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +class DockerTests { + + private DockerExecutable executor = mock(DockerExecutable.class); + private Docker docker = new Docker(executor); + private Process executedProcess = mock(Process.class); + + @BeforeEach + void prepareForTest() throws IOException { + when(executor.commandName()).thenReturn("docker-compose"); + when(executor.execute(anyBoolean())).thenReturn(executedProcess); + when(executor.execute(anyBoolean(), any(String[].class))).thenReturn(executedProcess); + when(executedProcess.exitValue()).thenReturn(0); + } + + @Test + void callDockerRmWithForceFlagOnRm() throws Exception { + when(executedProcess.getInputStream()).thenReturn(IOUtils.toInputStream("")); + docker.rm("testContainer"); + verify(executor).execute(false,"rm", "-f", "testContainer"); + } + + @Test + void callDockerNetworkLs() throws Exception { + String lsOutput = "0.0.0.0:7000->7000/tcp"; + when(executedProcess.getInputStream()).thenReturn(IOUtils.toInputStream(lsOutput)); + assertThat(docker.listNetworks()).isEqualTo(lsOutput); + verify(executor).execute(false, "network", "ls"); + } + + @Test + void callDockerNetworkPrune() throws Exception { + String lsOutput = "0.0.0.0:7000->7000/tcp"; + when(executedProcess.getInputStream()).thenReturn(IOUtils.toInputStream(lsOutput)); + assertThat(docker.pruneNetworks()).isEqualTo(lsOutput); + verify(executor).execute(false,"network", "prune", "--force"); + } + + @Test + void understandOldVersionFormat() throws Exception { + when(executedProcess.getInputStream()).thenReturn(IOUtils.toInputStream("Docker version 1.7.2")); + Version version = docker.configuredVersion(); + assertThat(version).isEqualTo(Version.parse("1.7.2")); + } + + @Test + void understandNewVersionFormat() throws Exception { + when(executedProcess.getInputStream()).thenReturn(IOUtils.toInputStream("Docker version 17.03.1-ce")); + Version version = docker.configuredVersion(); + assertThat(version).isEqualTo(Version.parse("17.3.1")); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/GracefulShutdownStrategyTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/GracefulShutdownStrategyTests.java new file mode 100644 index 0000000000..6ea42effae --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/GracefulShutdownStrategyTests.java @@ -0,0 +1,42 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import org.junit.Test; +import org.mockito.InOrder; + +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ShutdownStrategy; + +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.mock; + +public class GracefulShutdownStrategyTests { + + @Test + public void call_down_then_kill_then_rm() throws Exception { + DockerCompose dockerCompose = mock(DockerCompose.class); + Docker docker = mock(Docker.class); + + ShutdownStrategy.GRACEFUL.shutdown(dockerCompose, docker); + + InOrder inOrder = inOrder(dockerCompose, docker); + inOrder.verify(dockerCompose).down(); + inOrder.verify(dockerCompose).kill(); + inOrder.verify(dockerCompose).rm(); + inOrder.verify(docker).pruneNetworks(); + inOrder.verifyNoMoreInteractions(); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/KillDownShutdownStrategyTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/KillDownShutdownStrategyTests.java new file mode 100644 index 0000000000..e4a815ef25 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/KillDownShutdownStrategyTests.java @@ -0,0 +1,41 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import org.junit.Test; +import org.mockito.InOrder; + +import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ShutdownStrategy; + +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.mock; + +public class KillDownShutdownStrategyTests { + + @Test + public void call_kill_then_down() throws Exception { + DockerCompose dockerCompose = mock(DockerCompose.class); + Docker docker = mock(Docker.class); + + ShutdownStrategy.KILL_DOWN.shutdown(dockerCompose, docker); + + InOrder inOrder = inOrder(dockerCompose, docker); + inOrder.verify(dockerCompose).kill(); + inOrder.verify(dockerCompose).down(); + inOrder.verify(docker).pruneNetworks(); + inOrder.verifyNoMoreInteractions(); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/RetryerTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/RetryerTests.java new file mode 100644 index 0000000000..195c533ad3 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/RetryerTests.java @@ -0,0 +1,112 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import org.joda.time.Duration; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +import org.springframework.cloud.dataflow.common.test.docker.compose.utils.MockitoMultiAnswer; +import org.springframework.util.StopWatch; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.Assert.fail; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +@RunWith(MockitoJUnitRunner.class) +public class RetryerTests { + @Mock private Retryer.RetryableDockerOperation<String> operation; + private final Retryer retryer = new Retryer(1, Duration.millis(0)); + + @Test + public void not_retry_if_the_operation_was_successful_and_return_result() throws Exception { + when(operation.call()).thenReturn("hi"); + + assertThat(retryer.runWithRetries(operation)).isEqualTo("hi"); + verify(operation).call(); + } + + @Test + public void should_not_pause_after_last_failure() throws Exception { + Retryer failFast = new Retryer(0, Duration.standardSeconds(1)); + when(operation.call()).thenThrow(new DockerExecutionException()); + StopWatch stopwatch = new StopWatch(); + stopwatch.start(); + try { + failFast.runWithRetries(operation); + } catch (DockerExecutionException e) { + // expected + } + stopwatch.stop(); + assertThat(stopwatch.getTotalTimeMillis()).isLessThan(1000L); + } + + @Test + public void retryer_should_wait_after_failure_before_trying_again() throws Exception { + Retryer timeRetryer = new Retryer(1, Duration.millis(100)); + + StopWatch stopwatch = new StopWatch(); + stopwatch.start(); + when(operation.call()).thenThrow(new DockerExecutionException()).thenAnswer(i -> { + stopwatch.stop(); + assertThat(stopwatch.getTotalTimeMillis()).isGreaterThan(100L); + return "success"; + }); + + String result = timeRetryer.runWithRetries(operation); + assertThat(result).isEqualTo("success"); + } + + @Test + public void retry_the_operation_if_it_failed_once_and_return_the_result_of_the_next_successful_call() throws Exception { + when(operation.call()).thenAnswer(MockitoMultiAnswer.of( + firstInvocation -> { + throw new DockerExecutionException(); + }, + secondInvocation -> "hola" + )); + + assertThat(retryer.runWithRetries(operation)).isEqualTo("hola"); + verify(operation, times(2)).call(); + } + + @Test + public void throw_the_last_exception_when_the_operation_fails_more_times_than_the_number_of_specified_retry_attempts() throws Exception { + DockerExecutionException finalException = new DockerExecutionException(); + + when(operation.call()).thenAnswer(MockitoMultiAnswer.of( + firstInvocation -> { + throw new DockerExecutionException(); + }, + secondInvocation -> { + throw finalException; + } + )); + + try { + retryer.runWithRetries(operation); + fail("Should have caught exception"); + } catch (DockerExecutionException actualException) { + assertThat(actualException).isEqualTo(finalException); + } + + verify(operation, times(2)).call(); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/RetryingDockerComposeTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/RetryingDockerComposeTests.java new file mode 100644 index
0000000000..ebb0ef8467 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/execution/RetryingDockerComposeTests.java @@ -0,0 +1,102 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.execution; + +import java.io.IOException; +import java.util.List; + +import org.junit.Before; +import org.junit.Test; + +import org.springframework.cloud.dataflow.common.test.docker.compose.TestContainerNames; +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerName; +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Retryer.RetryableDockerOperation; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; +import static org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecArgument.arguments; +import static org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecOption.options; + +public class RetryingDockerComposeTests { + private final DockerCompose dockerCompose = mock(DockerCompose.class); + private final Retryer retryer = mock(Retryer.class); + private final RetryingDockerCompose retryingDockerCompose = new RetryingDockerCompose(retryer, dockerCompose); + private final List<ContainerName> someContainerNames = TestContainerNames.of("hey"); + private static final String CONTAINER_NAME = "container"; + + @Before + public void before() throws IOException, InterruptedException { + retryerJustCallsOperation(); + } + + private void retryerJustCallsOperation() throws IOException, InterruptedException { + when(retryer.runWithRetries(anyOperation())).thenAnswer(invocation -> { + Retryer.RetryableDockerOperation<?> operation = (Retryer.RetryableDockerOperation<?>) invocation.getArguments()[0]; + return operation.call(); + }); + } + + private static RetryableDockerOperation<?> anyOperation() { + return any(Retryer.RetryableDockerOperation.class); + } + + @Test + public void calls_up_on_the_underlying_docker_compose() throws IOException, InterruptedException { + retryingDockerCompose.up(); + + verifyRetryerWasUsed(); + verify(dockerCompose).up(); + verifyNoMoreInteractions(dockerCompose); + } + + @Test + public void call_ps_on_the_underlying_docker_compose_and_returns_the_same_value() throws IOException, InterruptedException { + when(dockerCompose.ps()).thenReturn(someContainerNames); + + assertThat(retryingDockerCompose.ps()).isEqualTo(someContainerNames); + + verifyRetryerWasUsed(); + verify(dockerCompose).ps(); + verifyNoMoreInteractions(dockerCompose); + } + + private void
verifyRetryerWasUsed() throws IOException, InterruptedException { + verify(retryer).runWithRetries(anyOperation()); + } + + private void verifyRetryerWasNotUsed() throws IOException, InterruptedException { + verify(retryer, times(0)).runWithRetries(anyOperation()); + } + + @Test + public void calls_exec_on_the_underlying_docker_compose_and_not_invoke_retryer() throws IOException, InterruptedException { + retryingDockerCompose.exec(options("-d"), CONTAINER_NAME, arguments("ls")); + verifyRetryerWasNotUsed(); + verify(dockerCompose).exec(options("-d"), CONTAINER_NAME, arguments("ls")); + } + + @Test + public void calls_run_on_the_underlying_docker_compose_and_not_invoke_retryer() throws IOException, InterruptedException { + retryingDockerCompose.run(DockerComposeRunOption.options("-d"), CONTAINER_NAME, DockerComposeRunArgument.arguments("ls")); + verifyRetryerWasNotUsed(); + verify(dockerCompose).run(DockerComposeRunOption.options("-d"), CONTAINER_NAME, DockerComposeRunArgument.arguments("ls")); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/FileLogCollectorTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/FileLogCollectorTests.java new file mode 100644 index 0000000000..1c1b249d1a --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/FileLogCollectorTests.java @@ -0,0 +1,207 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.logging; + +import java.io.File; +import java.io.IOException; +import java.io.OutputStream; +import java.util.Arrays; +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import org.apache.commons.io.IOUtils; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.junit.rules.TemporaryFolder; + +import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.Assert.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.springframework.cloud.dataflow.common.test.docker.compose.matchers.IOMatchers.containsInAnyOrder; +import static org.springframework.cloud.dataflow.common.test.docker.compose.matchers.IOMatchers.fileContainingString; +import static org.springframework.cloud.dataflow.common.test.docker.compose.matchers.IOMatchers.fileWithName; + +public class FileLogCollectorTests { + + @Rule + public TemporaryFolder logDirectoryParent = new TemporaryFolder(); + @Rule + public ExpectedException exception = ExpectedException.none(); + + private final DockerCompose compose = mock(DockerCompose.class); + private File logDirectory; + private LogCollector logCollector; + + @Before + public void before() throws IOException { + logDirectory = logDirectoryParent.newFolder(); + logCollector = new FileLogCollector(logDirectory); + } + + @Test + public void throw_exception_when_created_with_file_as_the_log_directory() throws IOException { + File file = logDirectoryParent.newFile("cannot-use"); + + exception.expect(IllegalStateException.class); + exception.expectMessage("cannot be a file"); + + new FileLogCollector(file); + } + + @Test + public void create_the_log_directory_if_it_does_not_already_exist() { + File doesNotExistYetDirectory = logDirectoryParent.getRoot() + .toPath() + .resolve("doesNotExist") + .toFile(); + new FileLogCollector(doesNotExistYetDirectory); + assertThat(doesNotExistYetDirectory.exists()).isEqualTo(true); + } + + @Test + public void throw_exception_when_created_if_the_log_directory_does_not_exist_and_cannot_be_created() { + File cannotBeCreatedDirectory = cannotBeCreatedDirectory(); + + exception.expect(IllegalArgumentException.class); + exception.expectMessage("Error making"); + exception.expectMessage(cannotBeCreatedDirectory.getAbsolutePath()); + + new FileLogCollector(cannotBeCreatedDirectory); + } + + @Test + public void not_collect_any_logs_when_no_containers_are_running() throws IOException, InterruptedException { + when(compose.services()).thenReturn(Collections.emptyList()); + logCollector.startCollecting(compose); + logCollector.stopCollecting(); + assertThat(logDirectory).isEmptyDirectory(); + } + + @Test + public void collect_logs_when_one_container_is_running_and_terminates_before_start_collecting_is_run() + throws Exception { + when(compose.services()).thenReturn(Collections.singletonList("db")); + when(compose.writeLogs(eq("db"), any(OutputStream.class))).thenAnswer(args -> { + OutputStream outputStream = (OutputStream) args.getArguments()[1]; + IOUtils.write("log", outputStream); + return false; + }); + logCollector.startCollecting(compose); + logCollector.stopCollecting(); + 
assertThat(logDirectory.listFiles()).have(fileWithName("db.log")); + assertThat(new File(logDirectory, "db.log")).has(fileContainingString("log")); + } + + @Test + public void collect_logs_when_one_container_is_running_and_does_not_terminate_until_after_start_collecting_is_run() + throws Exception { + when(compose.services()).thenReturn(Collections.singletonList("db")); + CountDownLatch latch = new CountDownLatch(1); + when(compose.writeLogs(eq("db"), any(OutputStream.class))).thenAnswer(args -> { + if (!latch.await(1, TimeUnit.SECONDS)) { + throw new RuntimeException("Latch was not triggered"); + } + OutputStream outputStream = (OutputStream) args.getArguments()[1]; + IOUtils.write("log", outputStream); + return false; + }); + logCollector.startCollecting(compose); + latch.countDown(); + logCollector.stopCollecting(); + assertThat(logDirectory.listFiles()).have(fileWithName("db.log")); + assertThat(new File(logDirectory, "db.log")).is(fileContainingString("log")); + } + + @Test + public void collect_logs_when_one_container_is_running_and_does_not_terminate() + throws IOException, InterruptedException { + when(compose.services()).thenReturn(Collections.singletonList("db")); + CountDownLatch latch = new CountDownLatch(1); + when(compose.writeLogs(eq("db"), any(OutputStream.class))).thenAnswer(args -> { + OutputStream outputStream = (OutputStream) args.getArguments()[1]; + IOUtils.write("log", outputStream); + try { + latch.await(1, TimeUnit.SECONDS); + fail("Latch was not triggered"); + } catch (InterruptedException e) { + // Success + return true; + } + fail("Latch was not triggered"); + return false; + }); + logCollector.startCollecting(compose); + logCollector.stopCollecting(); + assertThat(logDirectory.listFiles()).have(fileWithName("db.log")); + assertThat(new File(logDirectory, "db.log")).is(fileContainingString("log")); + latch.countDown(); + } + + @Test + public void collect_logs_in_parallel_for_two_containers() throws IOException, InterruptedException { + when(compose.services()).thenReturn(Arrays.asList("db", "db2")); + CountDownLatch dbLatch = new CountDownLatch(1); + when(compose.writeLogs(eq("db"), any(OutputStream.class))).thenAnswer(args -> { + OutputStream outputStream = (OutputStream) args.getArguments()[1]; + IOUtils.write("log", outputStream); + dbLatch.countDown(); + return true; + }); + CountDownLatch db2Latch = new CountDownLatch(1); + when(compose.writeLogs(eq("db2"), any(OutputStream.class))).thenAnswer(args -> { + OutputStream outputStream = (OutputStream) args.getArguments()[1]; + IOUtils.write("other", outputStream); + db2Latch.countDown(); + return true; + }); + + logCollector.startCollecting(compose); + assertThat(dbLatch.await(1, TimeUnit.SECONDS)).isEqualTo(true); + assertThat(db2Latch.await(1, TimeUnit.SECONDS)).isEqualTo(true); + + assertThat(logDirectory.listFiles()).has(containsInAnyOrder(fileWithName("db.log"), fileWithName("db2.log"))); + assertThat(new File(logDirectory, "db.log")).is(fileContainingString("log")); + assertThat(new File(logDirectory, "db2.log")).is(fileContainingString("other")); + + logCollector.stopCollecting(); + } + + @Test + public void throw_exception_when_trying_to_start_a_started_collector_a_second_time() + throws IOException, InterruptedException { + when(compose.services()).thenReturn(Collections.singletonList("db")); + logCollector.startCollecting(compose); + exception.expect(RuntimeException.class); + exception.expectMessage("Cannot start collecting the same logs twice"); + logCollector.startCollecting(compose); + } + + 
private static File cannotBeCreatedDirectory() { + File cannotBeCreatedDirectory = mock(File.class); + when(cannotBeCreatedDirectory.isFile()).thenReturn(false); + when(cannotBeCreatedDirectory.mkdirs()).thenReturn(false); + when(cannotBeCreatedDirectory.getAbsolutePath()).thenReturn("cannot/exist/directory"); + return cannotBeCreatedDirectory; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/LogDirectoryTest.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/LogDirectoryTest.java new file mode 100644 index 0000000000..f7e67ae23b --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/logging/LogDirectoryTest.java @@ -0,0 +1,31 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.logging; + +import org.junit.Test; + +import static org.assertj.core.api.Assertions.assertThat; + +public class LogDirectoryTest { + + @Test + public void gradleDockerLogsDirectory_should_use_class_simple_name() { + String directory = LogDirectory.gradleDockerLogsDirectory(SomeTestClass.class); + assertThat(directory).isEqualTo("build/dockerLogs/SomeTestClass"); + } + + private static class SomeTestClass {} +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/AvailablePortMatcherTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/AvailablePortMatcherTests.java new file mode 100644 index 0000000000..b59e00685e --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/AvailablePortMatcherTests.java @@ -0,0 +1,54 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.matchers; + +import java.util.Arrays; +import java.util.List; + +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerPort; + +import static java.util.Collections.emptyList; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.springframework.cloud.dataflow.common.test.docker.compose.matchers.AvailablePortMatcher.areAvailable; + +public class AvailablePortMatcherTests { + + @Rule + public ExpectedException exception = ExpectedException.none(); + + @Test + public void succeed_when_there_are_no_unavailable_ports() { + List<DockerPort> unavailablePorts = emptyList(); + assertThat(unavailablePorts, areAvailable()); + } + + @Test + public void throw_exception_when_there_are_some_unavailable_ports() { + List<DockerPort> unavailablePorts = Arrays.asList(new DockerPort("0.0.0.0", 1234, 1234), + new DockerPort("1.2.3.4", 2345, 3456)); + exception.expect(AssertionError.class); + exception.expectMessage("For host with ip address: 0.0.0.0"); + exception.expectMessage("external port '1234' mapped to internal port '1234' was unavailable"); + exception.expectMessage("For host with ip address: 1.2.3.4"); + exception.expectMessage("external port '2345' mapped to internal port '3456' was unavailable"); + assertThat(unavailablePorts, areAvailable()); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/DockerMachineEnvironmentMatcher.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/DockerMachineEnvironmentMatcher.java new file mode 100644 index 0000000000..9c4861553b --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/DockerMachineEnvironmentMatcher.java @@ -0,0 +1,66 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.common.test.docker.compose.matchers; + +import java.util.HashMap; +import java.util.Map; + +import org.hamcrest.Description; + +import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerMachine; + +import static java.util.stream.Collectors.toMap; +import static org.hamcrest.Matchers.hasEntry; + +public class DockerMachineEnvironmentMatcher extends ValueCachingMatcher<DockerMachine> { + + private final Map<String, String> expected; + + public DockerMachineEnvironmentMatcher(Map<String, String> expected) { + this.expected = expected; + } + + @Override + public void describeTo(Description description) { + description.appendText("Docker Machine to have these environment variables:\n"); + description.appendValue(expected); + } + + @Override + protected boolean matchesSafely() { + return missingEnvironmentVariables().isEmpty(); + } + + @Override + protected void describeMismatchSafely(DockerMachine item, Description mismatchDescription) { + mismatchDescription.appendText("\nThese environment variables were missing:\n"); + mismatchDescription.appendValue(missingEnvironmentVariables()); + } + + public static DockerMachineEnvironmentMatcher containsEnvironment(Map<String, String> environment) { + return new DockerMachineEnvironmentMatcher(new HashMap<>(environment)); + } + + private Map<String, String> missingEnvironmentVariables() { + Map<String, String> environment = value().configuredDockerComposeProcess() + .environment(); + return expected.entrySet() + .stream() + .filter(required -> !hasEntry(required.getKey(), required.getValue()).matches(environment)) + .collect(toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/IOMatchers.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/IOMatchers.java new file mode 100644 index 0000000000..98ec6cafa4 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/IOMatchers.java @@ -0,0 +1,56 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.matchers; + + +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; + +import org.apache.commons.io.FileUtils; +import org.assertj.core.api.Condition; + +public final class IOMatchers { + private IOMatchers() {} + public static Condition<File[]> containsInAnyOrder(Condition<File>...
conditions) { + return new Condition<>(files -> + Arrays.stream(conditions).allMatch(condition -> Arrays.stream(files).anyMatch(condition::matches)) + , "containsInAnyOrder"); + } + public static Condition<File> hasFiles(int numberOfFiles) { + return new Condition<>(dir -> dir.isDirectory() && dir.listFiles().length == numberOfFiles, "directory has " + numberOfFiles + " files"); + } + + public static Condition<File> fileWithName(String filename) { + return new Condition<>(file -> file.getName().equals(filename), "filename is '" + filename + "'"); + } + + public static Condition<File> fileContainingString(String contents) { + return fileWithContents(new Condition<>(s -> s.contains(contents), "contains " + contents)); + } + + public static Condition<File> fileWithContents(Condition<String> contentsMatcher) { + return new Condition<>(file -> { + try { + return contentsMatcher.matches(FileUtils.readFileToString(file, StandardCharsets.UTF_8)); + } catch (IOException e) { + throw new RuntimeException(e); + } + }, "file contents"); + } + +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/ValueCachingMatcher.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/ValueCachingMatcher.java new file mode 100644 index 0000000000..59b7efc267 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/matchers/ValueCachingMatcher.java @@ -0,0 +1,38 @@ +/* + * Copyright 2018-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.matchers; + +import org.hamcrest.Description; +import org.hamcrest.TypeSafeMatcher; + +public abstract class ValueCachingMatcher<T> extends TypeSafeMatcher<T> { + private T cachedValue; + + @Override + protected abstract void describeMismatchSafely(T item, Description mismatchDescription); + + @Override + protected boolean matchesSafely(T value) { + cachedValue = value; + return matchesSafely(); + } + + protected abstract boolean matchesSafely(); + + public T value() { + return cachedValue; + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/utils/MockitoMultiAnswer.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/utils/MockitoMultiAnswer.java new file mode 100644 index 0000000000..76a8a576dd --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/java/org/springframework/cloud/dataflow/common/test/docker/compose/utils/MockitoMultiAnswer.java @@ -0,0 +1,48 @@ +/* + * Copyright 2018-2019 the original author or authors.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.common.test.docker.compose.utils; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.function.Function; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + +public class MockitoMultiAnswer<T> implements Answer<T> { + private final List<Function<InvocationOnMock, T>> invocationHandlers; + private int numberOfTimesCalled = 0; + + public MockitoMultiAnswer(List<Function<InvocationOnMock, T>> invocationHandlers) { + this.invocationHandlers = new ArrayList<>(invocationHandlers); + } + + @SafeVarargs + public static <T> MockitoMultiAnswer<T> of(Function<InvocationOnMock, T>... invocationHandlers) { + return new MockitoMultiAnswer<>(Arrays.asList(invocationHandlers)); + } + + @Override + public T answer(InvocationOnMock invocation) throws Throwable { + if (numberOfTimesCalled >= invocationHandlers.size()) { + throw new RuntimeException("Called more times than supported"); + } + + Function<InvocationOnMock, T> invocationHandler = invocationHandlers.get(numberOfTimesCalled); + numberOfTimesCalled++; + return invocationHandler.apply(invocation); + } +} diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/docker-compose-cp1.yaml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/docker-compose-cp1.yaml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/logback-test.xml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/logback-test.xml new file mode 100644 index 0000000000..a8b7f0a4d7 --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/logback-test.xml @@ -0,0 +1,7 @@ + + + + + + + diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/native-healthcheck.yaml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/native-healthcheck.yaml new file mode 100644 index 0000000000..e7d566f8db --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/native-healthcheck.yaml @@ -0,0 +1,9 @@ +services: + withHealthcheck: + image: gliderlabs/alpine:3.4 + command: sh -c 'while true; do sleep 10; done' + healthcheck: + test: ["CMD", "test", "-f", "healthy"] + interval: 100ms + timeout: 1s + retries: 1 diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/no-healthcheck.yaml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/no-healthcheck.yaml new file mode 100644 index 0000000000..0006d008ca --- /dev/null +++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/no-healthcheck.yaml @@ -0,0 +1,4 @@ +services: + noHealthcheck: + image: gliderlabs/alpine:3.4 + command: sh -c 'while true; do sleep 10; done'
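For orientation, the MockitoMultiAnswer utility added above hands back a different answer on each successive invocation of a stubbed method, which is how RetryerTests in this patch simulates an operation that fails once and then succeeds. The following is a minimal usage sketch, separate from the patch itself, assuming only Mockito on the classpath; the Callable mock and the printed strings are illustrative, not taken from the source:

import java.util.concurrent.Callable;

import org.mockito.Mockito;

import org.springframework.cloud.dataflow.common.test.docker.compose.utils.MockitoMultiAnswer;

class MultiAnswerSketch {
	public static void main(String[] args) throws Exception {
		// Hypothetical mock; any interface method can be stubbed this way.
		@SuppressWarnings("unchecked")
		Callable<String> flaky = (Callable<String>) Mockito.mock(Callable.class);
		// Each handler consumes exactly one invocation, in order.
		Mockito.when(flaky.call()).thenAnswer(MockitoMultiAnswer.of(
				firstCall -> "first attempt",
				secondCall -> "second attempt"));
		System.out.println(flaky.call()); // prints "first attempt"
		System.out.println(flaky.call()); // prints "second attempt"
		// A third call would throw: "Called more times than supported".
	}
}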
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/org/springframework/cloud/dataflow/common/test/docker/compose/docker-compose-cp2.yaml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/test/resources/org/springframework/cloud/dataflow/common/test/docker/compose/docker-compose-cp2.yaml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/spring-cloud-dataflow-completion/pom.xml b/spring-cloud-dataflow-completion/pom.xml index 89855b556c..21090c50a3 100644 --- a/spring-cloud-dataflow-completion/pom.xml +++ b/spring-cloud-dataflow-completion/pom.xml @@ -1,24 +1,35 @@ - + 4.0.0 org.springframework.cloud spring-cloud-dataflow-parent - 2.8.0-SNAPSHOT + 3.0.0-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-completion + spring-cloud-dataflow-completion + Spring Cloud Data Flow Completion + + true + 3.4.1 + org.springframework.cloud spring-cloud-dataflow-core + ${project.version} org.springframework.cloud spring-cloud-dataflow-registry + ${project.version} org.springframework.cloud spring-cloud-dataflow-configuration-metadata + ${project.version} org.springframework.boot @@ -26,4 +37,45 @@ test + + + + org.apache.maven.plugins + maven-surefire-plugin + 3.1.2 + + 1 + 1 + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + diff --git a/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/BootVersionsCompletionProviderTests.java b/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/BootVersionsCompletionProviderTests.java deleted file mode 100644 index 9a7a8bdfea..0000000000 --- a/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/BootVersionsCompletionProviderTests.java +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Copyright 2019-2021 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.cloud.dataflow.completion; - -import java.io.File; -import java.util.ArrayList; -import java.util.List; - -import org.junit.Test; -import org.junit.runner.RunWith; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.boot.test.mock.mockito.MockBean; -import org.springframework.cloud.dataflow.audit.service.DefaultAuditRecordService; -import org.springframework.cloud.dataflow.configuration.metadata.ApplicationConfigurationMetadataResolver; -import org.springframework.cloud.dataflow.configuration.metadata.BootApplicationConfigurationMetadataResolver; -import org.springframework.cloud.dataflow.configuration.metadata.container.ContainerImageMetadataResolver; -import org.springframework.cloud.dataflow.configuration.metadata.container.DefaultContainerImageMetadataResolver; -import org.springframework.cloud.dataflow.container.registry.ContainerRegistryService; -import org.springframework.cloud.dataflow.core.AppRegistration; -import org.springframework.cloud.dataflow.core.ApplicationType; -import org.springframework.cloud.dataflow.core.DefaultStreamDefinitionService; -import org.springframework.cloud.dataflow.core.StreamDefinitionService; -import org.springframework.cloud.dataflow.registry.repository.AppRegistrationRepository; -import org.springframework.cloud.dataflow.registry.service.AppRegistryService; -import org.springframework.cloud.dataflow.registry.service.DefaultAppRegistryService; -import org.springframework.cloud.dataflow.registry.support.AppResourceCommon; -import org.springframework.cloud.deployer.resource.maven.MavenProperties; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.core.io.FileSystemResourceLoader; -import org.springframework.test.context.junit4.SpringRunner; - -import static org.hamcrest.CoreMatchers.hasItems; -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; -import static org.mockito.Mockito.mock; - -/** - * Tests that the completion mechanism knows how to cope with different versions of Spring - * Boot, including using reflection on classes packaged in the boot archive when needed - * (e.g. enum values completion). 
- * - * @author Eric Bottard - * @author Christian Tzolov - */ -@RunWith(SpringRunner.class) -@SpringBootTest(classes = { CompletionConfiguration.class, - BootVersionsCompletionProviderTests.Mocks.class }, properties = { - "spring.main.allow-bean-definition-overriding=true" }) -@SuppressWarnings("unchecked") -public class BootVersionsCompletionProviderTests { - - @Autowired - private StreamCompletionProvider completionProvider; - - @Test - public void testBoot13Layout() { - List<CompletionProposal> result = completionProvider.complete("boot13 --", 0); - assertThat(result, hasItems(Proposals.proposalThat(is("boot13 --level=")), Proposals.proposalThat(is("boot13 --number=")), - Proposals.proposalThat(is("boot13 --some-string=")))); - - // Test that custom classes can also be loaded correctly - result = completionProvider.complete("boot13 --level=", 0); - assertThat(result, hasItems(Proposals.proposalThat(is("boot13 --level=low")), Proposals.proposalThat(is("boot13 --level=high")))); - - result = completionProvider.complete("boot13 --number=", 0); - assertThat(result, hasItems(Proposals.proposalThat(is("boot13 --number=one")), Proposals.proposalThat(is("boot13 --number=two")))); - } - - @Test - public void testBoot14Layout() { - List<CompletionProposal> result = completionProvider.complete("boot14 --", 0); - assertThat(result, hasItems(Proposals.proposalThat(is("boot14 --level=")), Proposals.proposalThat(is("boot14 --number=")), - Proposals.proposalThat(is("boot14 --some-string=")))); - - // Test that custom classes can also be loaded correctly - result = completionProvider.complete("boot14 --level=", 0); - assertThat(result, - hasItems(Proposals.proposalThat(is("boot14 --level=very_low")), Proposals.proposalThat(is("boot14 --level=very_high")))); - - result = completionProvider.complete("boot14 --number=", 0); - assertThat(result, hasItems(Proposals.proposalThat(is("boot14 --number=one")), Proposals.proposalThat(is("boot14 --number=two")))); - - } - - /** - * A set of mocks that consider the contents of the {@literal boot_versions/} - * directory as app archives.
- * - * @author Eric Bottard - * @author Mark Fisher - */ - @Configuration - public static class Mocks { - - private static final File ROOT = new File("src/test/resources", - BootVersionsCompletionProviderTests.Mocks.class.getPackage().getName().replace('.', '/') - + "/boot_versions"); - - @MockBean - private DefaultContainerImageMetadataResolver containerImageMetadataResolver; - - @Bean - @ConditionalOnMissingBean - public StreamDefinitionService streamDefinitionService() { - return new DefaultStreamDefinitionService(); - } - - @Bean - public AppRegistryService appRegistry() { - - return new DefaultAppRegistryService(mock(AppRegistrationRepository.class), - new AppResourceCommon(new MavenProperties(), new FileSystemResourceLoader()), - mock(DefaultAuditRecordService.class)) { - - @Override - public boolean appExist(String name, ApplicationType type) { - return false; - } - - @Override - public List<AppRegistration> findAll() { - List<AppRegistration> result = new ArrayList<>(); - result.add(find("boot13", ApplicationType.source)); - result.add(find("boot14", ApplicationType.source)); - return result; - } - - @Override - public AppRegistration find(String name, ApplicationType type) { - String filename = name + "-1.0.0.BUILD-SNAPSHOT.jar"; - File file = new File(ROOT, filename); - if (file.exists()) { - return new AppRegistration(name, type, file.toURI(), file.toURI()); - } - else { - return null; - } - } - - @Override - public AppRegistration save(AppRegistration app) { - return null; - } - - @Override - protected boolean isOverwrite(AppRegistration app, boolean overwrite) { - return false; - } - }; - } - - @MockBean - ContainerRegistryService containerRegistryService; - - @Bean - public ContainerImageMetadataResolver containerImageMetadataResolver(ContainerRegistryService containerRegistryService) { - return new DefaultContainerImageMetadataResolver(containerRegistryService); - } - - @Bean - public ApplicationConfigurationMetadataResolver metadataResolver() { - return new BootApplicationConfigurationMetadataResolver( - StreamCompletionProviderTests.class.getClassLoader(), containerImageMetadataResolver); - } - } -} diff --git a/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/CompletionUtilsTests.java b/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/CompletionUtilsTests.java index 2969578576..8d0efee564 100644 --- a/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/CompletionUtilsTests.java +++ b/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/CompletionUtilsTests.java @@ -16,37 +16,43 @@ package org.springframework.cloud.dataflow.completion; -import org.junit.Assert; -import org.junit.Test; + +import java.util.LinkedList; + +import org.junit.jupiter.api.Test; import org.springframework.cloud.dataflow.core.DefaultStreamDefinitionService; +import org.springframework.cloud.dataflow.core.StreamAppDefinition; import org.springframework.cloud.dataflow.core.StreamDefinition; import org.springframework.cloud.dataflow.core.StreamDefinitionService; -import static org.hamcrest.core.Is.is; +import static org.assertj.core.api.Assertions.assertThat; /** * Unit tests for CompletionUtils.
* * @author Eric Bottard + * @author Corneil du Plessis */ -public class CompletionUtilsTests { +class CompletionUtilsTests { final StreamDefinitionService streamDefinitionService = new DefaultStreamDefinitionService(); @Test - public void testLabelQualification() { + void labelQualification() { StreamDefinition streamDefinition = new StreamDefinition("foo", "http | filter"); - Assert.assertThat(CompletionUtils.maybeQualifyWithLabel("filter", - this.streamDefinitionService.getAppDefinitions(streamDefinition)), is("filter2: filter")); + LinkedList<StreamAppDefinition> appDefinitions = this.streamDefinitionService.getAppDefinitions(streamDefinition); + assertThat(CompletionUtils.maybeQualifyWithLabel("filter", appDefinitions)) + .isEqualTo("filter2: filter"); streamDefinition = new StreamDefinition("foo", "http | filter"); - Assert.assertThat(CompletionUtils.maybeQualifyWithLabel("transform", - this.streamDefinitionService.getAppDefinitions(streamDefinition)), is("transform")); + appDefinitions = this.streamDefinitionService.getAppDefinitions(streamDefinition); + assertThat(CompletionUtils.maybeQualifyWithLabel("transform", appDefinitions)) + .isEqualTo("transform"); streamDefinition = new StreamDefinition("foo", "http | filter | filter2: filter"); - Assert.assertThat(CompletionUtils.maybeQualifyWithLabel("filter", - this.streamDefinitionService.getAppDefinitions(streamDefinition)), is("filter3: filter")); + appDefinitions = this.streamDefinitionService.getAppDefinitions(streamDefinition); + assertThat(CompletionUtils.maybeQualifyWithLabel("filter", appDefinitions)).isEqualTo("filter3: filter"); } } diff --git a/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/Proposals.java b/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/Proposals.java index d5fd3be480..6a5cbb5d43 100644 --- a/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/Proposals.java +++ b/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/Proposals.java @@ -16,20 +16,41 @@ package org.springframework.cloud.dataflow.completion; -import org.hamcrest.FeatureMatcher; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.function.Predicate; +import java.util.stream.Collectors; + +import org.assertj.core.api.Condition; /** * Contains helper Hamcrest matchers for testing completion proposal related code. * * @author Eric Bottard + * @author Corneil du Plessis */ class Proposals { - static org.hamcrest.Matcher proposalThat(org.hamcrest.Matcher matcher) { - return new FeatureMatcher(matcher, "a proposal whose text", "text") { - @Override - protected String featureValueOf(CompletionProposal actual) { - return actual.getText(); - } - }; + static Condition<CompletionProposal> proposal(String text) { + return new Condition<>(actual -> text.equals(actual.getText()), "text:" + text); + } + static Condition<CompletionProposal> proposal(Predicate<String> check) { + return new Condition<>(actual -> check.test(actual.getText()), "check"); + } + static boolean hasAny(List<? extends CompletionProposal> proposals, String... text) { + Set<String> items = new HashSet<>(Arrays.asList(text)); + return proposals.stream().anyMatch(item -> items.contains(item.getText())); + } + static boolean hasAll(List<? extends CompletionProposal> proposals, String...
text) { + Set<String> items = new HashSet<>(Arrays.asList(text)); + Set<String> proposalTextItems = proposals.stream().map(CompletionProposal::getText).collect(Collectors.toSet()); + return items.stream().allMatch(proposalTextItems::contains); + } + static Condition<List<? extends CompletionProposal>> any(String... text) { + return new Condition<>(actual -> hasAny(actual, text), "hasAny"); + } + static Condition<List<? extends CompletionProposal>> all(String... text) { + return new Condition<>(actual -> hasAll(actual, text), "hasAll"); } } diff --git a/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/StreamCompletionProviderTests.java b/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/StreamCompletionProviderTests.java index 7f105a46a8..3571c1a13d 100644 --- a/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/StreamCompletionProviderTests.java +++ b/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/StreamCompletionProviderTests.java @@ -16,20 +16,13 @@ package org.springframework.cloud.dataflow.completion; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.cloud.dataflow.configuration.metadata.ApplicationConfigurationMetadataResolver; -import org.springframework.test.context.junit4.SpringRunner; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.hasItems; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.startsWith; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; /** * Integration tests for StreamCompletionProvider.
@@ -42,154 +35,140 @@ * * @author Eric Bottard * @author Mark Fisher + * @author Corneil du Plessis */ -@RunWith(SpringRunner.class) -@SpringBootTest(classes = { CompletionConfiguration.class, CompletionTestsMocks.class }, properties = { - "spring.main.allow-bean-definition-overriding=true" }) +@SpringBootTest(classes = {CompletionConfiguration.class, CompletionTestsMocks.class}, properties = { + "spring.main.allow-bean-definition-overriding=true"}) @SuppressWarnings("unchecked") -public class StreamCompletionProviderTests { +class StreamCompletionProviderTests { @Autowired private StreamCompletionProvider completionProvider; - @Test // => file,http,etc - public void testEmptyStartShouldProposeSourceOrUnboundApps() { - assertThat(completionProvider.complete("", 1), hasItems(Proposals.proposalThat(is("orange")), - Proposals.proposalThat(is("http")), Proposals.proposalThat(is("hdfs")))); - assertThat(completionProvider.complete("", 1), not(hasItems(Proposals.proposalThat(is("log"))))); + @Test + void emptyStartShouldProposeSourceOrUnboundApps() { + assertThat(completionProvider.complete("", 1)).has(Proposals.all("orange", "http", "hdfs")); + assertThat(completionProvider.complete("", 1)).doNotHave(Proposals.proposal("log")); } - @Test // fi => file - public void testUnfinishedAppNameShouldReturnCompletions() { - assertThat(completionProvider.complete("h", 1), hasItems(Proposals.proposalThat(is("http")), Proposals.proposalThat(is("hdfs")))); - assertThat(completionProvider.complete("ht", 1), hasItems(Proposals.proposalThat(is("http")))); - assertThat(completionProvider.complete("ht", 1), not(hasItems(Proposals.proposalThat(is("hdfs"))))); + @Test + void unfinishedAppNameShouldReturnCompletions() { + assertThat(completionProvider.complete("h", 1)).has(Proposals.all("http", "hdfs")); + assertThat(completionProvider.complete("ht", 1)).has(Proposals.all("http")); + assertThat(completionProvider.complete("ht", 1)).doNotHave(Proposals.proposal("hdfs")); } @Test - public void testUnfinishedUnboundAppNameShouldReturnCompletions2() { - assertThat(completionProvider.complete("", 1), hasItems(Proposals.proposalThat(is("orange")))); - assertThat(completionProvider.complete("o", 1), hasItems(Proposals.proposalThat(is("orange")))); - assertThat(completionProvider.complete("oran", 1), hasItems(Proposals.proposalThat(is("orange")))); - assertThat(completionProvider.complete("orange", 1), hasItems(Proposals.proposalThat(is("orange --expression=")), - Proposals.proposalThat(is("orange --fooble=")),Proposals.proposalThat(is("orange --expresso=")))); - assertThat(completionProvider.complete("o1: orange||", 1), hasItems(Proposals.proposalThat(is("o1: orange|| orange")))); - assertThat(completionProvider.complete("o1: orange|| ", 1), hasItems(Proposals.proposalThat(is("o1: orange|| orange")))); - assertThat(completionProvider.complete("o1: orange ||", 1), hasItems(Proposals.proposalThat(is("o1: orange || orange")))); - assertThat(completionProvider.complete("o1: orange|| or", 1), hasItems(Proposals.proposalThat(is("o1: orange|| orange")))); - assertThat(completionProvider.complete("http | o", 1), empty()); - assertThat(completionProvider.complete("http|| o", 1), hasItems(Proposals.proposalThat(is("http|| orange")))); + void unfinishedUnboundAppNameShouldReturnCompletions2() { + assertThat(completionProvider.complete("", 1)).has(Proposals.all("orange")); + assertThat(completionProvider.complete("o", 1)).has(Proposals.all("orange")); + assertThat(completionProvider.complete("oran", 1)).has(Proposals.all("orange")); 
+ assertThat(completionProvider.complete("orange", 1)).has(Proposals.all("orange --expression=","orange --fooble=", "orange --expresso=")); + assertThat(completionProvider.complete("o1: orange||", 1)).has(Proposals.all("o1: orange|| orange")); + assertThat(completionProvider.complete("o1: orange|| ", 1)).has(Proposals.all("o1: orange|| orange")); + assertThat(completionProvider.complete("o1: orange ||", 1)).has(Proposals.all("o1: orange || orange")); + assertThat(completionProvider.complete("o1: orange|| or", 1)).has(Proposals.all("o1: orange|| orange")); + assertThat(completionProvider.complete("http | o", 1)).isEmpty(); + assertThat(completionProvider.complete("http|| o", 1)).has(Proposals.all("http|| orange")); } - @Test // file | filter => file | filter | foo, etc - public void testValidSubStreamDefinitionShouldReturnPipe() { - assertThat(completionProvider.complete("http | filter ", 1), hasItems(Proposals.proposalThat(is("http | filter | log")))); - assertThat(completionProvider.complete("http | filter ", 1), - not(hasItems(Proposals.proposalThat(is("http | filter | http"))))); + @Test + void validSubStreamDefinitionShouldReturnPipe() { + assertThat(completionProvider.complete("http | filter ", 1)).has(Proposals.all("http | filter | log")); + assertThat(completionProvider.complete("http | filter ", 1)).doNotHave(Proposals.proposal("http | filter | http")); } - @Test // file | filter => file | filter --foo=, etc - public void testValidSubStreamDefinitionShouldReturnAppOptions() { - assertThat(completionProvider.complete("http | filter ", 1), hasItems( - Proposals.proposalThat(is("http | filter --expression=")), Proposals.proposalThat(is("http | filter --expresso=")))); + @Test + void validSubStreamDefinitionShouldReturnAppOptions() { + assertThat(completionProvider.complete("http | filter ", 1)).has(Proposals.all("http | filter --expression=", "http | filter --expresso=")); // Same as above, no final space - assertThat(completionProvider.complete("http | filter", 1), hasItems( - Proposals.proposalThat(is("http | filter --expression=")), Proposals.proposalThat(is("http | filter --expresso=")))); + assertThat(completionProvider.complete("http | filter", 1)).has(Proposals.all("http | filter --expression=", "http | filter --expresso=")); } - @Test // file | filter - => file | filter --foo,etc - public void testOneDashShouldReturnTwoDashes() { - assertThat(completionProvider.complete("http | filter -", 1), hasItems( - Proposals.proposalThat(is("http | filter --expression=")), Proposals.proposalThat(is("http | filter --expresso=")))); + @Test + void oneDashShouldReturnTwoDashes() { + assertThat(completionProvider.complete("http | filter -", 1)).has(Proposals.all("http | filter --expression=", "http | filter --expresso=")); } - @Test // file | filter -- => file | filter --foo,etc - public void testTwoDashesShouldReturnOptions() { - assertThat(completionProvider.complete("http | filter --", 1), hasItems( - Proposals.proposalThat(is("http | filter --expression=")), Proposals.proposalThat(is("http | filter --expresso=")))); + @Test + void twoDashesShouldReturnOptions() { + assertThat(completionProvider.complete("http | filter --", 1)).has(Proposals.all("http | filter --expression=", "http | filter --expresso=")); } - @Test // file | => file | foo,etc - public void testDanglingPipeShouldReturnExtraApps() { - assertThat(completionProvider.complete("http |", 1), hasItems(Proposals.proposalThat(is("http | filter")))); - assertThat(completionProvider.complete("http | filter |", 1), - 
hasItems(Proposals.proposalThat(is("http | filter | log")), Proposals.proposalThat(is("http | filter | filter2: filter")))); + @Test + void danglingPipeShouldReturnExtraApps() { + assertThat(completionProvider.complete("http |", 1)).has(Proposals.all("http | filter")); + assertThat(completionProvider.complete("http | filter |", 1)).has(Proposals.all("http | filter | log", "http | filter | filter2: filter")); } - @Test // file --p => file --preventDuplicates=, file --pattern= - public void testUnfinishedOptionNameShouldComplete() { - assertThat(completionProvider.complete("http --p", 1), hasItems(Proposals.proposalThat(is("http --port=")))); + @Test + void unfinishedOptionNameShouldComplete() { + assertThat(completionProvider.complete("http --p", 1)).has(Proposals.all("http --port=")); } - @Test // file | counter --name=foo --inputType=bar => we're done - public void testSinkWithAllOptionsSetCantGoFurther() { - assertThat(completionProvider.complete("http | log --port=1234 --level=debug", 1), empty()); + @Test + void sinkWithAllOptionsSetCantGoFurther() { + assertThat(completionProvider.complete("http | log --port=1234 --level=debug", 1)).isEmpty(); } - @Test // file | counter --name= => nothing - public void testInGenericOptionValueCantProposeAnything() { - assertThat(completionProvider.complete("http --port=", 1), empty()); + @Test + void inGenericOptionValueCantProposeAnything() { + assertThat(completionProvider.complete("http --port=", 1)).isEmpty(); } - @Test // :foo > ==> add app names - public void testDestinationIntoApps() { - assertThat(completionProvider.complete(":foo >", 1), - hasItems(Proposals.proposalThat(is(":foo > filter")), Proposals.proposalThat(is(":foo > log")))); - assertThat(completionProvider.complete(":foo >", 1), not(hasItems(Proposals.proposalThat(is(":foo > http"))))); + @Test + void destinationIntoApps() { + assertThat(completionProvider.complete(":foo >", 1)).has(Proposals.all(":foo > filter", ":foo > log")); + assertThat(completionProvider.complete(":foo >", 1)).doNotHave(Proposals.proposal(":foo > http")); } - @Test // :foo > ==> add app names - public void testDestinationIntoAppsVariant() { - assertThat(completionProvider.complete(":foo >", 1), - hasItems(Proposals.proposalThat(is(":foo > filter")), Proposals.proposalThat(is(":foo > log")))); + @Test + void destinationIntoAppsVariant() { + assertThat(completionProvider.complete(":foo >", 1)).has(Proposals.all(":foo > filter", ":foo > log")); } - @Test // http (no space) => NOT "http2: http" - public void testAutomaticAppLabellingDoesNotGetInTheWay() { - assertThat(completionProvider.complete("http", 1), not(hasItems(Proposals.proposalThat(is("http2: http"))))); + @Test + void automaticAppLabellingDoesNotGetInTheWay() { + assertThat(completionProvider.complete("http", 1)).doNotHave(Proposals.proposal("http2: http")); } - @Test // http --use-ssl= => propose true|false - public void testValueHintForBooleans() { - assertThat(completionProvider.complete("http --use-ssl=", 1), - hasItems(Proposals.proposalThat(is("http --use-ssl=true")), Proposals.proposalThat(is("http --use-ssl=false")))); + @Test + void valueHintForBooleans() { + assertThat(completionProvider.complete("http --use-ssl=", 1)).has(Proposals.all("http --use-ssl=true", "http --use-ssl=false")); } - @Test // .. 
foo --enum-value= => propose enum values - public void testValueHintForEnums() { - assertThat(completionProvider.complete("http | filter --expresso=", 1), - hasItems(Proposals.proposalThat(is("http | filter --expresso=SINGLE")), - Proposals.proposalThat(is("http | filter --expresso=DOUBLE")))); + @Test + void valueHintForEnums() { + assertThat(completionProvider.complete("http | filter --expresso=", 1)).has(Proposals.all("http | filter --expresso=SINGLE", "http | filter --expresso=DOUBLE")); } @Test - public void testUnrecognizedPrefixesDontBlowUp() { - assertThat(completionProvider.complete("foo", 1), empty()); - assertThat(completionProvider.complete("foo --", 1), empty()); - assertThat(completionProvider.complete("http --notavalidoption", 1), empty()); - assertThat(completionProvider.complete("http --notavalidoption=", 1), empty()); - assertThat(completionProvider.complete("foo --some-option", 1), empty()); - assertThat(completionProvider.complete("foo --some-option=", 1), empty()); - assertThat(completionProvider.complete("foo --some-option=prefix", 1), empty()); + void unrecognizedPrefixesDontBlowUp() { + assertThat(completionProvider.complete("foo", 1)).isEmpty(); + assertThat(completionProvider.complete("foo --", 1)).isEmpty(); + assertThat(completionProvider.complete("http --notavalidoption", 1)).isEmpty(); + assertThat(completionProvider.complete("http --notavalidoption=", 1)).isEmpty(); + assertThat(completionProvider.complete("foo --some-option", 1)).isEmpty(); + assertThat(completionProvider.complete("foo --some-option=", 1)).isEmpty(); + assertThat(completionProvider.complete("foo --some-option=prefix", 1)).isEmpty(); assertThat( completionProvider.complete( - "http | filter --port=12 --expression=something " + "--expresso=not-a-valid-prefix", 1), - empty()); + "http | filter --port=12 --expression=something " + "--expresso=not-a-valid-prefix", 1)).isEmpty(); } /* @@ -197,9 +176,8 @@ public void testUnrecognizedPrefixesDontBlowUp() { * "...=tr --other.prop" */ @Test - public void testClosedSetValuesShouldBeExclusive() { - assertThat(completionProvider.complete("http --use-ssl=tr", 1), - not(hasItems(Proposals.proposalThat(startsWith("http --use-ssl=tr --port"))))); + void closedSetValuesShouldBeExclusive() { + assertThat(completionProvider.complete("http --use-ssl=tr", 1)).doNotHave(Proposals.proposal(s-> s.startsWith("http --use-ssl=tr --port"))); } } diff --git a/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/TaskCompletionProviderTests.java b/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/TaskCompletionProviderTests.java index 08db8a78c5..9a27d3f23d 100644 --- a/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/TaskCompletionProviderTests.java +++ b/spring-cloud-dataflow-completion/src/test/java/org/springframework/cloud/dataflow/completion/TaskCompletionProviderTests.java @@ -16,20 +16,13 @@ package org.springframework.cloud.dataflow.completion; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.cloud.dataflow.configuration.metadata.ApplicationConfigurationMetadataResolver; -import org.springframework.test.context.junit4.SpringRunner; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.hasItems; -import static 
org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.startsWith; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; /** * Integration tests for TaskCompletionProvider. @@ -43,90 +36,90 @@ * @author Eric Bottard * @author Mark Fisher * @author Andy Clement + * @author Corneil du Plessis */ @SuppressWarnings("unchecked") -@RunWith(SpringRunner.class) -@SpringBootTest(classes = { CompletionConfiguration.class, CompletionTestsMocks.class }, properties = { - "spring.main.allow-bean-definition-overriding=true" }) -public class TaskCompletionProviderTests { +@SpringBootTest(classes = {CompletionConfiguration.class, CompletionTestsMocks.class}, properties = { + "spring.main.allow-bean-definition-overriding=true"}) +class TaskCompletionProviderTests { @Autowired private TaskCompletionProvider completionProvider; - @Test // => basic,plum,etc - public void testEmptyStartShouldProposeSourceApps() { - assertThat(completionProvider.complete("", 1), hasItems(Proposals.proposalThat(is("basic")), Proposals.proposalThat(is("plum")))); - assertThat(completionProvider.complete("", 1), not(hasItems(Proposals.proposalThat(is("log"))))); + @Test + void emptyStartShouldProposeSourceApps() { + assertThat(completionProvider.complete("", 1)).has(Proposals.all("basic", "plum")); + assertThat(completionProvider.complete("", 1)).doNotHave(Proposals.proposal("log")); } - @Test // b => basic - public void testUnfinishedAppNameShouldReturnCompletions() { - assertThat(completionProvider.complete("b", 1), hasItems(Proposals.proposalThat(is("basic")))); - assertThat(completionProvider.complete("ba", 1), hasItems(Proposals.proposalThat(is("basic")))); - assertThat(completionProvider.complete("pl", 1), not(hasItems(Proposals.proposalThat(is("basic"))))); + @Test + void unfinishedAppNameShouldReturnCompletions() { + assertThat(completionProvider.complete("b", 1)).has(Proposals.all("basic")); + assertThat(completionProvider.complete("ba", 1)).has(Proposals.all("basic")); + assertThat(completionProvider.complete("pl", 1)).doNotHave(Proposals.proposal("basic")); } - @Test // basic => basic --foo=, etc - public void testValidTaskDefinitionShouldReturnAppOptions() { - assertThat(completionProvider.complete("basic ", 1), - hasItems(Proposals.proposalThat(is("basic --expression=")), Proposals.proposalThat(is("basic --expresso=")))); + @Test + void validTaskDefinitionShouldReturnAppOptions() { + assertThat(completionProvider.complete("basic ", 1)) + .has(Proposals.all("basic --expression=", "basic --expresso=")); // Same as above, no final space - assertThat(completionProvider.complete("basic", 1), - hasItems(Proposals.proposalThat(is("basic --expression=")), Proposals.proposalThat(is("basic --expresso=")))); + assertThat(completionProvider.complete("basic", 1)) + .has(Proposals.all("basic --expression=", "basic --expresso=")); } - @Test // file | filter - => file | filter --foo,etc - public void testOneDashShouldReturnTwoDashes() { - assertThat(completionProvider.complete("basic -", 1), - hasItems(Proposals.proposalThat(is("basic --expression=")), Proposals.proposalThat(is("basic --expresso=")))); + @Test + void oneDashShouldReturnTwoDashes() { + assertThat(completionProvider.complete("basic -", 1)) + .has(Proposals.all("basic --expression=", "basic --expresso=")); } - @Test // basic -- => basic --foo,etc - public void testTwoDashesShouldReturnOptions() { - assertThat(completionProvider.complete("basic --", 1), - 
hasItems(Proposals.proposalThat(is("basic --expression=")), Proposals.proposalThat(is("basic --expresso=")))); + @Test + void twoDashesShouldReturnOptions() { + assertThat(completionProvider.complete("basic --", 1)) + .has(Proposals.all("basic --expression=", "basic --expresso=")); } - @Test // file --p => file --preventDuplicates=, file --pattern= - public void testUnfinishedOptionNameShouldComplete() { - assertThat(completionProvider.complete("basic --foo", 1), hasItems(Proposals.proposalThat(is("basic --fooble=")))); + @Test + void unfinishedOptionNameShouldComplete() { + assertThat(completionProvider.complete("basic --foo", 1)).has(Proposals.all("basic --fooble=")); } - @Test // file | counter --name= => nothing - public void testInGenericOptionValueCantProposeAnything() { - assertThat(completionProvider.complete("basic --expression=", 1), empty()); + @Test + void inGenericOptionValueCantProposeAnything() { + assertThat(completionProvider.complete("basic --expression=", 1)).isEmpty(); } - @Test // plum --use-ssl= => propose true|false - public void testValueHintForBooleans() { - assertThat(completionProvider.complete("plum --use-ssl=", 1), - hasItems(Proposals.proposalThat(is("plum --use-ssl=true")), Proposals.proposalThat(is("plum --use-ssl=false")))); + @Test + void valueHintForBooleans() { + assertThat(completionProvider.complete("plum --use-ssl=", 1)) + .has(Proposals.all("plum --use-ssl=true", "plum --use-ssl=false")); } - @Test // basic --enum-value= => propose enum values - public void testValueHintForEnums() { - assertThat(completionProvider.complete("basic --expresso=", 1), - hasItems(Proposals.proposalThat(is("basic --expresso=SINGLE")), Proposals.proposalThat(is("basic --expresso=DOUBLE")))); + @Test + void valueHintForEnums() { + assertThat(completionProvider.complete("basic --expresso=", 1)) + .has(Proposals.all("basic --expresso=SINGLE", "basic --expresso=DOUBLE")); } @Test - public void testUnrecognizedPrefixesDontBlowUp() { - assertThat(completionProvider.complete("foo", 1), empty()); - assertThat(completionProvider.complete("foo --", 1), empty()); - assertThat(completionProvider.complete("http --notavalidoption", 1), empty()); - assertThat(completionProvider.complete("http --notavalidoption=", 1), empty()); - assertThat(completionProvider.complete("foo --some-option", 1), empty()); - assertThat(completionProvider.complete("foo --some-option=", 1), empty()); - assertThat(completionProvider.complete("foo --some-option=prefix", 1), empty()); + void unrecognizedPrefixesDontBlowUp() { + assertThat(completionProvider.complete("foo", 1)).isEmpty(); + assertThat(completionProvider.complete("foo --", 1)).isEmpty(); + assertThat(completionProvider.complete("http --notavalidoption", 1)).isEmpty(); + assertThat(completionProvider.complete("http --notavalidoption=", 1)).isEmpty(); + assertThat(completionProvider.complete("foo --some-option", 1)).isEmpty(); + assertThat(completionProvider.complete("foo --some-option=", 1)).isEmpty(); + assertThat(completionProvider.complete("foo --some-option=prefix", 1)).isEmpty(); } /* @@ -134,8 +127,8 @@ public void testUnrecognizedPrefixesDontBlowUp() { * "--expresso=s --other.prop" */ @Test - public void testClosedSetValuesShouldBeExclusive() { - assertThat(completionProvider.complete("basic --expresso=s", 1), - not(hasItems(Proposals.proposalThat(startsWith("basic --expresso=s --fooble"))))); + void closedSetValuesShouldBeExclusive() { + assertThat(completionProvider.complete("basic --expresso=s", 1)) + .doNotHave(Proposals.proposal(s -> 
s.startsWith("basic --expresso=s --fooble"))); } } diff --git a/spring-cloud-dataflow-completion/src/test/resources/org/springframework/cloud/dataflow/completion/boot_versions/README.txt b/spring-cloud-dataflow-completion/src/test/resources/org/springframework/cloud/dataflow/completion/boot_versions/README.txt deleted file mode 100644 index 9040967d87..0000000000 --- a/spring-cloud-dataflow-completion/src/test/resources/org/springframework/cloud/dataflow/completion/boot_versions/README.txt +++ /dev/null @@ -1 +0,0 @@ -The contents of these 2 boot uberjars has been created using the src/test/support maven project(s). diff --git a/spring-cloud-dataflow-completion/src/test/resources/org/springframework/cloud/dataflow/completion/boot_versions/boot13-1.0.0.BUILD-SNAPSHOT.jar b/spring-cloud-dataflow-completion/src/test/resources/org/springframework/cloud/dataflow/completion/boot_versions/boot13-1.0.0.BUILD-SNAPSHOT.jar deleted file mode 100644 index b542bf21fd..0000000000 Binary files a/spring-cloud-dataflow-completion/src/test/resources/org/springframework/cloud/dataflow/completion/boot_versions/boot13-1.0.0.BUILD-SNAPSHOT.jar and /dev/null differ diff --git a/spring-cloud-dataflow-completion/src/test/resources/org/springframework/cloud/dataflow/completion/boot_versions/boot14-1.0.0.BUILD-SNAPSHOT.jar b/spring-cloud-dataflow-completion/src/test/resources/org/springframework/cloud/dataflow/completion/boot_versions/boot14-1.0.0.BUILD-SNAPSHOT.jar deleted file mode 100644 index b169b3cb39..0000000000 Binary files a/spring-cloud-dataflow-completion/src/test/resources/org/springframework/cloud/dataflow/completion/boot_versions/boot14-1.0.0.BUILD-SNAPSHOT.jar and /dev/null differ diff --git a/spring-cloud-dataflow-completion/src/test/support/boot13/pom.xml b/spring-cloud-dataflow-completion/src/test/support/boot13/pom.xml deleted file mode 100644 index f2ac85f486..0000000000 --- a/spring-cloud-dataflow-completion/src/test/support/boot13/pom.xml +++ /dev/null @@ -1,61 +0,0 @@ - - - - - 4.0.0 - com.acme - boot13 - 1.0.0.BUILD-SNAPSHOT - - org.springframework.boot - spring-boot-starter-parent - 1.3.0.RELEASE - - - - UTF-8 - - - - - org.springframework.boot - spring-boot-starter - 1.3.0.RELEASE - - - org.springframework.boot - spring-boot-configuration-processor - 1.3.0.RELEASE - true - - - com.acme - common - 1.0.0.BUILD-SNAPSHOT - - - - - - org.springframework.boot - spring-boot-maven-plugin - 1.3.0.RELEASE - - - - diff --git a/spring-cloud-dataflow-completion/src/test/support/boot13/src/main/resources/META-INF/dataflow-configuration-metadata.properties b/spring-cloud-dataflow-completion/src/test/support/boot13/src/main/resources/META-INF/dataflow-configuration-metadata.properties deleted file mode 100644 index d38019ab9e..0000000000 --- a/spring-cloud-dataflow-completion/src/test/support/boot13/src/main/resources/META-INF/dataflow-configuration-metadata.properties +++ /dev/null @@ -1,3 +0,0 @@ -configuration-properties.classes=\ - com.acme.boot13.MyConfigProperties13, \ - com.acme.common.ConfigProperties diff --git a/spring-cloud-dataflow-completion/src/test/support/boot14/pom.xml b/spring-cloud-dataflow-completion/src/test/support/boot14/pom.xml deleted file mode 100644 index abb354872f..0000000000 --- a/spring-cloud-dataflow-completion/src/test/support/boot14/pom.xml +++ /dev/null @@ -1,45 +0,0 @@ - - - 4.0.0 - com.acme - boot14 - 1.0.0.BUILD-SNAPSHOT - - org.springframework.boot - spring-boot-starter-parent - 1.4.0.RELEASE - - - - UTF-8 - - - - - org.springframework.boot - spring-boot-starter - 
1.4.0.RELEASE - - - org.springframework.boot - spring-boot-configuration-processor - 1.4.0.RELEASE - true - - - com.acme - common - 1.0.0.BUILD-SNAPSHOT - - - - - - org.springframework.boot - spring-boot-maven-plugin - 1.4.0.RELEASE - - - - diff --git a/spring-cloud-dataflow-completion/src/test/support/boot14/src/main/resources/META-INF/dataflow-configuration-metadata.properties b/spring-cloud-dataflow-completion/src/test/support/boot14/src/main/resources/META-INF/dataflow-configuration-metadata.properties deleted file mode 100644 index 764fc7c1c5..0000000000 --- a/spring-cloud-dataflow-completion/src/test/support/boot14/src/main/resources/META-INF/dataflow-configuration-metadata.properties +++ /dev/null @@ -1,3 +0,0 @@ -configuration-properties.classes=\ - com.acme.boot14.MyConfigProperties14, \ - com.acme.common.ConfigProperties diff --git a/spring-cloud-dataflow-completion/src/test/support/common/pom.xml b/spring-cloud-dataflow-completion/src/test/support/common/pom.xml deleted file mode 100644 index 44ade52df7..0000000000 --- a/spring-cloud-dataflow-completion/src/test/support/common/pom.xml +++ /dev/null @@ -1,31 +0,0 @@ - - - 4.0.0 - com.acme - common - 1.0.0.BUILD-SNAPSHOT - - org.springframework.boot - spring-boot-starter-parent - 1.3.0.RELEASE - - - - UTF-8 - - - - - org.springframework.boot - spring-boot-starter - 1.3.0.RELEASE - - - org.springframework.boot - spring-boot-configuration-processor - 1.3.0.RELEASE - true - - - diff --git a/spring-cloud-dataflow-completion/src/test/support/pom.xml b/spring-cloud-dataflow-completion/src/test/support/pom.xml deleted file mode 100644 index 583fd030b1..0000000000 --- a/spring-cloud-dataflow-completion/src/test/support/pom.xml +++ /dev/null @@ -1,18 +0,0 @@ - - - 4.0.0 - com.acme - parent - 1.0.0.BUILD-SNAPSHOT - pom - - UTF-8 - - - - common - boot13 - boot14 - - diff --git a/spring-cloud-dataflow-composed-task-runner/README.adoc b/spring-cloud-dataflow-composed-task-runner/README.adoc new file mode 100644 index 0000000000..144a11ea8c --- /dev/null +++ b/spring-cloud-dataflow-composed-task-runner/README.adoc @@ -0,0 +1,144 @@
+//tag::ref-doc[]
+:image-root: images
+
+= Composed Task Runner
+
+A task that executes tasks in a directed graph, as specified by a DSL that is
+passed in via the `--graph` command line argument.
+
+== Overview
+The Composed Task Runner parses the graph DSL and, for each node in the graph,
+executes a RESTful call against a specified https://docs.spring.io/spring-cloud-dataflow/docs/current/reference/htmlsingle/[Spring Cloud Data Flow]
+instance to launch the associated task definition. For each task definition that is executed, the
+Composed Task Runner polls the database to verify that the task completed.
+Once complete, the Composed Task Runner either continues to the next task in
+the graph or fails, based on how the DSL specifies that the sequence of tasks should
+be executed.
+
+== Graph DSL
+
+The graph DSL is made up of task definitions that have been defined within
+the Spring Cloud Data Flow server referenced by the `data-flow-uri`
+(default: http://localhost:9393).
+These definitions can be placed into a derived graph based on a DSL through
+the use of sequences, transitions, splits, or a combination thereof.
+
+== Traversing the graph
+Composed Task Runner is built using
+https://docs.spring.io/spring-batch/reference/html/[Spring Batch]
+to execute the directed graph. As such, each node in the graph is a
+https://docs.spring.io/spring-batch/reference/html/domain.html#domainStep[Step].
+As discussed in the overview, each step in the graph will post a request to a
+Spring Cloud Data Flow Server to execute a task definition. If the task launched by
+the step fails to complete within the time specified by the `maxWaitTime`
+property, a
+`org.springframework.cloud.task.app.composedtaskrunner.support.TimeoutException`
+will be thrown. Once the task launched by the step completes,
+the Composed Task Runner will set the `ExitStatus` of that step based on the following rules:
+
+* If the `TaskExecution` has an `ExitMessage`, it will be used as the `ExitStatus`.
+* If no `ExitMessage` is present and the `ExitCode` is set to 0, then the `ExitStatus`
+for the step will be `COMPLETED`.
+* If no `ExitMessage` is present and the `ExitCode` is set to 1, then the `ExitStatus`
+for the step will be `FAILED`.
+
+If the state of any step in the graph is set to `FAILED` and is not handled by
+the DSL, the directed graph execution will terminate.
+
+=== Sequences
+The Composed Task Runner supports the ability to traverse sequences of task
+definitions. A sequence is represented by a task definition name followed by the
+`&&` symbol and then the next task definition to be launched.
+For example, if tasks AAA, BBB, and CCC are to be launched in sequence, it
+looks like this:
+```
+AAA && BBB && CCC
+```
+image::{image-root}/basicsequence.png[basic sequence]
+
+You can execute the same task multiple times in a sequence. For example:
+```
+AAA && AAA && AAA
+```
+image::{image-root}/samejobsequence.png[basic sequence with repeated job definition]
+
+If an `ExitStatus` of 'FAILED' is returned in a sequence, the Composed Task
+Runner will terminate. For example, if the composed task `AAA && BBB && CCC` is
+executed and BBB fails, then CCC will not be launched.
+
+=== Transitions
+The Composed Task Runner supports the ability to control which tasks get
+executed based on the `ExitStatus` of the previous task. This is
+done by specifying an `ExitStatus` after the task definition, followed by
+the `->` operator and the task definition that should be launched based on
+the result. For example:
+```
+AAA 'FAILED' -> BBB 'COMPLETED' -> CCC
+```
+image::{image-root}/basictransition.png[basic transition]
+
+This launches AAA; if AAA fails, then BBB will be launched, and if AAA
+completes successfully, then CCC will launch.
+
+You can also have a sequence that follows a transition. For example:
+```
+AAA 'FAILED' -> BBB && CCC && DDD
+```
+image::{image-root}/basictransitionwithsequence.png[basic transition with sequence]
+
+This launches AAA; for any `ExitStatus` returned other than 'FAILED',
+CCC && DDD will be launched. However, if AAA returns 'FAILED', then BBB will
+be launched, but CCC && DDD will not.
+
+==== Wildcard
+Wildcards are also supported in transitions.
+For example:
+```
+AAA 'FAILED' -> BBB '*' -> CCC
+```
+image::{image-root}/basictransitionwithwildcard.png[basic transition with wildcard]
+
+In the case above, AAA will launch, and any `ExitStatus` other than 'FAILED' will
+launch CCC.
+
+=== Splits
+Splits allow a user to execute tasks in parallel.
+For example:
+```
+<AAA || BBB || CCC>
+```
+image::{image-root}/basicsplit.png[basic split]
+
+This launches AAA, BBB and CCC in parallel.
When launching splits as a part of a
+composed task, all elements of the split must finish successfully before the
+next task definition can be launched. For example:
+```
+<AAA || BBB || CCC> && DDD && EEE
+```
+image::{image-root}/basicsplitwithsequence.png[basic split with sequence]
+
+In the case above, once AAA, BBB and CCC complete successfully, DDD and EEE
+will be launched in the sequence enumerated above. However, if one of the task
+definitions in the split fails, then DDD and EEE will not fire. For example, if
+BBB fails, then AAA and CCC will be marked successful, BBB will be marked a
+failure, and DDD and EEE will not be launched.
+
+If any child task within a split returns an `ExitMessage` other than `COMPLETED`, the split
+will have an `ExitStatus` of `FAILED`. To ignore the `ExitMessage` of a child task,
+add `ignoreExitMessage=true` for each app that will return an `ExitMessage`
+within the split. When using this flag, the `ExitStatus` of the task will be
+`COMPLETED` if the `ExitCode` of the child task is zero. The split will have an
+`ExitStatus` of `FAILED` if the `ExitCode` is non-zero. There are two ways to
+set the `ignoreExitMessage` flag:
+
+1. Set the property for each of the apps whose `exitMessage` should be
+ignored within the split. For example, in a split like `<AAA || BBB || CCC>` where `BBB`
+will return an `exitMessage`, you would set the `ignoreExitMessage` property as
+`app.BBB.ignoreExitMessage=true`.
+
+2. You can also set it for all apps using the `composed-task-arguments` property,
+for example: `--composed-task-arguments=--ignoreExitMessage=true`.
+
+== Configuration
+
+See https://docs.spring.io/spring-cloud-dataflow/docs/current/reference/htmlsingle/#spring-cloud-dataflow-composed-tasks[Configuring Composed Task Runner] in the Spring Cloud Data Flow reference guide.
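+For illustration only, the exit-status rules described in the "Traversing the graph"
+section can be read as the following sketch. This is a hypothetical helper, not actual
+Composed Task Runner code; it assumes `org.springframework.cloud.task.repository.TaskExecution`
+and, unlike the narrower rules above, treats any non-zero `ExitCode` as `FAILED`:
+```
+import org.springframework.batch.core.ExitStatus;
+import org.springframework.cloud.task.repository.TaskExecution;
+
+class ExitStatusRules {
+
+	// An ExitMessage always wins; otherwise the ExitCode decides the outcome.
+	static ExitStatus exitStatusFor(TaskExecution execution) {
+		if (execution.getExitMessage() != null) {
+			return new ExitStatus(execution.getExitMessage());
+		}
+		Integer exitCode = execution.getExitCode();
+		return (exitCode != null && exitCode == 0) ? ExitStatus.COMPLETED : ExitStatus.FAILED;
+	}
+}
+```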
diff --git a/spring-cloud-dataflow-composed-task-runner/images/basicsequence.png b/spring-cloud-dataflow-composed-task-runner/images/basicsequence.png new file mode 100644 index 0000000000..1a5dd14aa4 Binary files /dev/null and b/spring-cloud-dataflow-composed-task-runner/images/basicsequence.png differ diff --git a/spring-cloud-dataflow-composed-task-runner/images/basicsplit.png b/spring-cloud-dataflow-composed-task-runner/images/basicsplit.png new file mode 100644 index 0000000000..790f167350 Binary files /dev/null and b/spring-cloud-dataflow-composed-task-runner/images/basicsplit.png differ diff --git a/spring-cloud-dataflow-composed-task-runner/images/basicsplitwithsequence.png b/spring-cloud-dataflow-composed-task-runner/images/basicsplitwithsequence.png new file mode 100644 index 0000000000..8d5f5fc113 Binary files /dev/null and b/spring-cloud-dataflow-composed-task-runner/images/basicsplitwithsequence.png differ diff --git a/spring-cloud-dataflow-composed-task-runner/images/basictransition.png b/spring-cloud-dataflow-composed-task-runner/images/basictransition.png new file mode 100644 index 0000000000..6a8bdf7380 Binary files /dev/null and b/spring-cloud-dataflow-composed-task-runner/images/basictransition.png differ diff --git a/spring-cloud-dataflow-composed-task-runner/images/basictransitionwithsequence.png b/spring-cloud-dataflow-composed-task-runner/images/basictransitionwithsequence.png new file mode 100644 index 0000000000..febcd3501d Binary files /dev/null and b/spring-cloud-dataflow-composed-task-runner/images/basictransitionwithsequence.png differ diff --git a/spring-cloud-dataflow-composed-task-runner/images/basictransitionwithwildcard.png b/spring-cloud-dataflow-composed-task-runner/images/basictransitionwithwildcard.png new file mode 100644 index 0000000000..8a33af8e64 Binary files /dev/null and b/spring-cloud-dataflow-composed-task-runner/images/basictransitionwithwildcard.png differ diff --git a/spring-cloud-dataflow-composed-task-runner/images/samejobsequence.png b/spring-cloud-dataflow-composed-task-runner/images/samejobsequence.png new file mode 100644 index 0000000000..eab31de30b Binary files /dev/null and b/spring-cloud-dataflow-composed-task-runner/images/samejobsequence.png differ diff --git a/spring-cloud-dataflow-composed-task-runner/pom.xml b/spring-cloud-dataflow-composed-task-runner/pom.xml index d7c4ab4553..d24255f523 100644 --- a/spring-cloud-dataflow-composed-task-runner/pom.xml +++ b/spring-cloud-dataflow-composed-task-runner/pom.xml @@ -1,15 +1,26 @@ - + 4.0.0 org.springframework.cloud spring-cloud-dataflow-parent - 2.8.0-SNAPSHOT + 3.0.0-SNAPSHOT + ../spring-cloud-dataflow-parent + 3.0.0-SNAPSHOT + org.springframework.cloud spring-cloud-dataflow-composed-task-runner + spring-cloud-dataflow-composed-task-runner + Spring Cloud Data Flow Composed Task Runner + jar + 17 3.3.0 + true + 3.4.1 @@ -17,14 +28,10 @@ spring-boot-autoconfigure compile - - org.springframework.boot - spring-boot-configuration-processor - true - org.springframework.cloud spring-cloud-dataflow-rest-client + ${project.version} io.pivotal.cfenv @@ -37,6 +44,7 @@ org.springframework.cloud spring-cloud-dataflow-core + ${project.version} org.springframework.cloud @@ -45,10 +53,12 @@ com.h2database h2 + [2.2.222,) org.mariadb.jdbc mariadb-java-client + [3.1.2,) org.postgresql @@ -70,32 +80,13 @@ spring-boot-starter-jdbc - org.hibernate + org.hibernate.orm hibernate-core - - org.hibernate - hibernate-entitymanager - - - javax.xml.bind - jaxb-api - - - org.assertj - assertj-core - test - 
org.springframework.boot spring-boot-starter-test test - - - org.junit.vintage - junit-vintage-engine - - org.codehaus.plexus @@ -105,13 +96,21 @@ org.springframework.cloud - spring-cloud-starter-common-security-config-web + spring-cloud-common-security-config-web test + ${project.version} + + + io.micrometer + micrometer-registry-prometheus io.micrometer.prometheus prometheus-rsocket-spring - test + + + io.micrometer.prometheus + prometheus-rsocket-client @@ -119,84 +118,45 @@ org.springframework.boot spring-boot-maven-plugin - - - io.fabric8 - docker-maven-plugin - 0.33.0 + + + + repackage + + + - - - springcloud/${project.artifactId} - - - latest - ${project.version} - - springcloud/openjdk:2.0.0.RELEASE - - /tmp - - - - ${org.springframework.cloud.dataflow.spring.configuration.metadata.json} - - - - C.UTF-8 - - - - java - -jar - /maven/composed-task-runner.jar - - - - maven - assembly.xml - / - - - - + + IF_NOT_PRESENT + springcloud/${project.artifactId}:${project.version} + - - org.springframework.cloud - spring-cloud-dataflow-apps-metadata-plugin - 1.0.2 - - true - + org.apache.maven.plugins + maven-source-plugin + 3.3.0 - aggregate-metadata - compile + source - aggregate-metadata + jar + package - org.codehaus.mojo - properties-maven-plugin - 1.0.0 + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} - process-classes + javadoc - read-project-properties + jar - - - - ${project.build.outputDirectory}/META-INF/spring-configuration-metadata-encoded.properties - - - + package diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedBatchConfigurer.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedBatchConfigurer.java deleted file mode 100644 index 32da8d15e2..0000000000 --- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedBatchConfigurer.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2017-2020 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.cloud.dataflow.composedtaskrunner; - -import javax.sql.DataSource; - -import org.springframework.boot.autoconfigure.batch.BasicBatchConfigurer; -import org.springframework.boot.autoconfigure.batch.BatchProperties; -import org.springframework.boot.autoconfigure.transaction.TransactionManagerCustomizers; -import org.springframework.transaction.annotation.Isolation; - -/** - * A BatchConfigurer for CTR that will establish the transaction isolation lavel to READ_COMMITTED. - * - * @author Glenn Renfro - */ -public class ComposedBatchConfigurer extends BasicBatchConfigurer { - /** - * Create a new {@link BasicBatchConfigurer} instance. 
- * - * @param properties the batch properties - * @param dataSource the underlying data source - * @param transactionManagerCustomizers transaction manager customizers (or - * {@code null}) - */ - protected ComposedBatchConfigurer(BatchProperties properties, DataSource dataSource, TransactionManagerCustomizers transactionManagerCustomizers) { - super(properties, dataSource, transactionManagerCustomizers); - } - - @Override - protected String determineIsolationLevel() { - return "ISOLATION_" + Isolation.READ_COMMITTED; - } -} diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedRunnerJobFactory.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedRunnerJobFactory.java index 49195c5acd..0a2c3bb814 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedRunnerJobFactory.java +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedRunnerJobFactory.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2020 the original author or authors. + * Copyright 2017-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -23,12 +23,16 @@ import java.util.UUID; import org.springframework.batch.core.Job; +import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.JobParametersBuilder; +import org.springframework.batch.core.JobParametersIncrementer; import org.springframework.batch.core.Step; -import org.springframework.batch.core.configuration.annotation.JobBuilderFactory; import org.springframework.batch.core.job.builder.FlowBuilder; import org.springframework.batch.core.job.builder.FlowJobBuilder; +import org.springframework.batch.core.job.builder.JobBuilder; import org.springframework.batch.core.job.flow.Flow; import org.springframework.batch.core.launch.support.RunIdIncrementer; +import org.springframework.batch.core.repository.JobRepository; import org.springframework.beans.factory.FactoryBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties; @@ -53,6 +57,8 @@ public class ComposedRunnerJobFactory implements FactoryBean { private static final String WILD_CARD = "*"; + private static String CTR_KEY = "ctr.id"; + @Autowired private ApplicationContext context; @@ -60,7 +66,7 @@ public class ComposedRunnerJobFactory implements FactoryBean { private TaskExecutor taskExecutor; @Autowired - private JobBuilderFactory jobBuilderFactory; + private JobRepository jobRepository; @Autowired private TaskNameResolver taskNameResolver; @@ -79,15 +85,12 @@ public class ComposedRunnerJobFactory implements FactoryBean { private String dsl; - private boolean incrementInstanceEnabled; - private int nestedSplits; public ComposedRunnerJobFactory(ComposedTaskProperties properties) { this.composedTaskProperties = properties; Assert.notNull(properties.getGraph(), "The DSL must not be null"); this.dsl = properties.getGraph(); - this.incrementInstanceEnabled = properties.isIncrementInstanceEnabled(); this.flowBuilder = new FlowBuilder<>(UUID.randomUUID().toString()); } @@ -100,15 +103,14 @@ public Job getObject() throws Exception { taskParser.parse().accept(composedRunnerVisitor); 
this.visitorDeque = composedRunnerVisitor.getFlow(); - - FlowJobBuilder builder = this.jobBuilderFactory - .get(this.taskNameResolver.getTaskName()) + JobBuilder jobBuilder = new JobBuilder(this.taskNameResolver.getTaskName(), jobRepository); + FlowJobBuilder builder = jobBuilder .start(this.flowBuilder .start(createFlow()) .end()) .end(); - if(this.incrementInstanceEnabled) { - builder.incrementer(new RunIdIncrementer()); + if (this.composedTaskProperties.isUuidInstanceEnabled()) { + builder.incrementer(new UuidIncrementer()); } return builder.build(); } @@ -352,4 +354,13 @@ private Flow getTaskAppFlow(TaskAppNode taskApp) { return new FlowBuilder(beanName).from(currentStep).end(); } + + public static class UuidIncrementer implements JobParametersIncrementer { + + @Override + public JobParameters getNext(JobParameters parameters) { + JobParameters params = (parameters == null) ? new JobParameters() : parameters; + return new JobParametersBuilder(params).addString(CTR_KEY, UUID.randomUUID().toString()).toJobParameters(); + } + } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfiguration.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfiguration.java index 78e6d48635..e13f4bd945 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfiguration.java +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2020 the original author or authors. + * Copyright 2017-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -19,63 +19,69 @@ import javax.sql.DataSource; import org.springframework.batch.core.StepExecutionListener; -import org.springframework.batch.core.configuration.annotation.BatchConfigurer; -import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.autoconfigure.batch.BatchProperties; -import org.springframework.boot.autoconfigure.transaction.TransactionManagerCustomizers; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.beans.factory.config.BeanPostProcessor; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties; import org.springframework.cloud.task.configuration.EnableTask; +import org.springframework.cloud.task.listener.TaskExecutionListener; import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import org.springframework.core.task.TaskExecutor; import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; +import org.springframework.transaction.PlatformTransactionManager; /** * Configures the Job that will execute the Composed Task Execution. 
* * @author Glenn Renfro + * @author Corneil du Plessis */ -@EnableBatchProcessing @EnableTask @EnableConfigurationProperties(ComposedTaskProperties.class) @Configuration @Import(org.springframework.cloud.dataflow.composedtaskrunner.StepBeanDefinitionRegistrar.class) public class ComposedTaskRunnerConfiguration { - @Autowired - private ComposedTaskProperties properties; + @Bean + public TaskExecutionListener taskExecutionListener() { + return new ComposedTaskRunnerTaskListener(); + } @Bean - public StepExecutionListener composedTaskStepExecutionListener(TaskExplorer taskExplorer){ + public StepExecutionListener composedTaskStepExecutionListener(TaskExplorer taskExplorer) { return new org.springframework.cloud.dataflow.composedtaskrunner.ComposedTaskStepExecutionListener(taskExplorer); } @Bean - public org.springframework.cloud.dataflow.composedtaskrunner.ComposedRunnerJobFactory composedTaskJob() { - - return new org.springframework.cloud.dataflow.composedtaskrunner.ComposedRunnerJobFactory(this.properties); + public ComposedRunnerJobFactory composedTaskJob(ComposedTaskProperties properties) { + return new ComposedRunnerJobFactory(properties); } @Bean - public TaskExecutor taskExecutor() { + public TaskExecutor taskExecutor(ComposedTaskProperties properties) { ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor(); taskExecutor.setCorePoolSize(properties.getSplitThreadCorePoolSize()); taskExecutor.setMaxPoolSize(properties.getSplitThreadMaxPoolSize()); taskExecutor.setKeepAliveSeconds(properties.getSplitThreadKeepAliveSeconds()); taskExecutor.setAllowCoreThreadTimeOut( - properties.isSplitThreadAllowCoreThreadTimeout()); + properties.isSplitThreadAllowCoreThreadTimeout()); taskExecutor.setQueueCapacity(properties.getSplitThreadQueueCapacity()); taskExecutor.setWaitForTasksToCompleteOnShutdown( - properties.isSplitThreadWaitForTasksToCompleteOnShutdown()); + properties.isSplitThreadWaitForTasksToCompleteOnShutdown()); return taskExecutor; } + /** + * Provides the {@link JobRepository} that is configured to be used by the composed task runner. + */ @Bean - public BatchConfigurer getComposedBatchConfigurer(BatchProperties properties, DataSource dataSource, TransactionManagerCustomizers transactionManagerCustomizers) { - return new org.springframework.cloud.dataflow.composedtaskrunner.ComposedBatchConfigurer(properties, dataSource, transactionManagerCustomizers); + public BeanPostProcessor jobRepositoryBeanPostProcessor(PlatformTransactionManager transactionManager, + DataSource incrementerDataSource, + ComposedTaskProperties composedTaskProperties) { + return new JobRepositoryBeanPostProcessor(transactionManager, incrementerDataSource, composedTaskProperties); } + } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactory.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactory.java index 9719fd1d82..8a361b8cff 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactory.java +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactory.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2024 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -23,17 +23,29 @@ import java.util.stream.Collectors; import java.util.stream.Stream; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.datatype.jdk8.Jdk8Module; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import org.springframework.batch.core.Step; import org.springframework.batch.core.StepExecutionListener; -import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; import org.springframework.beans.factory.FactoryBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties; -import org.springframework.cloud.task.configuration.TaskConfigurer; +import org.springframework.cloud.dataflow.core.Base64Utils; +import org.springframework.cloud.dataflow.rest.support.jackson.Jackson2DataflowModule; import org.springframework.cloud.task.configuration.TaskProperties; +import org.springframework.cloud.task.repository.TaskExplorer; +import org.springframework.core.env.Environment; +import org.springframework.hateoas.mediatype.hal.Jackson2HalModule; import org.springframework.security.oauth2.client.endpoint.OAuth2AccessTokenResponseClient; import org.springframework.security.oauth2.client.endpoint.OAuth2ClientCredentialsGrantRequest; import org.springframework.security.oauth2.client.registration.ClientRegistrationRepository; +import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.annotation.Isolation; import org.springframework.transaction.interceptor.DefaultTransactionAttribute; import org.springframework.transaction.interceptor.TransactionAttribute; @@ -45,30 +57,36 @@ * * @author Glenn Renfro * @author Michael Minella + * @author Corneil du Plessis */ public class ComposedTaskRunnerStepFactory implements FactoryBean { + private final static Logger logger = LoggerFactory.getLogger(ComposedTaskRunnerStepFactory.class); + @Autowired private ComposedTaskProperties composedTaskProperties; private ComposedTaskProperties composedTaskPropertiesFromEnv; - private String taskName; + private final String taskName; - private String taskNameId; + private final String taskNameId; private Map taskSpecificProps = new HashMap<>(); private List arguments = new ArrayList<>(); @Autowired - private StepBuilderFactory steps; + private JobRepository jobRepository; + + @Autowired + private PlatformTransactionManager transactionManager; @Autowired private StepExecutionListener composedTaskStepExecutionListener; @Autowired - private TaskConfigurer taskConfigurer; + private TaskExplorer taskExplorer; @Autowired private TaskProperties taskProperties; @@ -79,8 +97,15 @@ public class ComposedTaskRunnerStepFactory implements FactoryBean { @Autowired(required = false) private OAuth2AccessTokenResponseClient clientCredentialsTokenResponseClient; + @Autowired(required = false) + private ObjectMapper mapper; + + @Autowired + private Environment environment; + public ComposedTaskRunnerStepFactory( - ComposedTaskProperties composedTaskPropertiesFromEnv, String taskName, String taskNameId) { + ComposedTaskProperties composedTaskPropertiesFromEnv, String taskName, String taskNameId + ) { 
Assert.notNull(composedTaskPropertiesFromEnv, "composedTaskProperties must not be null"); Assert.hasText(taskName, "taskName must not be empty nor null"); @@ -91,47 +116,67 @@ public ComposedTaskRunnerStepFactory( } public void setTaskSpecificProps(Map taskSpecificProps) { - if(taskSpecificProps != null) { + if (taskSpecificProps != null) { this.taskSpecificProps = taskSpecificProps; } } public void setArguments(List arguments) { - if(arguments != null) { + if (arguments != null) { this.arguments = arguments; } } @Override - public Step getObject() throws Exception { - + public Step getObject() { + if (this.mapper == null) { + this.mapper = new ObjectMapper(); + this.mapper.registerModule(new Jdk8Module()); + this.mapper.registerModule(new Jackson2HalModule()); + this.mapper.registerModule(new JavaTimeModule()); + this.mapper.registerModule(new Jackson2DataflowModule()); + } TaskLauncherTasklet taskLauncherTasklet = new TaskLauncherTasklet( - this.clientRegistrations, this.clientCredentialsTokenResponseClient, taskConfigurer.getTaskExplorer(), - this.composedTaskPropertiesFromEnv, this.taskName, taskProperties); - - List argumentsFromAppProperties = this.composedTaskProperties.getComposedTaskAppArguments().entrySet().stream() - .filter(e -> e.getKey().startsWith("app." + taskNameId)) - .map(e -> e.getValue()) - .collect(Collectors.toList()); + this.clientRegistrations, + this.clientCredentialsTokenResponseClient, + this.taskExplorer, + this.composedTaskPropertiesFromEnv, + this.taskName, + taskProperties, + environment, this.mapper); + + List argumentsFromAppProperties = Base64Utils.decodeMap(this.composedTaskProperties.getComposedTaskAppArguments()) + .entrySet() + .stream() + .filter(e -> e.getKey().startsWith("app." + taskNameId + ".") || e.getKey().startsWith("app.*.")) + .map(Map.Entry::getValue) + .collect(Collectors.toList()); List argumentsToUse = Stream.concat(this.arguments.stream(), argumentsFromAppProperties.stream()) - .collect(Collectors.toList()); + .collect(Collectors.toList()); taskLauncherTasklet.setArguments(argumentsToUse); - Map propertiesFrom = this.composedTaskProperties.getComposedTaskAppProperties().entrySet().stream() - .filter(e -> e.getKey().startsWith("app." + taskNameId)) - .collect(Collectors.toMap(entry -> entry.getKey(), entry -> entry.getValue())); + logger.debug("decoded composed-task-app-properties {}", composedTaskProperties.getComposedTaskAppProperties()); + + Map propertiesFrom = Base64Utils + .decodeMap(this.composedTaskProperties.getComposedTaskAppProperties()).entrySet().stream() + .filter(e -> + e.getKey().startsWith("app." + taskNameId + ".") || + e.getKey().startsWith("app.*.") || + e.getKey().startsWith("deployer." + taskNameId + ".") || + e.getKey().startsWith("deployer.*.")) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + Map propertiesToUse = new HashMap<>(); propertiesToUse.putAll(this.taskSpecificProps); propertiesToUse.putAll(propertiesFrom); taskLauncherTasklet.setProperties(propertiesToUse); - - String stepName = this.taskName; - - return this.steps.get(stepName) - .tasklet(taskLauncherTasklet) + logger.debug("Properties to use {}", propertiesToUse); + StepBuilder stepBuilder = new StepBuilder(this.taskName, this.jobRepository); + return stepBuilder + .tasklet(taskLauncherTasklet, this.transactionManager) .transactionAttribute(getTransactionAttribute()) .listener(this.composedTaskStepExecutionListener) .build(); @@ -143,7 +188,7 @@ public Step getObject() throws Exception { * what is in its transaction. 
By setting isolation to READ_COMMITTED, * the task launcher can see the latest state of the db, since the changes * to the task execution are made by the tasks. - + * * @return DefaultTransactionAttribute with isolation set to READ_COMMITTED. */ private TransactionAttribute getTransactionAttribute() { @@ -159,8 +204,4 @@ public Class getObjectType() { return Step.class; } - @Override - public boolean isSingleton() { - return true; - } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerTaskListener.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerTaskListener.java new file mode 100644 index 0000000000..ca02b6bc75 --- /dev/null +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerTaskListener.java @@ -0,0 +1,23 @@ +package org.springframework.cloud.dataflow.composedtaskrunner; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.task.listener.TaskExecutionListener; +import org.springframework.cloud.task.repository.TaskExecution; + +public class ComposedTaskRunnerTaskListener implements TaskExecutionListener { + private final static Logger logger = LoggerFactory.getLogger(ComposedTaskRunnerTaskListener.class); + + private static Long executionId = null; + + @Override + public void onTaskStartup(TaskExecution taskExecution) { + executionId = taskExecution.getExecutionId(); + logger.info("onTaskStartup:executionId={}", executionId); + } + + public static Long getExecutionId() { + return executionId; + } +} diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListener.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListener.java index 50dee971ff..8e0410ff3a 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListener.java +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListener.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2020 the original author or authors. + * Copyright 2017-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,12 +16,12 @@ package org.springframework.cloud.dataflow.composedtaskrunner; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.listener.StepExecutionListenerSupport; +import org.springframework.batch.core.StepExecutionListener; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.util.Assert; @@ -33,12 +33,12 @@ * exit code.
* * @author Glenn Renfro + * @author Corneil du Plessis */ -public class ComposedTaskStepExecutionListener extends StepExecutionListenerSupport { +public class ComposedTaskStepExecutionListener implements StepExecutionListener { + private final static Logger logger = LoggerFactory.getLogger(ComposedTaskStepExecutionListener.class); - private TaskExplorer taskExplorer; - - private static final Log logger = LogFactory.getLog(org.springframework.cloud.dataflow.composedtaskrunner.ComposedTaskStepExecutionListener.class); + private final TaskExplorer taskExplorer; public ComposedTaskStepExecutionListener(TaskExplorer taskExplorer) { Assert.notNull(taskExplorer, "taskExplorer must not be null."); @@ -48,37 +48,31 @@ public ComposedTaskStepExecutionListener(TaskExplorer taskExplorer) { /** * If endTime for task is null then the ExitStatus will be set to UNKNOWN. * If an exitMessage is returned by the TaskExecution then the exit status - * returned will be the ExitMessage. If no exitMessage is set for the task execution and the - * task returns an exitCode ! = to zero an exit status of FAILED is - * returned. If no exit message is set and the exit code of the task is - * zero then the ExitStatus of COMPLETED is returned. + * returned will be the ExitMessage. If no exitMessage is set for the task execution or + * {@link TaskLauncherTasklet#IGNORE_EXIT_MESSAGE_PROPERTY} is set to true as a task property + * and the task returns a non-zero exitCode, an exit status of FAILED is + * returned. If no exit message is set or + * {@link TaskLauncherTasklet#IGNORE_EXIT_MESSAGE_PROPERTY} is set to true as a task property + * and the exit code of the task is zero then the ExitStatus of COMPLETED is returned. + * * @param stepExecution The stepExecution that kicked off the Task. * @return ExitStatus of COMPLETED else FAILED. */ @Override public ExitStatus afterStep(StepExecution stepExecution) { + logger.info("AfterStep processing for stepExecution {}:{}", stepExecution.getStepName(), stepExecution.getJobExecutionId()); ExitStatus result = ExitStatus.COMPLETED; - logger.info(String.format("AfterStep processing for stepExecution %s", - stepExecution.getStepName())); - Long executionId = (Long) stepExecution.getExecutionContext().get("task-execution-id"); - Assert.notNull(executionId, "TaskLauncherTasklet did not " + - "return a task-execution-id. Check to see if task " + - "exists."); - - TaskExecution resultExecution = this.taskExplorer.getTaskExecution(executionId); - - if (!StringUtils.isEmpty(resultExecution.getExitMessage())) { + Assert.notNull(executionId, "TaskLauncherTasklet for job " + stepExecution.getJobExecutionId() + + " did not return a task-execution-id. 
Check to see if task exists."); + TaskExecution resultExecution = taskExplorer.getTaskExecution(executionId); + if (!stepExecution.getExecutionContext().containsKey(TaskLauncherTasklet.IGNORE_EXIT_MESSAGE) && + StringUtils.hasText(resultExecution.getExitMessage())) { result = new ExitStatus(resultExecution.getExitMessage()); - } - else if (resultExecution.getExitCode() != 0) { + } else if (resultExecution.getExitCode() != 0) { result = ExitStatus.FAILED; } - - logger.info(String.format("AfterStep processing complete for " + - "stepExecution %s with taskExecution %s", - stepExecution.getStepName(), executionId)); + logger.info("AfterStep processing complete for stepExecution {} with taskExecution {}:{}", stepExecution.getStepName(), stepExecution.getJobExecutionId()); return result; } - } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/DataFlowConfiguration.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/DataFlowConfiguration.java index 4d69835394..c6ded5e05c 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/DataFlowConfiguration.java +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/DataFlowConfiguration.java @@ -48,6 +48,7 @@ public InMemoryClientRegistrationRepository clientRegistrationRepository( .clientId(properties.getOauth2ClientCredentialsClientId()) .clientSecret(properties.getOauth2ClientCredentialsClientSecret()) .scope(properties.getOauth2ClientCredentialsScopes()) + .clientAuthenticationMethod(properties.getOauth2ClientCredentialsClientAuthenticationMethod()) .build(); return new InMemoryClientRegistrationRepository(clientRegistration); } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/JobRepositoryBeanPostProcessor.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/JobRepositoryBeanPostProcessor.java new file mode 100644 index 0000000000..0eaa44d82f --- /dev/null +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/JobRepositoryBeanPostProcessor.java @@ -0,0 +1,85 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.cloud.dataflow.composedtaskrunner; + +import javax.sql.DataSource; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean; +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.config.BeanPostProcessor; +import org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties; +import org.springframework.cloud.dataflow.composedtaskrunner.support.ComposedTaskException; +import org.springframework.cloud.dataflow.core.database.support.MultiSchemaIncrementerFactory; +import org.springframework.core.Ordered; +import org.springframework.transaction.PlatformTransactionManager; + +/** + * CTR requires the {@link JobRepository} it uses to have its own {@link MultiSchemaIncrementerFactory}. + * As of Batch 5.x, DefaultBatchConfiguration is used to override default beans; however, this disables + * BatchAutoConfiguration. To work around this we use a bean post processor to create our own {@link JobRepository}. + * + * @author Glenn Renfro + */ +public class JobRepositoryBeanPostProcessor implements BeanPostProcessor, Ordered { + private static final Logger logger = LoggerFactory.getLogger(JobRepositoryBeanPostProcessor.class); + + private PlatformTransactionManager transactionManager; + private DataSource incrementerDataSource; + private ComposedTaskProperties composedTaskProperties; + + public JobRepositoryBeanPostProcessor(PlatformTransactionManager transactionManager, DataSource incrementerDataSource, + ComposedTaskProperties composedTaskProperties) { + this.transactionManager = transactionManager; + this.incrementerDataSource = incrementerDataSource; + this.composedTaskProperties = composedTaskProperties; + } + + @Override + public int getOrder() { + return Ordered.HIGHEST_PRECEDENCE; + } + + @Override + public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { + if (beanName.equals("jobRepository")) { + logger.debug("Replacing BatchAutoConfiguration's jobRepository Bean with one provided by composed task runner."); + bean = jobRepository(transactionManager, incrementerDataSource, composedTaskProperties); + } + return bean; + } + + private JobRepository jobRepository(PlatformTransactionManager transactionManager, DataSource incrementerDataSource, + ComposedTaskProperties composedTaskProperties) { + JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean(); + MultiSchemaIncrementerFactory incrementerFactory = new MultiSchemaIncrementerFactory(incrementerDataSource); + factory.setIncrementerFactory(incrementerFactory); + factory.setDataSource(incrementerDataSource); + factory.setTransactionManager(transactionManager); + factory.setIsolationLevelForCreate(composedTaskProperties.getTransactionIsolationLevel()); + try { + factory.afterPropertiesSet(); + return factory.getObject(); + } + catch (Exception exception) { + throw new ComposedTaskException(exception.getMessage()); + } + } +} diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/RelaxedNames.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/RelaxedNames.java index c84bf60bd6..c313aefaff 100644 --- 
a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/RelaxedNames.java +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/RelaxedNames.java @@ -90,7 +90,7 @@ public String apply(String value) { @Override public String apply(String value) { - return (value.isEmpty() ? value : value.toLowerCase(Locale.ENGLISH)); + return (value.isEmpty() ? value : value.toLowerCase(Locale.ROOT)); } }, @@ -99,7 +99,7 @@ public String apply(String value) { @Override public String apply(String value) { - return (value.isEmpty() ? value : value.toUpperCase(Locale.ENGLISH)); + return (value.isEmpty() ? value : value.toUpperCase(Locale.ROOT)); } }; @@ -224,7 +224,7 @@ private static String separatedToCamelCase(String value, } StringBuilder builder = new StringBuilder(); for (String field : SEPARATED_TO_CAMEL_CASE_PATTERN.split(value)) { - field = (caseInsensitive ? field.toLowerCase(Locale.ENGLISH) : field); + field = (caseInsensitive ? field.toLowerCase(Locale.ROOT) : field); builder.append( builder.length() != 0 ? StringUtils.capitalize(field) : field); } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/StepBeanDefinitionRegistrar.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/StepBeanDefinitionRegistrar.java index dd694b9979..fd19bef8d6 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/StepBeanDefinitionRegistrar.java +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/StepBeanDefinitionRegistrar.java @@ -17,7 +17,6 @@ package org.springframework.cloud.dataflow.composedtaskrunner; import java.net.URI; -import java.net.URISyntaxException; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; @@ -39,6 +38,7 @@ import org.springframework.context.annotation.ImportBeanDefinitionRegistrar; import org.springframework.core.env.Environment; import org.springframework.core.type.AnnotationMetadata; +import org.springframework.security.oauth2.core.ClientAuthenticationMethod; import org.springframework.util.StringUtils; /** @@ -199,13 +199,29 @@ public void setEnvironment(Environment environment) { private ComposedTaskProperties composedTaskProperties() { ComposedTaskProperties properties = new ComposedTaskProperties(); + + String skipTlsCertificateVerification = getPropertyValue("skip-tls-certificate-verification"); + if (skipTlsCertificateVerification != null) { + properties.setSkipTlsCertificateVerification(Boolean.parseBoolean(skipTlsCertificateVerification)); + } String dataFlowUriString = getPropertyValue("dataflow-server-uri"); + if (dataFlowUriString != null) { + properties.setDataflowServerUri(URI.create(dataFlowUriString)); + } + String maxStartWaitTime = getPropertyValue("max-start-wait-time"); + if (maxStartWaitTime != null) { + properties.setMaxStartWaitTime(Integer.parseInt(maxStartWaitTime)); + } String maxWaitTime = getPropertyValue("max-wait-time"); - String intervalTimeBetweenChecks = - getPropertyValue("interval-time-between-checks"); + if (maxWaitTime != null) { + properties.setMaxWaitTime(Integer.parseInt(maxWaitTime)); + } + String intervalTimeBetweenChecks = getPropertyValue("interval-time-between-checks"); + if (intervalTimeBetweenChecks != null) { + 
properties.setIntervalTimeBetweenChecks(Integer.parseInt(intervalTimeBetweenChecks)); + } properties.setGraph(getPropertyValue("graph")); - properties.setComposedTaskArguments( - getPropertyValue("composed-task-arguments")); + properties.setComposedTaskArguments(getPropertyValue("composed-task-arguments")); properties.setPlatformName(getPropertyValue("platform-name")); properties.setComposedTaskProperties(getPropertyValue("composed-task-properties")); properties.setDataflowServerAccessToken(getPropertyValue("dataflow-server-access-token")); @@ -213,22 +229,13 @@ private ComposedTaskProperties composedTaskProperties() { properties.setDataflowServerUsername(getPropertyValue("dataflow-server-username")); properties.setOauth2ClientCredentialsClientId(getPropertyValue("oauth2-client-credentials-client-id")); properties.setOauth2ClientCredentialsClientSecret(getPropertyValue("oauth2-client-credential-client-secret")); - properties.setOauth2ClientCredentialsScopes(StringUtils.commaDelimitedListToSet(getPropertyValue("oauth2-client-credentials-scopes"))); - if (maxWaitTime != null) { - properties.setMaxWaitTime(Integer.valueOf(maxWaitTime)); - } - if (intervalTimeBetweenChecks != null) { - properties.setIntervalTimeBetweenChecks(Integer.valueOf( - intervalTimeBetweenChecks)); - } - if (dataFlowUriString != null) { - try { - properties.setDataflowServerUri(new URI(dataFlowUriString)); - } - catch (URISyntaxException e) { - throw new IllegalArgumentException("Invalid Data Flow URI"); - } + + String oauth2ClientCredentialsClientAuthenticationMethodAsString = getPropertyValue("oauth2-client-credential-client-authentication-method"); + if (oauth2ClientCredentialsClientAuthenticationMethodAsString != null) { + properties.setOauth2ClientCredentialsClientAuthenticationMethod(new ClientAuthenticationMethod(oauth2ClientCredentialsClientAuthenticationMethodAsString)); } + + properties.setOauth2ClientCredentialsScopes(StringUtils.commaDelimitedListToSet(getPropertyValue("oauth2-client-credentials-scopes"))); return properties; } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTasklet.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTasklet.java index efd4933548..91fa2480fe 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTasklet.java +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTasklet.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
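StepBeanDefinitionRegistrar above now parses several additional options before building ComposedTaskProperties. For reference, a hypothetical set of values for the new keys; the key names match the getPropertyValue calls in the code, while the values are examples only:

skip-tls-certificate-verification=true
max-start-wait-time=30000
max-wait-time=600000
interval-time-between-checks=10000
oauth2-client-credential-client-authentication-method=client_secret_post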
@@ -21,25 +21,37 @@ import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.datatype.jdk8.Jdk8Module; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.batch.core.StepContribution; -import org.springframework.batch.core.UnexpectedJobExecutionException; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.step.tasklet.Tasklet; import org.springframework.batch.item.ExecutionContext; import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.boot.context.properties.bind.Bindable; +import org.springframework.boot.context.properties.bind.Binder; +import org.springframework.boot.context.properties.source.MapConfigurationPropertySource; import org.springframework.cloud.common.security.core.support.OAuth2AccessTokenProvidingClientHttpRequestInterceptor; import org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties; +import org.springframework.cloud.dataflow.composedtaskrunner.support.ComposedTaskException; import org.springframework.cloud.dataflow.composedtaskrunner.support.TaskExecutionTimeoutException; +import org.springframework.cloud.dataflow.composedtaskrunner.support.UnexpectedTaskExecutionException; import org.springframework.cloud.dataflow.rest.client.DataFlowOperations; import org.springframework.cloud.dataflow.rest.client.DataFlowTemplate; import org.springframework.cloud.dataflow.rest.client.TaskOperations; +import org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource; +import org.springframework.cloud.dataflow.rest.support.jackson.Jackson2DataflowModule; import org.springframework.cloud.dataflow.rest.util.HttpClientConfigurer; import org.springframework.cloud.task.configuration.TaskProperties; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskExplorer; +import org.springframework.core.env.Environment; +import org.springframework.hateoas.mediatype.hal.Jackson2HalModule; +import org.springframework.lang.Nullable; import org.springframework.security.oauth2.client.endpoint.OAuth2AccessTokenResponseClient; import org.springframework.security.oauth2.client.endpoint.OAuth2ClientCredentialsGrantRequest; import org.springframework.security.oauth2.client.registration.ClientRegistration; @@ -52,44 +64,62 @@ /** * Executes a task launch request using Spring Cloud Data Flow's RESTful API and returns the execution id once the task is launched. - + *

* Note: This class is not thread-safe and as such should not be used as a singleton. * * @author Glenn Renfro */ public class TaskLauncherTasklet implements Tasklet { + final static String IGNORE_EXIT_MESSAGE = "IGNORE_EXIT_MESSAGE"; + + final static String IGNORE_EXIT_MESSAGE_PROPERTY = "ignore-exit-message"; - private ComposedTaskProperties composedTaskProperties; + private final ComposedTaskProperties composedTaskProperties; - private TaskExplorer taskExplorer; + private final TaskExplorer taskExplorer; private Map properties; private List arguments; - private String taskName; + private final String taskName; - private static final Log logger = LogFactory.getLog(org.springframework.cloud.dataflow.composedtaskrunner.TaskLauncherTasklet.class); + private static final Logger logger = LoggerFactory.getLogger(TaskLauncherTasklet.class); private Long executionId; + private long startTimeout; + private long timeout; - private ClientRegistrationRepository clientRegistrations; + private final ClientRegistrationRepository clientRegistrations; - private OAuth2AccessTokenResponseClient clientCredentialsTokenResponseClient; + private final OAuth2AccessTokenResponseClient clientCredentialsTokenResponseClient; private TaskOperations taskOperations; TaskProperties taskProperties; + private final ObjectMapper mapper; public TaskLauncherTasklet( ClientRegistrationRepository clientRegistrations, OAuth2AccessTokenResponseClient clientCredentialsTokenResponseClient, TaskExplorer taskExplorer, - ComposedTaskProperties composedTaskProperties, String taskName, - TaskProperties taskProperties) { + ComposedTaskProperties composedTaskProperties, + String taskName, + TaskProperties taskProperties, + Environment environment, + @Nullable ObjectMapper mapper + ) { + if (mapper == null) { + mapper = new ObjectMapper(); + mapper.registerModule(new Jdk8Module()); + mapper.registerModule(new Jackson2HalModule()); + mapper.registerModule(new JavaTimeModule()); + mapper.registerModule(new Jackson2DataflowModule()); + } + this.mapper = mapper; Assert.hasText(taskName, "taskName must not be empty nor null."); Assert.notNull(taskExplorer, "taskExplorer must not be null."); Assert.notNull(composedTaskProperties, @@ -104,19 +134,17 @@ public TaskLauncherTasklet( } public void setProperties(Map properties) { - if(properties != null) { + if (properties != null) { this.properties = properties; - } - else { + } else { this.properties = new HashMap<>(0); } } public void setArguments(List arguments) { - if(arguments != null) { + if (arguments != null) { this.arguments = arguments; - } - else { + } else { this.arguments = new ArrayList<>(0); } } @@ -130,12 +158,15 @@ public void setArguments(List arguments) { * @return Repeat status of FINISHED. 
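The execute method that follows implements a launch-then-poll contract: the first invocation launches the child task and stores its execution id in the step execution context, and each later invocation sleeps for interval-time-between-checks before querying the TaskExplorer, until an end time appears or one of the timeouts trips. A simplified sketch of that loop, assuming taskOperations, taskExplorer, and the relevant fields are in scope; the real code below also handles OAuth, argument cleansing, and ignore-exit-message:

	// Simplified shape of the tasklet's polling contract (not the actual implementation).
	RepeatStatus pollOnce() throws InterruptedException {
		if (executionId == null) {
			executionId = taskOperations.launch(taskName, properties, arguments).getExecutionId();
			return RepeatStatus.CONTINUABLE;          // come back and poll
		}
		Thread.sleep(intervalTimeBetweenChecks);      // 10000 ms by default
		TaskExecution execution = taskExplorer.getTaskExecution(executionId);
		if (execution != null && execution.getEndTime() != null) {
			return RepeatStatus.FINISHED;             // completed; the exit code is checked separately
		}
		return RepeatStatus.CONTINUABLE;              // still running, keep waiting
	}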
*/ @Override - public RepeatStatus execute(StepContribution contribution, - ChunkContext chunkContext) { + public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) { TaskOperations taskOperations = taskOperations(); if (this.executionId == null) { + this.startTimeout = System.currentTimeMillis() + + this.composedTaskProperties.getMaxStartWaitTime(); this.timeout = System.currentTimeMillis() + this.composedTaskProperties.getMaxWaitTime(); + logger.debug("Wait time for this task to start is " + + this.composedTaskProperties.getMaxStartWaitTime()); logger.debug("Wait time for this task to complete is " + this.composedTaskProperties.getMaxWaitTime()); logger.debug("Interval check time for this task to complete is " + @@ -152,48 +183,71 @@ public RepeatStatus execute(StepContribution contribution, args = (List) stepExecutionContext.get("task-arguments"); } List cleansedArgs = new ArrayList<>(); - if(args != null) { - for(String argument : args) { - if(!argument.startsWith("--spring.cloud.task.parent-execution-id=")) { + if (args != null) { + for (String argument : args) { + if (!argument.startsWith("--spring.cloud.task.parent-execution-id=") && !argument.startsWith("--spring.cloud.task.parent-execution-id%")) { cleansedArgs.add(argument); + } else { + logger.debug("cleanse:removing argument:{}", argument); } } args = cleansedArgs; } - if(this.taskProperties.getExecutionid() != null) { - args.add("--spring.cloud.task.parent-execution-id=" + this.taskProperties.getExecutionid()); + if (args == null) { + args = new ArrayList<>(); + } + Long parentTaskExecutionId = getParentTaskExecutionId(contribution); + if (parentTaskExecutionId != null) { + args.add("--spring.cloud.task.parent-execution-id=" + parentTaskExecutionId); + } else { + logger.error("Cannot find task execution id"); } - if(StringUtils.hasText(this.composedTaskProperties.getPlatformName())) { + + if (StringUtils.hasText(this.composedTaskProperties.getPlatformName())) { properties.put("spring.cloud.dataflow.task.platformName", this.composedTaskProperties.getPlatformName()); } - this.executionId = taskOperations.launch(tmpTaskName, - this.properties, args); + logger.debug("execute:{}:{}:{}", tmpTaskName, this.properties, args); + LaunchResponseResource response = taskOperations.launch(tmpTaskName, this.properties, args); - stepExecutionContext.put("task-execution-id", executionId); - stepExecutionContext.put("task-arguments", args); - } - else { + this.executionId = response.getExecutionId(); + + stepExecutionContext.put("task-execution-id", response.getExecutionId()); + + stepExecutionContext.put("task-name", tmpTaskName); + if (!args.isEmpty()) { + stepExecutionContext.put("task-arguments", args); + } + Boolean ignoreExitMessage = isIgnoreExitMessage(args, this.properties); + if (ignoreExitMessage != null) { + stepExecutionContext.put(IGNORE_EXIT_MESSAGE, ignoreExitMessage); + } + } else { try { Thread.sleep(this.composedTaskProperties.getIntervalTimeBetweenChecks()); - } - catch (InterruptedException e) { + } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new IllegalStateException(e.getMessage(), e); } - TaskExecution taskExecution = - this.taskExplorer.getTaskExecution(this.executionId); + TaskExecution taskExecution = this.taskExplorer.getTaskExecution(this.executionId); if (taskExecution != null && taskExecution.getEndTime() != null) { if (taskExecution.getExitCode() == null) { - throw new UnexpectedJobExecutionException("Task returned a null exit code."); + throw new 
UnexpectedTaskExecutionException("Task returned a null exit code.", taskExecution); } else if (taskExecution.getExitCode() != 0) { - throw new UnexpectedJobExecutionException("Task returned a non zero exit code."); + throw new UnexpectedTaskExecutionException("Task returned a non zero exit code.", taskExecution); } else { return RepeatStatus.FINISHED; } } + if (this.composedTaskProperties.getMaxStartWaitTime() > 0 && + (taskExecution == null || taskExecution.getStartTime() == null) && + System.currentTimeMillis() > startTimeout) { + throw new TaskExecutionTimeoutException(String.format( + "Timeout occurred during startup of task with Execution Id %s", + this.executionId)); + } if (this.composedTaskProperties.getMaxWaitTime() > 0 && System.currentTimeMillis() > timeout) { throw new TaskExecutionTimeoutException(String.format( @@ -204,9 +258,28 @@ else if (taskExecution.getExitCode() != 0) { return RepeatStatus.CONTINUABLE; } + public Long getParentTaskExecutionId(StepContribution stepContribution) { + Long result = null; + if (this.taskProperties.getExecutionid() != null) { + result = this.taskProperties.getExecutionid(); + logger.debug("getParentTaskExecutionId:taskProperties.executionId={}", result); + } else if (ComposedTaskRunnerTaskListener.getExecutionId() != null) { + result = ComposedTaskRunnerTaskListener.getExecutionId(); + logger.debug("getParentTaskExecutionId:ComposedTaskRunnerTaskListener.executionId={}", result); + } else if (stepContribution != null) { + result = this.taskExplorer.getTaskExecutionIdByJobExecutionId(stepContribution.getStepExecution().getJobExecutionId()); + } + return result; + } + public TaskOperations taskOperations() { - if(this.taskOperations == null) { + if (this.taskOperations == null) { this.taskOperations = dataFlowOperations().taskOperations(); + if (taskOperations == null) { + throw new ComposedTaskException("Unable to connect to Data Flow " + + "Server to execute task operations. 
Verify that Data Flow " + + "Server's tasks/definitions endpoint can be accessed."); + } } return this.taskOperations; } @@ -214,9 +287,10 @@ public TaskOperations taskOperations() { /** * @return new instance of DataFlowOperations */ - private DataFlowOperations dataFlowOperations() { + protected DataFlowOperations dataFlowOperations() { final RestTemplate restTemplate = DataFlowTemplate.getDefaultDataflowRestTemplate(); + validateUsernamePassword(this.composedTaskProperties.getDataflowServerUsername(), this.composedTaskProperties.getDataflowServerPassword()); HttpClientConfigurer clientHttpRequestFactoryBuilder = null; @@ -235,18 +309,15 @@ private DataFlowOperations dataFlowOperations() { final OAuth2AccessTokenResponse res = this.clientCredentialsTokenResponseClient.getTokenResponse(grantRequest); accessTokenValue = res.getAccessToken().getTokenValue(); logger.debug("Configured OAuth2 Client Credentials for accessing the Data Flow Server"); - } - else if (StringUtils.hasText(this.composedTaskProperties.getDataflowServerAccessToken())) { + } else if (StringUtils.hasText(this.composedTaskProperties.getDataflowServerAccessToken())) { accessTokenValue = this.composedTaskProperties.getDataflowServerAccessToken(); logger.debug("Configured OAuth2 Access Token for accessing the Data Flow Server"); - } - else if (StringUtils.hasText(this.composedTaskProperties.getDataflowServerUsername()) + } else if (StringUtils.hasText(this.composedTaskProperties.getDataflowServerUsername()) && StringUtils.hasText(this.composedTaskProperties.getDataflowServerPassword())) { - accessTokenValue = null; - clientHttpRequestFactoryBuilder.basicAuthCredentials(composedTaskProperties.getDataflowServerUsername(), composedTaskProperties.getDataflowServerPassword()); + clientHttpRequestFactoryBuilder.basicAuthCredentials(composedTaskProperties.getDataflowServerUsername(), + composedTaskProperties.getDataflowServerPassword()); logger.debug("Configured basic security for accessing the Data Flow Server"); - } - else { + } else { logger.debug("Not configuring basic security for accessing the Data Flow Server"); } @@ -254,11 +325,18 @@ else if (StringUtils.hasText(this.composedTaskProperties.getDataflowServerUserna restTemplate.getInterceptors().add(new OAuth2AccessTokenProvidingClientHttpRequestInterceptor(accessTokenValue)); } + if (this.composedTaskProperties.isSkipTlsCertificateVerification()) { + if (clientHttpRequestFactoryBuilder == null) { + clientHttpRequestFactoryBuilder = HttpClientConfigurer.create(this.composedTaskProperties.getDataflowServerUri()); + } + clientHttpRequestFactoryBuilder.skipTlsCertificateVerification(); + } + if (clientHttpRequestFactoryBuilder != null) { restTemplate.setRequestFactory(clientHttpRequestFactoryBuilder.buildClientHttpRequestFactory()); } - return new DataFlowTemplate(this.composedTaskProperties.getDataflowServerUri(), restTemplate); + return new DataFlowTemplate(this.composedTaskProperties.getDataflowServerUri(), restTemplate, mapper); } private void validateUsernamePassword(String userName, String password) { @@ -269,4 +347,42 @@ private void validateUsernamePassword(String userName, String password) { throw new IllegalArgumentException("A username may be specified only together with a password"); } } + + private Boolean isIgnoreExitMessage(List args, Map properties) { + Boolean result = null; + + if (properties != null) { + MapConfigurationPropertySource mapConfigurationPropertySource = new MapConfigurationPropertySource(); + properties.forEach((key, value) -> { + key = 
key.substring(key.lastIndexOf(".") + 1); + mapConfigurationPropertySource.put(key, value); + }); + result = isIgnoreMessagePresent(mapConfigurationPropertySource); + } + + if (args != null) { + MapConfigurationPropertySource mapConfigurationPropertySource = new MapConfigurationPropertySource(); + for (String arg : args) { + int firstEquals = arg.indexOf('='); + if (firstEquals != -1) { + mapConfigurationPropertySource.put(arg.substring(0, firstEquals), arg.substring(firstEquals + 1).trim()); + } + } + Boolean argResult = isIgnoreMessagePresent(mapConfigurationPropertySource); + if (argResult != null) { + result = argResult; + } + } + return result; + } + + private Boolean isIgnoreMessagePresent(MapConfigurationPropertySource mapConfigurationPropertySource) { + Binder binder = new Binder(mapConfigurationPropertySource); + try { + return binder.bind(IGNORE_EXIT_MESSAGE_PROPERTY, Bindable.of(Boolean.class)).get(); + } catch (Exception e) { + // binding failed; callers handle the null result + } + return null; + } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/properties/ComposedTaskProperties.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/properties/ComposedTaskProperties.java index 66d1cb437d..4a12eeb6f9 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/properties/ComposedTaskProperties.java +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/properties/ComposedTaskProperties.java @@ -23,6 +23,8 @@ import java.util.Set; import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.boot.context.properties.NestedConfigurationProperty; +import org.springframework.security.oauth2.core.ClientAuthenticationMethod; /** * Configuration properties used to setup the ComposedTaskRunner. @@ -33,6 +35,8 @@ @ConfigurationProperties public class ComposedTaskProperties { + public static final int MAX_START_WAIT_TIME_DEFAULT = 0; + public static final int MAX_WAIT_TIME_DEFAULT = 0; public static final int INTERVAL_TIME_BETWEEN_CHECKS_DEFAULT = 10000; @@ -45,6 +49,12 @@ public class ComposedTaskProperties { public static final int SPLIT_THREAD_QUEUE_CAPACITY_DEFAULT = Integer.MAX_VALUE; + /** + * The maximum amount of time in millis that the ComposedTaskRunner will wait for the + * start_time of a step's taskExecution to be set before the execution of the Composed task is failed. + */ + private int maxStartWaitTime = MAX_START_WAIT_TIME_DEFAULT; + /** * The maximum amount of time in millis that an individual step can run before * the execution of the Composed task is failed. @@ -98,6 +108,12 @@ public class ComposedTaskProperties { */ private String oauth2ClientCredentialsClientSecret; + /** + * The OAuth2 Client Authentication Method (Used for client credentials grant to + * specify how {@link #oauth2ClientCredentialsClientId} and {@link #oauth2ClientCredentialsClientSecret} are + * going to be sent). + */ + private ClientAuthenticationMethod oauth2ClientCredentialsClientAuthenticationMethod; /** * Token URI for the OAuth2 provider (Used for the client credentials grant). */ @@ -126,11 +142,13 @@ /** * Properties for defining task app arguments. 
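The two maps above arrive Base64-encoded (they are decoded via Base64Utils.decodeMap in ComposedTaskRunnerStepFactory earlier in this patch, presumably so values with special characters survive transport) and are filtered per step. A short illustration mirroring that filtering logic; taskNameId is the step's task name, and the surrounding class and fields are assumed:

	// Decode, then keep only entries addressed to this step or to the wildcard.
	Map<String, String> decoded = Base64Utils.decodeMap(composedTaskProperties.getComposedTaskAppArguments());
	List<String> argumentsForStep = decoded.entrySet().stream()
			.filter(e -> e.getKey().startsWith("app." + taskNameId + ".") || e.getKey().startsWith("app.*."))
			.map(Map.Entry::getValue)
			.collect(Collectors.toList());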
*/ + @NestedConfigurationProperty private Map composedTaskAppArguments = new HashMap<>(); /** * Properties for defining task app properties. */ + @NestedConfigurationProperty private Map composedTaskAppProperties = new HashMap<>(); /** @@ -172,17 +190,28 @@ public class ComposedTaskProperties { /** * Allows a single ComposedTaskRunner instance to be re-executed without - * changing the parameters. Default is false which means a - * ComposedTaskRunner instance can only be executed once with a given set - * of parameters, if true it can be re-executed. + * changing the parameters. It does this by applying a {@code run.id} based on a UUID. + * Default is true. + * Leave this option set to true when running multiple instances of the same composed task definition at the same time. */ - private boolean incrementInstanceEnabled = true; + private boolean uuidInstanceEnabled = true; /** * The platform property that will be used for each task in the workflow when it is launched. */ private String platformName; + /** + * If true, skips SSL certificate validation for SCDF server communication. + */ + private boolean skipTlsCertificateVerification = false; + + /** + * Establishes the transaction isolation level for the Composed Task Runner. + * Default is ISOLATION_REPEATABLE_READ. + */ + private String transactionIsolationLevel = "ISOLATION_REPEATABLE_READ"; + public ComposedTaskProperties() { try { this.dataflowServerUri = new URI("http://localhost:9393/"); @@ -192,6 +221,14 @@ public ComposedTaskProperties() { } } + public int getMaxStartWaitTime() { + return this.maxStartWaitTime; + } + + public void setMaxStartWaitTime(int maxStartWaitTime) { + this.maxStartWaitTime = maxStartWaitTime; + } + public int getMaxWaitTime() { return this.maxWaitTime; } @@ -320,14 +357,6 @@ public void setSplitThreadWaitForTasksToCompleteOnShutdown(boolean splitThreadWa this.splitThreadWaitForTasksToCompleteOnShutdown = splitThreadWaitForTasksToCompleteOnShutdown; } - public boolean isIncrementInstanceEnabled() { - return incrementInstanceEnabled; - } - - public void setIncrementInstanceEnabled(boolean incrementInstanceEnabled) { - this.incrementInstanceEnabled = incrementInstanceEnabled; - } - public String getDataflowServerAccessToken() { return dataflowServerAccessToken; } @@ -352,6 +381,14 @@ public void setOauth2ClientCredentialsClientSecret(String oauth2ClientCredential this.oauth2ClientCredentialsClientSecret = oauth2ClientCredentialsClientSecret; } + public ClientAuthenticationMethod getOauth2ClientCredentialsClientAuthenticationMethod() { + return oauth2ClientCredentialsClientAuthenticationMethod; + } + + public void setOauth2ClientCredentialsClientAuthenticationMethod(ClientAuthenticationMethod oauth2ClientCredentialsClientAuthenticationMethod) { + this.oauth2ClientCredentialsClientAuthenticationMethod = oauth2ClientCredentialsClientAuthenticationMethod; + } + public String getOauth2ClientCredentialsTokenUri() { return oauth2ClientCredentialsTokenUri; } @@ -375,4 +412,28 @@ public String getPlatformName() { public void setPlatformName(String platformName) { this.platformName = platformName; } + + public boolean isSkipTlsCertificateVerification() { + return skipTlsCertificateVerification; + } + + public void setSkipTlsCertificateVerification(boolean skipTlsCertificateVerification) { + this.skipTlsCertificateVerification = skipTlsCertificateVerification; + } + + public boolean isUuidInstanceEnabled() { + return uuidInstanceEnabled; + } + + public void setUuidInstanceEnabled(boolean uuidInstanceEnabled) { + 
this.uuidInstanceEnabled = uuidInstanceEnabled; + } + + public String getTransactionIsolationLevel() { + return transactionIsolationLevel; + } + + public void setTransactionIsolationLevel(String transactionIsolationLevel) { + this.transactionIsolationLevel = transactionIsolationLevel; + } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/support/ComposedTaskException.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/support/ComposedTaskException.java new file mode 100644 index 0000000000..d9b9b312ae --- /dev/null +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/support/ComposedTaskException.java @@ -0,0 +1,29 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.composedtaskrunner.support; + +/** + * General {@link java.lang.RuntimeException} thrown if an error occurs while processing + * the composed task runner graph. + * @author Glenn Renfro + * @since 2.8.0 + */ +public class ComposedTaskException extends RuntimeException { + public ComposedTaskException(String message) { + super(message); + } +} diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/support/SqlServerSequenceMaxValueIncrementer.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/support/SqlServerSequenceMaxValueIncrementer.java new file mode 100644 index 0000000000..d882e34483 --- /dev/null +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/support/SqlServerSequenceMaxValueIncrementer.java @@ -0,0 +1,37 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.composedtaskrunner.support; + +import javax.sql.DataSource; + +import org.springframework.jdbc.support.incrementer.AbstractSequenceMaxValueIncrementer; + +/** + * Incrementer using SQL Server's sequence. 
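A hedged usage sketch for the incrementer declared below; the dataSource variable and the sequence name BATCH_JOB_SEQ are assumptions for illustration, not values from this patch:

	// Issues "select next value for BATCH_JOB_SEQ" against SQL Server.
	SqlServerSequenceMaxValueIncrementer incrementer =
			new SqlServerSequenceMaxValueIncrementer(dataSource, "BATCH_JOB_SEQ"); // dataSource assumed in scope
	long nextId = incrementer.nextLongValue();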
+ * @author Glenn Renfro + * @since 2.8.0 + */ +public class SqlServerSequenceMaxValueIncrementer extends AbstractSequenceMaxValueIncrementer { + + public SqlServerSequenceMaxValueIncrementer(DataSource dataSource, String incrementerName) { + super(dataSource, incrementerName); + } + @Override + protected String getSequenceQuery() { + return "select next value for " + getIncrementerName(); + } +} diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/support/UnexpectedTaskExecutionException.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/support/UnexpectedTaskExecutionException.java new file mode 100644 index 0000000000..6056ae7f52 --- /dev/null +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/support/UnexpectedTaskExecutionException.java @@ -0,0 +1,187 @@ +/* + * Copyright 2017-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.composedtaskrunner.support; + +import java.time.LocalDateTime; + +import org.springframework.batch.core.UnexpectedJobExecutionException; +import org.springframework.boot.ExitCodeGenerator; +import org.springframework.cloud.task.repository.TaskExecution; + +/** + * A {@link UnexpectedTaskExecutionException} extends {@link UnexpectedJobExecutionException}, but + * also contains the exitCode as information. + * + * @author Tobias Soloschenko + */ +public class UnexpectedTaskExecutionException extends UnexpectedJobExecutionException implements ExitCodeGenerator { + + private static final long serialVersionUID = 1080992679855603656L; + + /** + * The unique id associated with the task execution. + */ + private long executionId; + + /** + * The parent task execution id. + */ + private Long parentExecutionId; + + /** + * The recorded exit code for the task. + */ + private Integer exitCode = -1; + + /** + * User defined name for the task. + */ + private String taskName; + + /** + * Time of when the task was started. + */ + private LocalDateTime startTime; + + /** + * Timestamp of when the task was completed/terminated. + */ + private LocalDateTime endTime; + + /** + * Message returned from the task or stacktrace. + */ + private String exitMessage; + + /** + * Id assigned to the task by the platform. + */ + private String externalExecutionId; + + /** + * Error information available upon the failure of a task. + */ + private String errorMessage; + + /** + * Constructs an UnexpectedTaskExecutionException with the specified + * detail message. + * + * @param message the detail message + */ + public UnexpectedTaskExecutionException(String message) { + super(message); + } + + /** + * Constructs an UnexpectedTaskExecutionException with the specified + * detail message and cause. 
+ * + * @param message the detail message + * @param cause the cause which leads to this exception + */ + public UnexpectedTaskExecutionException(String message, Throwable cause) { + super(message, cause); + } + + /** + * Constructs an UnexpectedTaskExecutionException with the specified + * detail message and taskExecution. + * + * @param message the detail message + * @param taskExecution the taskExecution of the task + */ + public UnexpectedTaskExecutionException(String message, TaskExecution taskExecution) { + this(message); + assignTaskExecutionFields(taskExecution); + } + + /** + * Constructs an UnexpectedTaskExecutionException with the specified + * detail message, cause and taskExecution. + * + * @param message the detail message + * @param cause the cause which leads to this exception + * @param taskExecution the taskExecution of the task + */ + public UnexpectedTaskExecutionException(String message, Throwable cause, TaskExecution taskExecution) { + this(message, cause); + assignTaskExecutionFields(taskExecution); + } + + /** + * Assigns the task execution fields to this exception. + * + * @param taskExecution the task execution of which the fields should be assigned to this exception + */ + private void assignTaskExecutionFields(TaskExecution taskExecution) { + if (taskExecution != null) { + executionId = taskExecution.getExecutionId(); + parentExecutionId = taskExecution.getParentExecutionId(); + exitCode = taskExecution.getExitCode(); + taskName = taskExecution.getTaskName(); + startTime = taskExecution.getStartTime(); + endTime = taskExecution.getEndTime(); + externalExecutionId = taskExecution.getExternalExecutionId(); + errorMessage = taskExecution.getErrorMessage(); + exitMessage = taskExecution.getExitMessage(); + } + } + + public long getExecutionId() { + return this.executionId; + } + + /** + * Returns the exit code of the task. + * + * @return the exit code or -1 if the exit code couldn't be determined + */ + @Override + public int getExitCode() { + return this.exitCode; + } + + public String getTaskName() { + return this.taskName; + } + + public LocalDateTime getStartTime() { + return this.startTime; + } + + public LocalDateTime getEndTime() { + return this.endTime; + } + + public String getExitMessage() { + return this.exitMessage; + } + + public String getErrorMessage() { + return this.errorMessage; + } + + public String getExternalExecutionId() { + return this.externalExecutionId; + } + + public Long getParentExecutionId() { + return this.parentExecutionId; + } + +} \ No newline at end of file diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/dataflow-configuration-metadata.properties b/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/dataflow-configuration-metadata.properties index c29bf9ec2e..4c8de30b72 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/dataflow-configuration-metadata.properties +++ b/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/dataflow-configuration-metadata.properties @@ -1 +1 @@ -configuration-properties.classes=org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties +configuration-properties.classes=org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties, org.springframework.cloud.task.configuration.TaskProperties diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/spring-configuration-metadata-whitelist.properties b/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/spring-configuration-metadata-whitelist.properties index c29bf9ec2e..4c8de30b72 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/spring-configuration-metadata-whitelist.properties +++ b/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/spring-configuration-metadata-whitelist.properties @@ -1 +1 @@ -configuration-properties.classes=org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties +configuration-properties.classes=org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties, org.springframework.cloud.task.configuration.TaskProperties diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/spring-configuration-metadata.json b/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/spring-configuration-metadata.json new file mode 100644 index 0000000000..56390bb197 --- /dev/null +++ b/spring-cloud-dataflow-composed-task-runner/src/main/resources/META-INF/spring-configuration-metadata.json @@ -0,0 +1,184 @@ +{ + "groups": [ + { + "name": "", + "type": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties", + "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties" + }, + { + "name": "composed-task-app-arguments", + "type": "java.util.Map", + "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties", + "sourceMethod": "getComposedTaskAppArguments()" + }, + { + "name": "composed-task-app-properties", + "type": "java.util.Map", + "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties", + "sourceMethod": "getComposedTaskAppProperties()" + } + , + { + "name": "task-app-properties", + "type": "org.springframework.cloud.task.configuration.TaskProperties", + "sourceType": "org.springframework.cloud.task.configuration.TaskProperties" + } + ], + "properties": [ + { + "name": "composed-task-arguments", + "type": "java.lang.String", + "description": "The arguments to be used for each of the 
tasks.", + "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties" + }, + { + "name": "composed-task-properties", + "type": "java.lang.String", + "description": "The properties to be used for each of the tasks as well as their deployments.", + "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties" + }, + { + "name": "dataflow-server-access-token", + "type": "java.lang.String", + "description": "The optional OAuth2 Access Token.", + "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties" + }, + { + "name": "dataflow-server-password", + "type": "java.lang.String", + "description": "The optional password for the dataflow server that will receive task launch requests. Used to access the the dataflow server using Basic Authentication. Not used if dataflowServerAccessToken is set.", + "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties" + }, + { + "name": "dataflow-server-uri", + "type": "java.net.URI", + "description": "The URI for the dataflow server that will receive task launch requests. Default is http:\/\/localhost:9393;", + "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties" + }, + { + "name": "dataflow-server-username", + "type": "java.lang.String", + "description": "The optional username for the dataflow server that will receive task launch requests. Used to access the the dataflow server using Basic Authentication. Not used if dataflowServerAccessToken is set.", + "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties" + }, + { + "name": "increment-instance-enabled", + "type": "java.lang.Boolean", + "description": "Allows a single ComposedTaskRunner instance to be re-executed without changing the parameters using a incremented id from previous execution. Default is true. ", + "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties", + "defaultValue": true + }, + { + "name": "uuid-instance-enabled", + "type": "java.lang.Boolean", + "description": "Allows a single ComposedTaskRunner instance to be re-executed without changing the parameters using a UUID for each launch. Default is false. If set to true it overrides increment-instance-enabled property.", + "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties", + "defaultValue": false + }, + { + "name": "interval-time-between-checks", + "type": "java.lang.Integer", + "description": "The amount of time in millis that the ComposedTaskRunner will wait between checks of the database to see if a task has completed.", + "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties", + "defaultValue": 10000 + }, + { + "name": "max-start-wait-time", + "type": "java.lang.Integer", + "description": "Determines the maximum time each child task is allowed for application startup. 
+    {
+      "name": "max-wait-time",
+      "type": "java.lang.Integer",
+      "description": "The maximum amount of time in millis that an individual step can run before the execution of the composed task is failed.",
+      "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties",
+      "defaultValue": 0
+    },
+    {
+      "name": "oauth2-client-credentials-client-id",
+      "type": "java.lang.String",
+      "description": "The OAuth2 Client Id (used for the client credentials grant). If not null, then the following properties are ignored: dataflowServerUsername, dataflowServerPassword, dataflowServerAccessToken.",
+      "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties"
+    },
+    {
+      "name": "oauth2-client-credentials-client-secret",
+      "type": "java.lang.String",
+      "description": "The OAuth2 Client Secret (used for the client credentials grant).",
+      "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties"
+    },
+    {
+      "name": "oauth2-client-credentials-scopes",
+      "type": "java.util.Set",
+      "description": "OAuth2 Authorization scopes (used for the client credentials grant).",
+      "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties"
+    },
+    {
+      "name": "oauth2-client-credentials-token-uri",
+      "type": "java.lang.String",
+      "description": "Token URI for the OAuth2 provider (used for the client credentials grant).",
+      "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties"
+    },
+    {
+      "name": "platform-name",
+      "type": "java.lang.String",
+      "description": "The platform property that will be used for each task in the workflow when it is launched.",
+      "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties"
+    },
+    {
+      "name": "transaction-isolation-level",
+      "type": "java.lang.String",
+      "description": "Establish the transaction isolation level for the Composed Task Runner. Default is ISOLATION_REPEATABLE_READ.",
+      "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties"
+    },
+    {
+      "name": "skip-tls-certificate-verification",
+      "type": "java.lang.Boolean",
+      "description": "If true, skips SSL certificate validation for SCDF server communication.",
+      "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties",
+      "defaultValue": false
+    },
+    {
+      "name": "split-thread-allow-core-thread-timeout",
+      "type": "java.lang.Boolean",
+      "description": "Specifies whether to allow split core threads to time out. Default is false.",
+      "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties",
+      "defaultValue": false
+    },
+    {
+      "name": "split-thread-core-pool-size",
+      "type": "java.lang.Integer",
+      "description": "Split's core pool size. Default is 4.",
+      "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties",
+      "defaultValue": 4
+    },
+    {
+      "name": "split-thread-keep-alive-seconds",
+      "type": "java.lang.Integer",
+      "description": "Split's thread keep alive seconds. Default is 60.",
+      "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties",
+      "defaultValue": 60
+    },
+    {
+      "name": "split-thread-max-pool-size",
+      "type": "java.lang.Integer",
+      "description": "Split's maximum pool size. Default is Integer.MAX_VALUE.",
+      "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties"
+    },
+    {
+      "name": "split-thread-queue-capacity",
+      "type": "java.lang.Integer",
+      "description": "Capacity for Split's BlockingQueue. Default is Integer.MAX_VALUE.",
+      "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties"
+    },
+    {
+      "name": "split-thread-wait-for-tasks-to-complete-on-shutdown",
+      "type": "java.lang.Boolean",
+      "description": "Whether to wait for scheduled tasks to complete on shutdown, not interrupting running tasks and executing all tasks in the queue. Default is false.",
+      "sourceType": "org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties",
+      "defaultValue": false
+    }
+  ],
+  "hints": []
+}
diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/resources/application.properties b/spring-cloud-dataflow-composed-task-runner/src/main/resources/application.properties
new file mode 100644
index 0000000000..e4796a3f18
--- /dev/null
+++ b/spring-cloud-dataflow-composed-task-runner/src/main/resources/application.properties
@@ -0,0 +1,2 @@
+spring.cloud.task.closecontext-enabled=true
+logging.pattern.dateformat=yyyy-MM-dd HH:mm:ss.SSS
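The `uuid-instance-enabled` property above, together with the `ctr.id` job parameter asserted in the tests that follow, amounts to a UUID-based `JobParametersIncrementer`: every launch gets a fresh identity, so the same composed task can be re-run with identical user parameters. A minimal sketch of the idea, assuming only the `ctr.id` parameter name visible in the tests (the class name is illustrative, not the actual CTR implementation):

import java.util.UUID;

import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.JobParametersIncrementer;

public class UuidIncrementerSketch implements JobParametersIncrementer {

	@Override
	public JobParameters getNext(JobParameters parameters) {
		JobParameters base = parameters == null ? new JobParameters() : parameters;
		// A fresh UUID per launch yields a new JobInstance even when the
		// user-supplied parameters are identical to a previous execution.
		return new JobParametersBuilder(base)
				.addString("ctr.id", UUID.randomUUID().toString())
				.toJobParameters();
	}
}

Compared with the incremented-id default, this avoids any read of the previous execution's parameters, at the cost of run ids that no longer form a sequence.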
diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedRunnerVisitorTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedRunnerVisitorTests.java
index dcc4868ac4..4efeea8326 100644
--- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedRunnerVisitorTests.java
+++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedRunnerVisitorTests.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2017-2020 the original author or authors.
+ * Copyright 2017-2024 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -26,6 +26,7 @@
 import java.util.List;
 import java.util.Set;
+import javax.sql.DataSource;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.Test;
@@ -34,9 +35,11 @@
 import org.springframework.batch.core.JobExecution;
 import org.springframework.batch.core.JobInstance;
+import org.springframework.batch.core.JobParameter;
 import org.springframework.batch.core.StepExecution;
 import org.springframework.batch.core.explore.JobExplorer;
 import org.springframework.beans.factory.BeanCreationException;
+import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.SpringApplication;
 import org.springframework.boot.autoconfigure.batch.BatchAutoConfiguration;
 import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration;
@@ -45,15 +48,17 @@
 import org.springframework.cloud.task.batch.configuration.TaskBatchAutoConfiguration;
 import org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration;
 import org.springframework.context.ConfigurableApplicationContext;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.jdbc.support.JdbcTransactionManager;
+import org.springframework.transaction.PlatformTransactionManager;
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertThrows;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
 /**
 * @author Glenn Renfro
+ * @author Corneil du Plessis
 */
 public class ComposedRunnerVisitorTests {
@@ -64,39 +69,48 @@ public class ComposedRunnerVisitorTests {
	private ConfigurableApplicationContext applicationContext;

	@AfterEach
-	public void tearDown() {
+	void tearDown() {
		if (this.applicationContext != null) {
			this.applicationContext.close();
		}
	}

	@Test
-	public void singleTest() {
+	void singleTest() {
		setupContextForGraph("AAA");
		Collection<StepExecution> stepExecutions = getStepExecutions();
-		assertEquals(1, stepExecutions.size());
+		assertThat(stepExecutions).hasSize(1);
+		StepExecution stepExecution = stepExecutions.iterator().next();
+		assertThat(stepExecution.getStepName()).isEqualTo("AAA_0");
+	}
+
+	@Test
+	void singleTestForuuIDIncrementer() {
+		setupContextForGraph("AAA", "--uuIdInstanceEnabled=true");
+		Collection<StepExecution> stepExecutions = getStepExecutions();
+		assertThat(stepExecutions).hasSize(1);
		StepExecution stepExecution = stepExecutions.iterator().next();
-		assertEquals("AAA_0", stepExecution.getStepName());
+		assertThat(stepExecution.getStepName()).isEqualTo("AAA_0");
	}

	@Test
-	public void testFailedGraph() {
+	void failedGraph() {
		setupContextForGraph("failedStep && AAA");
		Collection<StepExecution> stepExecutions = getStepExecutions();
-		assertEquals(1, stepExecutions.size());
+		assertThat(stepExecutions).hasSize(1);
		StepExecution stepExecution = stepExecutions.iterator().next();
-		assertEquals("failedStep_0", stepExecution.getStepName());
+		assertThat(stepExecution.getStepName()).isEqualTo("failedStep_0");
	}

	@Test
-	public void testEmbeddedFailedGraph() {
+	void embeddedFailedGraph() {
		setupContextForGraph("AAA && failedStep && BBB");
		Collection<StepExecution> stepExecutions = getStepExecutions();
-		assertEquals(2, stepExecutions.size());
+		assertThat(stepExecutions).hasSize(2);
		List<StepExecution> sortedStepExecution = getSortedStepExecutions(stepExecutions);
-		assertEquals("AAA_0", sortedStepExecution.get(0).getStepName());
-		assertEquals("failedStep_0", sortedStepExecution.get(1).getStepName());
+		assertThat(sortedStepExecution.get(0).getStepName()).isEqualTo("AAA_0");
+		assertThat(sortedStepExecution.get(1).getStepName()).isEqualTo("failedStep_0");
	}
assertEquals("failedStep_0", sortedStepExecution.get(1).getStepName()); + assertThat(sortedStepExecution.get(0).getStepName()).isEqualTo("AAA_0"); + assertThat(sortedStepExecution.get(1).getStepName()).isEqualTo("failedStep_0"); } // @Ignore("Disabling till parser can support duplicate tasks") @@ -104,237 +118,238 @@ public void testEmbeddedFailedGraph() { public void duplicateTaskTest() { setupContextForGraph("AAA && AAA"); Collection stepExecutions = getStepExecutions(); - assertEquals(2, stepExecutions.size()); + assertThat(stepExecutions).hasSize(2); List sortedStepExecution = getSortedStepExecutions(stepExecutions); - assertEquals("AAA_1", sortedStepExecution.get(0).getStepName()); - assertEquals("AAA_0", sortedStepExecution.get(1).getStepName()); + assertThat(sortedStepExecution.get(0).getStepName()).isEqualTo("AAA_1"); + assertThat(sortedStepExecution.get(1).getStepName()).isEqualTo("AAA_0"); } @Test - public void testSequential() { + void sequential() { setupContextForGraph("AAA && BBB && CCC"); List stepExecutions = getSortedStepExecutions(getStepExecutions()); - assertEquals(3, stepExecutions.size()); + assertThat(stepExecutions).hasSize(3); Iterator iterator = stepExecutions.iterator(); StepExecution stepExecution = iterator.next(); - assertEquals("AAA_0", stepExecution.getStepName()); + assertThat(stepExecution.getStepName()).isEqualTo("AAA_0"); stepExecution = iterator.next(); - assertEquals("BBB_0", stepExecution.getStepName()); + assertThat(stepExecution.getStepName()).isEqualTo("BBB_0"); stepExecution = iterator.next(); - assertEquals("CCC_0", stepExecution.getStepName()); + assertThat(stepExecution.getStepName()).isEqualTo("CCC_0"); } @ParameterizedTest @ValueSource(ints = {1, 2, 3}) - public void splitTest(int threadCorePoolSize) { + void splitTest(int threadCorePoolSize) { setupContextForGraph("", "--splitThreadCorePoolSize=" + threadCorePoolSize); Collection stepExecutions = getStepExecutions(); Set stepNames = getStepNames(stepExecutions); - assertEquals(3, stepExecutions.size()); - assertTrue(stepNames.contains("AAA_0")); - assertTrue(stepNames.contains("BBB_0")); - assertTrue(stepNames.contains("CCC_0")); + assertThat(stepExecutions).hasSize(3); + assertThat(stepNames).contains("AAA_0"); + assertThat(stepNames).contains("BBB_0"); + assertThat(stepNames).contains("CCC_0"); } @ParameterizedTest @ValueSource(ints = {2, 5}) - public void nestedSplit(int threadCorePoolSize) { + void nestedSplit(int threadCorePoolSize) { setupContextForGraph("< && CCC || DDD>", "--splitThreadCorePoolSize=" + threadCorePoolSize); Collection stepExecutions = getStepExecutions(); Set stepNames = getStepNames(stepExecutions); - assertEquals(4, stepExecutions.size()); - assertTrue(stepNames.contains("AAA_0")); - assertTrue(stepNames.contains("BBB_0")); - assertTrue(stepNames.contains("CCC_0")); - assertTrue(stepNames.contains("DDD_0")); + assertThat(stepExecutions).hasSize(4); + assertThat(stepNames).contains("AAA_0"); + assertThat(stepNames).contains("BBB_0"); + assertThat(stepNames).contains("CCC_0"); + assertThat(stepNames).contains("DDD_0"); } @Test - public void nestedSplitThreadPoolSize() { - Throwable exception = assertThrows(BeanCreationException.class, () -> - setupContextForGraph("< && CCC || && FFF>", "--splitThreadCorePoolSize=2")); - assertThat(exception.getCause().getCause().getMessage()).isEqualTo("Split thread core pool size 2 should be equal or greater than the " + + void nestedSplitThreadPoolSize() { + assertThatThrownBy(() -> + setupContextForGraph("< && CCC || && FFF>", 
"--splitThreadCorePoolSize=2") + ).hasCauseInstanceOf(BeanCreationException.class) + .hasRootCauseMessage("Split thread core pool size 2 should be equal or greater than the " + "depth of split flows 3. Try setting the composed task property " + "`splitThreadCorePoolSize`"); } - + @Test - public void sequentialNestedSplitThreadPoolSize() { + void sequentialNestedSplitThreadPoolSize() { setupContextForGraph("< || > && ", "--splitThreadCorePoolSize=3"); Collection stepExecutions = getStepExecutions(); Set stepNames = getStepNames(stepExecutions); - assertEquals(6, stepExecutions.size()); - assertTrue(stepNames.contains("AAA_0")); - assertTrue(stepNames.contains("BBB_0")); - assertTrue(stepNames.contains("CCC_0")); - assertTrue(stepNames.contains("DDD_0")); - assertTrue(stepNames.contains("EEE_0")); - assertTrue(stepNames.contains("FFF_0")); + assertThat(stepExecutions).hasSize(6); + assertThat(stepNames).contains("AAA_0"); + assertThat(stepNames).contains("BBB_0"); + assertThat(stepNames).contains("CCC_0"); + assertThat(stepNames).contains("DDD_0"); + assertThat(stepNames).contains("EEE_0"); + assertThat(stepNames).contains("FFF_0"); } - + @Test - public void twoSplitTest() { + void twoSplitTest() { setupContextForGraph(" && "); Collection stepExecutions = getStepExecutions(); Set stepNames = getStepNames(stepExecutions); - assertEquals(5, stepExecutions.size()); - assertTrue(stepNames.contains("AAA_0")); - assertTrue(stepNames.contains("BBB_0")); - assertTrue(stepNames.contains("CCC_0")); - assertTrue(stepNames.contains("DDD_0")); - assertTrue(stepNames.contains("EEE_0")); + assertThat(stepExecutions).hasSize(5); + assertThat(stepNames).contains("AAA_0"); + assertThat(stepNames).contains("BBB_0"); + assertThat(stepNames).contains("CCC_0"); + assertThat(stepNames).contains("DDD_0"); + assertThat(stepNames).contains("EEE_0"); } @Test - public void testSequentialAndSplit() { + void sequentialAndSplit() { setupContextForGraph("AAA && && EEE"); Collection stepExecutions = getStepExecutions(); Set stepNames = getStepNames(stepExecutions); - assertEquals(5, stepExecutions.size()); - assertTrue(stepNames.contains("AAA_0")); - assertTrue(stepNames.contains("BBB_0")); - assertTrue(stepNames.contains("CCC_0")); - assertTrue(stepNames.contains("DDD_0")); - assertTrue(stepNames.contains("EEE_0")); + assertThat(stepExecutions).hasSize(5); + assertThat(stepNames).contains("AAA_0"); + assertThat(stepNames).contains("BBB_0"); + assertThat(stepNames).contains("CCC_0"); + assertThat(stepNames).contains("DDD_0"); + assertThat(stepNames).contains("EEE_0"); List sortedStepExecution = getSortedStepExecutions(stepExecutions); - assertEquals("AAA_0", sortedStepExecution.get(0).getStepName()); - assertEquals("EEE_0", sortedStepExecution.get(4).getStepName()); + assertThat(sortedStepExecution.get(0).getStepName()).isEqualTo("AAA_0"); + assertThat(sortedStepExecution.get(4).getStepName()).isEqualTo("EEE_0"); } @Test - public void testSequentialTransitionAndSplit() { + void sequentialTransitionAndSplit() { setupContextForGraph("AAA && FFF 'FAILED' -> EEE && && DDD"); Collection stepExecutions = getStepExecutions(); Set stepNames = getStepNames(stepExecutions); - assertEquals(5, stepExecutions.size()); - assertTrue(stepNames.contains("AAA_0")); - assertTrue(stepNames.contains("BBB_0")); - assertTrue(stepNames.contains("CCC_0")); - assertTrue(stepNames.contains("DDD_0")); - assertTrue(stepNames.contains("FFF_0")); + assertThat(stepExecutions).hasSize(5); + assertThat(stepNames).contains("AAA_0"); + 
assertThat(stepNames).contains("BBB_0"); + assertThat(stepNames).contains("CCC_0"); + assertThat(stepNames).contains("DDD_0"); + assertThat(stepNames).contains("FFF_0"); List sortedStepExecution = getSortedStepExecutions(stepExecutions); - assertEquals("AAA_0", sortedStepExecution.get(0).getStepName()); - assertEquals("DDD_0", sortedStepExecution.get(4).getStepName()); + assertThat(sortedStepExecution.get(0).getStepName()).isEqualTo("AAA_0"); + assertThat(sortedStepExecution.get(4).getStepName()).isEqualTo("DDD_0"); } @Test - public void testSequentialTransitionAndSplitFailedInvalid() { + void sequentialTransitionAndSplitFailedInvalid() { verifyExceptionThrown(INVALID_FLOW_MSG, "AAA && failedStep 'FAILED' -> EEE '*' -> FFF && && DDD"); } @Test - public void testSequentialTransitionAndSplitFailed() { + void sequentialTransitionAndSplitFailed() { setupContextForGraph("AAA && failedStep 'FAILED' -> EEE && FFF && && DDD"); Collection stepExecutions = getStepExecutions(); Set stepNames = getStepNames(stepExecutions); - assertEquals(3, stepExecutions.size()); - assertTrue(stepNames.contains("AAA_0")); - assertTrue(stepNames.contains("failedStep_0")); - assertTrue(stepNames.contains("EEE_0")); + assertThat(stepExecutions).hasSize(3); + assertThat(stepNames).contains("AAA_0"); + assertThat(stepNames).contains("failedStep_0"); + assertThat(stepNames).contains("EEE_0"); } @Test - public void testSequentialAndFailedSplit() { + void sequentialAndFailedSplit() { setupContextForGraph("AAA && && EEE"); Collection stepExecutions = getStepExecutions(); Set stepNames = getStepNames(stepExecutions); - assertEquals(4, stepExecutions.size()); - assertTrue(stepNames.contains("AAA_0")); - assertTrue(stepNames.contains("BBB_0")); - assertTrue(stepNames.contains("DDD_0")); - assertTrue(stepNames.contains("failedStep_0")); + assertThat(stepExecutions).hasSize(4); + assertThat(stepNames).contains("AAA_0"); + assertThat(stepNames).contains("BBB_0"); + assertThat(stepNames).contains("DDD_0"); + assertThat(stepNames).contains("failedStep_0"); } @Test - public void testSequentialAndSplitWithFlow() { + void sequentialAndSplitWithFlow() { setupContextForGraph("AAA && && EEE"); Collection stepExecutions = getStepExecutions(); Set stepNames = getStepNames(stepExecutions); - assertEquals(6, stepExecutions.size()); - assertTrue(stepNames.contains("AAA_0")); - assertTrue(stepNames.contains("BBB_0")); - assertTrue(stepNames.contains("CCC_0")); - assertTrue(stepNames.contains("DDD_0")); - assertTrue(stepNames.contains("EEE_0")); - assertTrue(stepNames.contains("FFF_0")); + assertThat(stepExecutions).hasSize(6); + assertThat(stepNames).contains("AAA_0"); + assertThat(stepNames).contains("BBB_0"); + assertThat(stepNames).contains("CCC_0"); + assertThat(stepNames).contains("DDD_0"); + assertThat(stepNames).contains("EEE_0"); + assertThat(stepNames).contains("FFF_0"); List sortedStepExecution = getSortedStepExecutions(stepExecutions); - assertEquals("AAA_0", sortedStepExecution.get(0).getStepName()); - assertEquals("EEE_0", sortedStepExecution.get(5).getStepName()); + assertThat(sortedStepExecution.get(0).getStepName()).isEqualTo("AAA_0"); + assertThat(sortedStepExecution.get(5).getStepName()).isEqualTo("EEE_0"); } @Test - public void testFailedBasicTransition() { + void failedBasicTransition() { setupContextForGraph("failedStep 'FAILED' -> AAA * -> BBB"); Collection stepExecutions = getStepExecutions(); Set stepNames = getStepNames(stepExecutions); - assertEquals(2, stepExecutions.size()); - 
assertTrue(stepNames.contains("failedStep_0")); - assertTrue(stepNames.contains("AAA_0")); + assertThat(stepExecutions).hasSize(2); + assertThat(stepNames).contains("failedStep_0"); + assertThat(stepNames).contains("AAA_0"); } @Test - public void testSuccessBasicTransition() { + void successBasicTransition() { setupContextForGraph("AAA 'FAILED' -> BBB * -> CCC"); Collection stepExecutions = getStepExecutions(); Set stepNames = getStepNames(stepExecutions); - assertEquals(2, stepExecutions.size()); - assertTrue(stepNames.contains("AAA_0")); - assertTrue(stepNames.contains("CCC_0")); + assertThat(stepExecutions).hasSize(2); + assertThat(stepNames).contains("AAA_0"); + assertThat(stepNames).contains("CCC_0"); } @Test - public void testSuccessBasicTransitionWithSequence() { + void successBasicTransitionWithSequence() { verifyExceptionThrown(INVALID_FLOW_MSG, "AAA 'FAILED' -> BBB * -> CCC && DDD && EEE"); } @Test - public void testSuccessBasicTransitionWithTransition() { + void successBasicTransitionWithTransition() { setupContextForGraph("AAA 'FAILED' -> BBB && CCC 'FAILED' -> DDD '*' -> EEE"); Collection stepExecutions = getStepExecutions(); Set stepNames = getStepNames(stepExecutions); - assertEquals(3, stepExecutions.size()); - assertTrue(stepNames.contains("AAA_0")); - assertTrue(stepNames.contains("CCC_0")); - assertTrue(stepNames.contains("EEE_0")); + assertThat(stepExecutions).hasSize(3); + assertThat(stepNames).contains("AAA_0"); + assertThat(stepNames).contains("CCC_0"); + assertThat(stepNames).contains("EEE_0"); List sortedStepExecution = getSortedStepExecutions(stepExecutions); - assertEquals("AAA_0", sortedStepExecution.get(0).getStepName()); - assertEquals("EEE_0", sortedStepExecution.get(2).getStepName()); + assertThat(sortedStepExecution.get(0).getStepName()).isEqualTo("AAA_0"); + assertThat(sortedStepExecution.get(2).getStepName()).isEqualTo("EEE_0"); } @Test - public void testSequenceFollowedBySuccessBasicTransitionSequence() { + void sequenceFollowedBySuccessBasicTransitionSequence() { verifyExceptionThrown(INVALID_FLOW_MSG, "DDD && AAA 'FAILED' -> BBB * -> CCC && EEE"); } @Test - public void testWildCardOnlyInLastPosition() { + void wildCardOnlyInLastPosition() { setupContextForGraph("AAA 'FAILED' -> BBB && CCC * -> DDD "); Collection stepExecutions = getStepExecutions(); Set stepNames = getStepNames(stepExecutions); - assertEquals(3, stepExecutions.size()); - assertTrue(stepNames.contains("AAA_0")); - assertTrue(stepNames.contains("CCC_0")); - assertTrue(stepNames.contains("DDD_0")); + assertThat(stepExecutions).hasSize(3); + assertThat(stepNames).contains("AAA_0"); + assertThat(stepNames).contains("CCC_0"); + assertThat(stepNames).contains("DDD_0"); List sortedStepExecution = getSortedStepExecutions(stepExecutions); - assertEquals("AAA_0", sortedStepExecution.get(0).getStepName()); - assertEquals("DDD_0", sortedStepExecution.get(2).getStepName()); + assertThat(sortedStepExecution.get(0).getStepName()).isEqualTo("AAA_0"); + assertThat(sortedStepExecution.get(2).getStepName()).isEqualTo("DDD_0"); } @Test - public void failedStepTransitionWithDuplicateTaskNameTest() { + void failedStepTransitionWithDuplicateTaskNameTest() { verifyExceptionThrown( "Problems found when validating 'failedStep " + "'FAILED' -> BBB && CCC && BBB && EEE': " + @@ -344,7 +359,7 @@ public void failedStepTransitionWithDuplicateTaskNameTest() { } @Test - public void successStepTransitionWithDuplicateTaskNameTest() { + void successStepTransitionWithDuplicateTaskNameTest() { verifyExceptionThrown( 
"Problems found when validating 'AAA 'FAILED' -> " + "BBB * -> CCC && BBB && EEE': [166E:(pos 33): " + @@ -371,8 +386,10 @@ private void setupContextForGraph(String graph, String... args) { setupContextForGraph(argsForCtx.toArray(new String[0])); } - private void setupContextForGraph(String[] args) { - this.applicationContext = SpringApplication.run(new Class[]{ComposedRunnerVisitorConfiguration.class, + private void setupContextForGraph(String[] args) throws RuntimeException{ + this.applicationContext = SpringApplication. + run(new Class[]{ ComposedRunnerVisitorTestsConfiguration.class, + ComposedRunnerVisitorConfiguration.class, PropertyPlaceholderAutoConfiguration.class, EmbeddedDataSourceConfiguration.class, BatchAutoConfiguration.class, @@ -380,14 +397,16 @@ private void setupContextForGraph(String[] args) { SimpleTaskAutoConfiguration.class}, args); } + private Collection getStepExecutions() { JobExplorer jobExplorer = this.applicationContext.getBean(JobExplorer.class); List jobInstances = jobExplorer.findJobInstancesByJobName("job", 0, 1); - assertEquals(1, jobInstances.size()); + assertThat(jobInstances).hasSize(1); JobInstance jobInstance = jobInstances.get(0); List jobExecutions = jobExplorer.getJobExecutions(jobInstance); - assertEquals(1, jobExecutions.size()); + assertThat(jobExecutions).hasSize(1); JobExecution jobExecution = jobExecutions.get(0); + assertThat(jobExecution.getJobParameters().getParameters().get("ctr.id")).isNotNull(); return jobExecution.getStepExecutions(); } @@ -398,8 +417,18 @@ private List getSortedStepExecutions(Collection st } private void verifyExceptionThrown(String message, String graph) { - Throwable exception = assertThrows(BeanCreationException.class, () -> setupContextForGraph(graph)); - assertThat(exception.getCause().getCause().getMessage()).isEqualTo(message); + assertThatThrownBy(() -> setupContextForGraph(graph)) + .hasRootCauseMessage(message); + } + + @Configuration + public static class ComposedRunnerVisitorTestsConfiguration { + @Autowired + DataSource dataSource; + @Bean + public PlatformTransactionManager transactionManager() { + return new JdbcTransactionManager(dataSource); + } } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationJobIncrementerTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationJobIncrementerTests.java index 42b5f77cc1..78b6263be0 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationJobIncrementerTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationJobIncrementerTests.java @@ -17,8 +17,6 @@ package org.springframework.cloud.dataflow.composedtaskrunner; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; - import org.springframework.batch.core.Job; import org.springframework.batch.core.JobParameters; @@ -29,22 +27,20 @@ import org.springframework.cloud.common.security.CommonSecurityAutoConfiguration; import org.springframework.cloud.dataflow.composedtaskrunner.configuration.DataFlowTestConfiguration; import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.ContextConfiguration; import 
diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationJobIncrementerTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationJobIncrementerTests.java
index 42b5f77cc1..78b6263be0 100644
--- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationJobIncrementerTests.java
+++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationJobIncrementerTests.java
@@ -17,8 +17,6 @@
 package org.springframework.cloud.dataflow.composedtaskrunner;

 import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.extension.ExtendWith;
-
 import org.springframework.batch.core.Job;
 import org.springframework.batch.core.JobParameters;
@@ -29,22 +27,20 @@
 import org.springframework.cloud.common.security.CommonSecurityAutoConfiguration;
 import org.springframework.cloud.dataflow.composedtaskrunner.configuration.DataFlowTestConfiguration;
 import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
 import org.springframework.test.context.TestPropertySource;
-import org.springframework.test.context.junit.jupiter.SpringExtension;
+import org.springframework.test.context.junit.jupiter.SpringJUnitConfig;
 import org.springframework.util.Assert;

 /**
 * @author Glenn Renfro
 */
-@ExtendWith(SpringExtension.class)
-@ContextConfiguration(classes={EmbeddedDataSourceConfiguration.class,
-	DataFlowTestConfiguration.class,StepBeanDefinitionRegistrar.class,
+@SpringJUnitConfig(classes = {EmbeddedDataSourceConfiguration.class,
+	DataFlowTestConfiguration.class, StepBeanDefinitionRegistrar.class,
	ComposedTaskRunnerConfiguration.class,
	StepBeanDefinitionRegistrar.class})
-@EnableAutoConfiguration(exclude = { CommonSecurityAutoConfiguration.class})
-@TestPropertySource(properties = {"graph=AAA && BBB && CCC","max-wait-time=1000", "increment-instance-enabled=true", "spring.cloud.task.name=footest"})
-public class ComposedTaskRunnerConfigurationJobIncrementerTests {
+@EnableAutoConfiguration(exclude = {CommonSecurityAutoConfiguration.class})
+@TestPropertySource(properties = {"graph=AAA && BBB && CCC", "max-wait-time=1000", "increment-instance-enabled=true", "spring.cloud.task.name=footest"})
+class ComposedTaskRunnerConfigurationJobIncrementerTests {

	@Autowired
	private JobRepository jobRepository;
@@ -54,7 +50,7 @@ public class ComposedTaskRunnerConfigurationJobIncrementerTests {

	@Test
	@DirtiesContext
-	public void testComposedConfigurationWithJobIncrementer() throws Exception {
+	void composedConfigurationWithJobIncrementer() throws Exception {
		this.jobRepository.createJobExecution(
				"ComposedTest", new JobParameters());
		Assert.notNull(job.getJobParametersIncrementer(), "JobParametersIncrementer must not be null.");
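In the configuration tests that follow, the mocked `TaskOperations.launch(...)` is now verified with an extra `--spring.cloud.task.parent-execution-id=1` argument: the runner appends the parent task's execution id to whatever arguments the user configured, so child executions can be correlated with the composed-task run that launched them. A rough sketch of what those assertions imply (the helper and its name are illustrative, not the actual tasklet code):

import java.util.ArrayList;
import java.util.List;

public class ChildTaskArguments {

	// Appends the parent execution id so child tasks can be correlated with
	// the composed-task run that launched them.
	static List<String> withParentExecutionId(List<String> userArguments, long parentExecutionId) {
		List<String> arguments = new ArrayList<>(userArguments);
		arguments.add("--spring.cloud.task.parent-execution-id=" + parentExecutionId);
		return arguments;
	}

	public static void main(String[] args) {
		// e.g. ["--baz=boo", "--spring.cloud.task.parent-execution-id=1"]
		System.out.println(withParentExecutionId(List.of("--baz=boo"), 1L));
	}
}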
diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationNoPropertiesTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationNoPropertiesTests.java
index 4c2e6c210e..a681cfd458 100644
--- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationNoPropertiesTests.java
+++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationNoPropertiesTests.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2017-2021 the original author or authors.
+ * Copyright 2017-2024 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -16,12 +16,10 @@
 package org.springframework.cloud.dataflow.composedtaskrunner;

-import java.util.ArrayList;
-import java.util.HashMap;
-
+import java.util.Arrays;
+import java.util.Collections;
 import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.extension.ExtendWith;

 import org.springframework.batch.core.Job;
 import org.springframework.batch.core.JobExecution;
@@ -33,29 +31,29 @@
 import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration;
 import org.springframework.cloud.common.security.CommonSecurityAutoConfiguration;
 import org.springframework.cloud.dataflow.composedtaskrunner.configuration.DataFlowTestConfiguration;
+import org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties;
 import org.springframework.cloud.dataflow.rest.client.TaskOperations;
 import org.springframework.context.ApplicationContext;
 import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
 import org.springframework.test.context.TestPropertySource;
-import org.springframework.test.context.junit.jupiter.SpringExtension;
+import org.springframework.test.context.junit.jupiter.SpringJUnitConfig;
 import org.springframework.test.util.ReflectionTestUtils;
 import org.springframework.util.Assert;

+import static org.assertj.core.api.Assertions.assertThat;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.verify;

 /**
 * @author Glenn Renfro
 */
-@ExtendWith(SpringExtension.class)
-@ContextConfiguration(classes={EmbeddedDataSourceConfiguration.class,
-	DataFlowTestConfiguration.class,StepBeanDefinitionRegistrar.class,
+@SpringJUnitConfig(classes = {EmbeddedDataSourceConfiguration.class,
+	DataFlowTestConfiguration.class, StepBeanDefinitionRegistrar.class,
	ComposedTaskRunnerConfiguration.class,
	StepBeanDefinitionRegistrar.class})
-@TestPropertySource(properties = {"graph=AAA && BBB && CCC","max-wait-time=1000", "spring.cloud.task.name=foo"})
-@EnableAutoConfiguration(exclude = { CommonSecurityAutoConfiguration.class})
-public class ComposedTaskRunnerConfigurationNoPropertiesTests {
+@TestPropertySource(properties = {"graph=AAA && BBB && CCC", "max-wait-time=1000", "spring.cloud.task.name=foo"})
+@EnableAutoConfiguration(exclude = {CommonSecurityAutoConfiguration.class})
+class ComposedTaskRunnerConfigurationNoPropertiesTests {

	@Autowired
	private JobRepository jobRepository;
@@ -66,17 +64,26 @@ public class ComposedTaskRunnerConfigurationNoPropertiesTests {
	@Autowired
	private ApplicationContext context;

+	@Autowired
+	private ComposedTaskProperties composedTaskProperties;
+
	@Test
	@DirtiesContext
-	public void testComposedConfiguration() throws Exception {
+	void composedConfiguration() throws Exception {
		JobExecution jobExecution = this.jobRepository.createJobExecution(
				"ComposedTest", new JobParameters());
		TaskletStep ctrStep = context.getBean("AAA_0", TaskletStep.class);
		TaskOperations taskOperations = mock(TaskOperations.class);
		ReflectionTestUtils.setField(ctrStep.getTasklet(), "taskOperations", taskOperations);
		job.execute(jobExecution);
+		assertThat(composedTaskProperties.getTransactionIsolationLevel()).isEqualTo("ISOLATION_REPEATABLE_READ");
		Assert.notNull(job.getJobParametersIncrementer(), "JobParametersIncrementer must not be null.");
-		verify(taskOperations).launch("AAA", new HashMap<>(0), new ArrayList<>(0));
+
+		verify(taskOperations).launch(
+				"AAA",
+				Collections.emptyMap(),
+				Arrays.asList("--spring.cloud.task.parent-execution-id=1")
+		);
	}
}
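The next test file exercises another addition: `composed-task-app-arguments` keys may carry a `base64_` prefix so that characters that are illegal in property names can still be expressed; in the test, `base64_YXBwLiouMA` decodes to the wildcard key `app.*.0`, whose argument is then applied to every app. A small sketch of the decoding side, assuming only the `base64_` prefix convention visible in the test (the class and method names are illustrative):

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class Base64ArgumentKeys {

	private static final String PREFIX = "base64_";

	// Decodes "base64_YXBwLiouMA" back to "app.*.0"; keys without the
	// prefix are returned unchanged.
	static String decodeKey(String key) {
		if (!key.startsWith(PREFIX)) {
			return key;
		}
		byte[] decoded = Base64.getDecoder().decode(key.substring(PREFIX.length()));
		return new String(decoded, StandardCharsets.UTF_8);
	}

	public static void main(String[] args) {
		System.out.println(decodeKey("base64_YXBwLiouMA")); // prints: app.*.0
	}
}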
diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithAppArgumentsPropertiesTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithAppArgumentsPropertiesTests.java
index e01c06e1cd..b9d2e78a87 100644
--- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithAppArgumentsPropertiesTests.java
+++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithAppArgumentsPropertiesTests.java
@@ -20,7 +20,6 @@
 import java.util.Map;

 import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.extension.ExtendWith;

 import org.springframework.batch.core.Job;
 import org.springframework.batch.core.JobExecution;
@@ -35,26 +34,26 @@
 import org.springframework.cloud.dataflow.composedtaskrunner.support.ComposedTaskRunnerTaskletTestUtils;
 import org.springframework.context.ApplicationContext;
 import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.ContextConfiguration;
 import org.springframework.test.context.TestPropertySource;
-import org.springframework.test.context.junit.jupiter.SpringExtension;
+import org.springframework.test.context.junit.jupiter.SpringJUnitConfig;

 import static org.assertj.core.api.Assertions.assertThat;

 /**
 * @author Janne Valkealahti
+ * @author Corneil du Plessis
 */
-@ExtendWith(SpringExtension.class)
-@ContextConfiguration(classes={EmbeddedDataSourceConfiguration.class,
+@SpringJUnitConfig(classes = {EmbeddedDataSourceConfiguration.class,
	DataFlowTestConfiguration.class, StepBeanDefinitionRegistrar.class,
	ComposedTaskRunnerConfiguration.class})
-@TestPropertySource(properties = {"graph=ComposedTest-AAA && ComposedTest-BBB && ComposedTest-CCC","max-wait-time=1010",
+@TestPropertySource(properties = {"graph=ComposedTest-AAA && ComposedTest-BBB && ComposedTest-CCC", "max-wait-time=1010",
	"interval-time-between-checks=1100",
-	"composed-task-app-arguments.app.AAA.0=--arg1=value1",
-	"composed-task-app-arguments.app.AAA.1=--arg2=value2",
+	"composed-task-app-arguments.app.AAA.0=--arg1=value1",
+	"composed-task-app-arguments.app.AAA.1=--arg2=value2",
+	"composed-task-app-arguments.base64_YXBwLiouMA=--arg3=value3",
	"dataflow-server-uri=https://bar", "spring.cloud.task.name=ComposedTest"})
-@EnableAutoConfiguration(exclude = { CommonSecurityAutoConfiguration.class})
-public class ComposedTaskRunnerConfigurationWithAppArgumentsPropertiesTests {
+@EnableAutoConfiguration(exclude = {CommonSecurityAutoConfiguration.class})
+class ComposedTaskRunnerConfigurationWithAppArgumentsPropertiesTests {

	@Autowired
	private JobRepository jobRepository;
@@ -70,7 +69,7 @@ public class ComposedTaskRunnerConfigurationWithAppArgumentsPropertiesTests {

	@Test
	@DirtiesContext
-	public void testComposedConfiguration() throws Exception {
+	void composedConfiguration() throws Exception {
		JobExecution jobExecution = this.jobRepository.createJobExecution(
				"ComposedTest", new JobParameters());
		job.execute(jobExecution);
@@ -83,9 +82,10 @@
		TaskLauncherTasklet tasklet = ComposedTaskRunnerTaskletTestUtils.getTaskletLauncherTasklet(context, "ComposedTest-AAA_0");
		List<String> result =
ComposedTaskRunnerTaskletTestUtils.getTaskletArgumentsViaReflection(tasklet); - assertThat(result).contains("--arg1=value1", "--arg2=value2"); - assertThat(result.size()).isEqualTo(2); + assertThat(result) + .contains("--arg1=value1", "--arg2=value2", "--arg3=value3") + .hasSize(3); Map taskletProperties = ComposedTaskRunnerTaskletTestUtils.getTaskletPropertiesViaReflection(tasklet); - assertThat(taskletProperties.size()).isEqualTo(0); + assertThat(taskletProperties).isEmpty(); } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesNoLabelTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesNoLabelTests.java index 30465d8ea8..f0a0899c55 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesNoLabelTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesNoLabelTests.java @@ -21,7 +21,6 @@ import java.util.Map; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.batch.core.Job; import org.springframework.batch.core.JobExecution; @@ -36,28 +35,27 @@ import org.springframework.cloud.dataflow.composedtaskrunner.support.ComposedTaskRunnerTaskletTestUtils; import org.springframework.context.ApplicationContext; import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.TestPropertySource; -import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.util.Assert; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertEquals; /** * @author Glenn Renfro + * @author Corneil du Plessis */ -@ExtendWith(SpringExtension.class) -@ContextConfiguration(classes={EmbeddedDataSourceConfiguration.class, - DataFlowTestConfiguration.class,StepBeanDefinitionRegistrar.class, +@SpringJUnitConfig(classes = {EmbeddedDataSourceConfiguration.class, + DataFlowTestConfiguration.class, StepBeanDefinitionRegistrar.class, ComposedTaskRunnerConfiguration.class}) -@TestPropertySource(properties = {"graph=ComposedTest-AAA && ComposedTest-BBB && ComposedTest-CCC","max-wait-time=1010", +@TestPropertySource(properties = {"graph=ComposedTest-AAA && ComposedTest-BBB && ComposedTest-CCC", "max-wait-time=1010", + "skip-tls-certificate-verification=true", "composed-task-app-properties.app.AAA.format=yyyy", "interval-time-between-checks=1100", "composed-task-arguments=--baz=boo --AAA.foo=bar BBB.que=qui", "dataflow-server-uri=https://bar", "spring.cloud.task.name=ComposedTest"}) -@EnableAutoConfiguration(exclude = { CommonSecurityAutoConfiguration.class}) -public class ComposedTaskRunnerConfigurationWithPropertiesNoLabelTests { +@EnableAutoConfiguration(exclude = {CommonSecurityAutoConfiguration.class}) +class ComposedTaskRunnerConfigurationWithPropertiesNoLabelTests { @Autowired private JobRepository jobRepository; @@ -73,7 +71,7 @@ public class ComposedTaskRunnerConfigurationWithPropertiesNoLabelTests { @Test @DirtiesContext - public void 
testComposedConfiguration() throws Exception { + void composedConfiguration() throws Exception { JobExecution jobExecution = this.jobRepository.createJobExecution( "ComposedTest", new JobParameters()); job.execute(jobExecution); @@ -82,19 +80,21 @@ public void testComposedConfiguration() throws Exception { props.put("app.AAA.format", "yyyy"); Map composedTaskAppProperties = new HashMap<>(1); composedTaskAppProperties.put("app.AAA.format", "yyyy"); - - assertEquals(composedTaskAppProperties, composedTaskProperties.getComposedTaskAppProperties()); - assertEquals(1010, composedTaskProperties.getMaxWaitTime()); - assertEquals(1100, composedTaskProperties.getIntervalTimeBetweenChecks()); - assertEquals("https://bar/", composedTaskProperties.getDataflowServerUri().toASCIIString()); + assertThat(composedTaskProperties.isSkipTlsCertificateVerification()).isTrue(); + assertThat(composedTaskProperties.getComposedTaskAppProperties()).isEqualTo(composedTaskAppProperties); + assertThat(composedTaskProperties.getMaxWaitTime()).isEqualTo(1010); + assertThat(composedTaskProperties.getIntervalTimeBetweenChecks()).isEqualTo(1100); + assertThat(composedTaskProperties.getDataflowServerUri().toASCIIString()).isEqualTo("https://bar/"); Assert.notNull(job.getJobParametersIncrementer(), "JobParametersIncrementer must not be null."); TaskLauncherTasklet tasklet = ComposedTaskRunnerTaskletTestUtils.getTaskletLauncherTasklet(context, "ComposedTest-AAA_0"); List result = ComposedTaskRunnerTaskletTestUtils.getTaskletArgumentsViaReflection(tasklet); - assertThat(result).contains("--baz=boo --foo=bar"); - assertThat(result.size()).isEqualTo(1); + assertThat(result) + .contains("--baz=boo --foo=bar") + .hasSize(1); Map taskletProperties = ComposedTaskRunnerTaskletTestUtils.getTaskletPropertiesViaReflection(tasklet); - assertThat(taskletProperties.size()).isEqualTo(1); - assertThat(taskletProperties.get("app.AAA.format")).isEqualTo("yyyy"); + assertThat(taskletProperties) + .hasSize(1) + .containsEntry("app.AAA.format", "yyyy"); } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesTests.java index 5fa739af7f..5d4e2cf653 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -22,7 +22,6 @@ import java.util.Map; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.batch.core.Job; import org.springframework.batch.core.JobExecution; @@ -36,32 +35,32 @@ import org.springframework.cloud.dataflow.composedtaskrunner.configuration.DataFlowTestConfiguration; import org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties; import org.springframework.cloud.dataflow.rest.client.TaskOperations; +import org.springframework.cloud.task.configuration.TaskProperties; import org.springframework.context.ApplicationContext; import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.TestPropertySource; -import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.util.Assert; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; /** * @author Glenn Renfro */ -@ExtendWith(SpringExtension.class) -@ContextConfiguration(classes={EmbeddedDataSourceConfiguration.class, - DataFlowTestConfiguration.class,StepBeanDefinitionRegistrar.class, +@SpringJUnitConfig(classes = {EmbeddedDataSourceConfiguration.class, + DataFlowTestConfiguration.class, StepBeanDefinitionRegistrar.class, ComposedTaskRunnerConfiguration.class, StepBeanDefinitionRegistrar.class}) -@TestPropertySource(properties = {"graph=ComposedTest-AAA && ComposedTest-BBB && ComposedTest-CCC","max-wait-time=1010", - "composed-task-properties=" + ComposedTaskRunnerConfigurationWithPropertiesTests.COMPOSED_TASK_PROPS , +@TestPropertySource(properties = {"graph=ComposedTest-AAA && ComposedTest-BBB && ComposedTest-CCC", "max-wait-time=1010", + "composed-task-properties=" + ComposedTaskRunnerConfigurationWithPropertiesTests.COMPOSED_TASK_PROPS, "interval-time-between-checks=1100", "composed-task-arguments=--baz=boo --AAA.foo=bar BBB.que=qui", - "dataflow-server-uri=https://bar", "spring.cloud.task.name=ComposedTest"}) -@EnableAutoConfiguration(exclude = { CommonSecurityAutoConfiguration.class}) -public class ComposedTaskRunnerConfigurationWithPropertiesTests { + "transaction-isolation-level=ISOLATION_READ_COMMITTED", "spring.cloud.task.closecontext-enabled=true", + "dataflow-server-uri=https://bar", "spring.cloud.task.name=ComposedTest", "max-start-wait-time=1011"}) +@EnableAutoConfiguration(exclude = {CommonSecurityAutoConfiguration.class}) +class ComposedTaskRunnerConfigurationWithPropertiesTests { @Autowired private JobRepository jobRepository; @@ -72,16 +71,22 @@ public class ComposedTaskRunnerConfigurationWithPropertiesTests { @Autowired private ComposedTaskProperties composedTaskProperties; + @Autowired + private TaskProperties taskProperties; + @Autowired ApplicationContext context; protected static final String COMPOSED_TASK_PROPS = "app.ComposedTest-AAA.format=yyyy, " - + "app.ComposedTest-BBB.format=mm, " - + "deployer.ComposedTest-AAA.memory=2048m"; + + "app.ComposedTest-BBB.format=mm, " + + "deployer.ComposedTest-AAA.memory=2048m"; @Test @DirtiesContext - public void testComposedConfiguration() throws Exception { + void composedConfiguration() throws Exception { + 
assertThat(composedTaskProperties.isSkipTlsCertificateVerification()).isFalse(); + + JobExecution jobExecution = this.jobRepository.createJobExecution( "ComposedTest", new JobParameters()); TaskletStep ctrStep = context.getBean("ComposedTest-AAA_0", TaskletStep.class); @@ -93,13 +98,19 @@ public void testComposedConfiguration() throws Exception { Map props = new HashMap<>(1); props.put("format", "yyyy"); props.put("memory", "2048m"); - assertEquals(COMPOSED_TASK_PROPS, composedTaskProperties.getComposedTaskProperties()); - assertEquals(1010, composedTaskProperties.getMaxWaitTime()); - assertEquals(1100, composedTaskProperties.getIntervalTimeBetweenChecks()); - assertEquals("https://bar/", composedTaskProperties.getDataflowServerUri().toASCIIString()); - List args = new ArrayList<>(1); + assertThat(composedTaskProperties.getComposedTaskProperties()).isEqualTo(COMPOSED_TASK_PROPS); + assertThat(composedTaskProperties.getMaxWaitTime()).isEqualTo(1010); + assertThat(composedTaskProperties.getMaxStartWaitTime()).isEqualTo(1011); + assertThat(composedTaskProperties.getIntervalTimeBetweenChecks()).isEqualTo(1100); + assertThat(composedTaskProperties.getDataflowServerUri().toASCIIString()).isEqualTo("https://bar/"); + assertThat(composedTaskProperties.getTransactionIsolationLevel()).isEqualTo("ISOLATION_READ_COMMITTED"); + assertThat(taskProperties.getClosecontextEnabled()).isTrue(); + + List args = new ArrayList<>(2); args.add("--baz=boo --foo=bar"); + args.add("--spring.cloud.task.parent-execution-id=1"); Assert.notNull(job.getJobParametersIncrementer(), "JobParametersIncrementer must not be null."); + verify(taskOperations).launch("ComposedTest-AAA", props, args); } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesWithLabelTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesWithLabelTests.java index 7ee7872331..dc615aa669 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesWithLabelTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesWithLabelTests.java @@ -22,7 +22,8 @@ import java.util.Map; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.batch.core.Job; import org.springframework.batch.core.JobExecution; @@ -37,29 +38,29 @@ import org.springframework.cloud.dataflow.composedtaskrunner.support.ComposedTaskRunnerTaskletTestUtils; import org.springframework.context.ApplicationContext; import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.TestPropertySource; -import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.util.Assert; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertEquals; /** * @author Glenn Renfro + * @author Corneil du Plessis */ -@ExtendWith(SpringExtension.class) 
-@ContextConfiguration(classes={EmbeddedDataSourceConfiguration.class, - DataFlowTestConfiguration.class,StepBeanDefinitionRegistrar.class, +@SpringJUnitConfig(classes = {EmbeddedDataSourceConfiguration.class, + DataFlowTestConfiguration.class, StepBeanDefinitionRegistrar.class, ComposedTaskRunnerConfiguration.class}) -@TestPropertySource(properties = {"graph=ComposedTest-l1 && ComposedTest-l2 && ComposedTest-l3","max-wait-time=1010", +@TestPropertySource(properties = {"graph=ComposedTest-l1 && ComposedTest-l2 && ComposedTest-l11", "max-wait-time=1010", "composed-task-app-properties.app.l1.AAA.format=yyyy", + "composed-task-app-properties.app.l11.AAA.format=yyyy", + "composed-task-app-properties.app.l2.AAA.format=yyyy", "interval-time-between-checks=1100", "composed-task-arguments=--baz=boo", "dataflow-server-uri=https://bar", "spring.cloud.task.name=ComposedTest"}) -@EnableAutoConfiguration(exclude = { CommonSecurityAutoConfiguration.class}) -public class ComposedTaskRunnerConfigurationWithPropertiesWithLabelTests { - +@EnableAutoConfiguration(exclude = {CommonSecurityAutoConfiguration.class}) +class ComposedTaskRunnerConfigurationWithPropertiesWithLabelTests { + private static final Logger logger = LoggerFactory.getLogger(ComposedTaskRunnerConfigurationWithPropertiesWithLabelTests.class); @Autowired private JobRepository jobRepository; @@ -74,29 +75,37 @@ public class ComposedTaskRunnerConfigurationWithPropertiesWithLabelTests { @Test @DirtiesContext - public void testComposedConfiguration() throws Exception { + void composedConfiguration() throws Exception { JobExecution jobExecution = this.jobRepository.createJobExecution( - "ComposedTest", new JobParameters()); + "ComposedTest", new JobParameters()); job.execute(jobExecution); Map props = new HashMap<>(1); props.put("app.l1.AAA.format", "yyyy"); - Map composedTaskAppProperties = new HashMap<>(1); + props.put("app.l2.AAA.format", "yyyy"); + props.put("app.l11.AAA.format", "yyyy"); + Map composedTaskAppProperties = new HashMap<>(); composedTaskAppProperties.put("app.l1.AAA.format", "yyyy"); + composedTaskAppProperties.put("app.l2.AAA.format", "yyyy"); + composedTaskAppProperties.put("app.l11.AAA.format", "yyyy"); - assertEquals(composedTaskAppProperties, composedTaskProperties.getComposedTaskAppProperties()); - assertEquals(1010, composedTaskProperties.getMaxWaitTime()); - assertEquals(1100, composedTaskProperties.getIntervalTimeBetweenChecks()); - assertEquals("https://bar/", composedTaskProperties.getDataflowServerUri().toASCIIString()); + assertThat(composedTaskProperties.getComposedTaskAppProperties()).isEqualTo(composedTaskAppProperties); + assertThat(composedTaskProperties.getMaxWaitTime()).isEqualTo(1010); + assertThat(composedTaskProperties.getIntervalTimeBetweenChecks()).isEqualTo(1100); + assertThat(composedTaskProperties.getDataflowServerUri().toASCIIString()).isEqualTo("https://bar/"); List args = new ArrayList<>(1); args.add("--baz=boo"); Assert.notNull(job.getJobParametersIncrementer(), "JobParametersIncrementer must not be null."); TaskLauncherTasklet tasklet = ComposedTaskRunnerTaskletTestUtils.getTaskletLauncherTasklet(context, "ComposedTest-l1_0"); List result = ComposedTaskRunnerTaskletTestUtils.getTaskletArgumentsViaReflection(tasklet); - assertThat(result).contains("--baz=boo"); - assertThat(result.size()).isEqualTo(1); + assertThat(result) + .contains("--baz=boo") + .hasSize(1); Map taskletProperties = ComposedTaskRunnerTaskletTestUtils.getTaskletPropertiesViaReflection(tasklet); - 
assertThat(taskletProperties.size()).isEqualTo(1); - assertThat(taskletProperties.get("app.l1.AAA.format")).isEqualTo("yyyy"); + logger.info("taskletProperties:{}", taskletProperties); + assertThat(taskletProperties.keySet()).containsExactly("app.l1.AAA.format"); + assertThat(taskletProperties) + .hasSize(1) + .containsEntry("app.l1.AAA.format", "yyyy"); } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithVersionPropertiesTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithVersionPropertiesTests.java index 213de991f2..2c752dc08a 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithVersionPropertiesTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithVersionPropertiesTests.java @@ -22,7 +22,6 @@ import java.util.Map; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.batch.core.Job; import org.springframework.batch.core.JobExecution; @@ -37,25 +36,24 @@ import org.springframework.cloud.dataflow.composedtaskrunner.support.ComposedTaskRunnerTaskletTestUtils; import org.springframework.context.ApplicationContext; import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.TestPropertySource; -import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import static org.assertj.core.api.Assertions.assertThat; /** * @author Janne Valkealahti + * @author Corneil du Plessis */ -@ExtendWith(SpringExtension.class) -@ContextConfiguration(classes={EmbeddedDataSourceConfiguration.class, - DataFlowTestConfiguration.class,StepBeanDefinitionRegistrar.class, +@SpringJUnitConfig(classes = {EmbeddedDataSourceConfiguration.class, + DataFlowTestConfiguration.class, StepBeanDefinitionRegistrar.class, ComposedTaskRunnerConfiguration.class}) -@TestPropertySource(properties = {"graph=ComposedTest-AAA && ComposedTest-BBB && ComposedTest-CCC","max-wait-time=1010", -"composed-task-properties=" + ComposedTaskRunnerConfigurationWithVersionPropertiesTests.COMPOSED_TASK_PROPS , +@TestPropertySource(properties = {"graph=ComposedTest-AAA && ComposedTest-BBB && ComposedTest-CCC", "max-wait-time=1010", + "composed-task-properties=" + ComposedTaskRunnerConfigurationWithVersionPropertiesTests.COMPOSED_TASK_PROPS, "interval-time-between-checks=1100", "composed-task-arguments=--baz=boo --AAA.foo=bar BBB.que=qui", "dataflow-server-uri=https://bar", "spring.cloud.task.name=ComposedTest"}) -@EnableAutoConfiguration(exclude = { CommonSecurityAutoConfiguration.class}) -public class ComposedTaskRunnerConfigurationWithVersionPropertiesTests { +@EnableAutoConfiguration(exclude = {CommonSecurityAutoConfiguration.class}) +class ComposedTaskRunnerConfigurationWithVersionPropertiesTests { @Autowired private JobRepository jobRepository; @@ -73,7 +71,7 @@ public class ComposedTaskRunnerConfigurationWithVersionPropertiesTests { @Test @DirtiesContext - public void testComposedConfiguration() throws Exception { + void composedConfiguration() throws Exception { JobExecution 
jobExecution = this.jobRepository.createJobExecution( "ComposedTest", new JobParameters()); job.execute(jobExecution); @@ -90,10 +88,12 @@ public void testComposedConfiguration() throws Exception { TaskLauncherTasklet tasklet = ComposedTaskRunnerTaskletTestUtils.getTaskletLauncherTasklet(context, "ComposedTest-AAA_0"); List result = ComposedTaskRunnerTaskletTestUtils.getTaskletArgumentsViaReflection(tasklet); - assertThat(result).contains("--baz=boo --foo=bar"); - assertThat(result.size()).isEqualTo(1); + assertThat(result) + .contains("--baz=boo --foo=bar") + .hasSize(1); Map taskletProperties = ComposedTaskRunnerTaskletTestUtils.getTaskletPropertiesViaReflection(tasklet); - assertThat(taskletProperties.size()).isEqualTo(1); - assertThat(taskletProperties.get("version.AAA")).isEqualTo("1.0.0"); + assertThat(taskletProperties) + .hasSize(1) + .containsEntry("version.AAA", "1.0.0"); } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactoryTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactoryTests.java index 061b083292..547916c209 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactoryTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactoryTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -19,45 +19,56 @@ import javax.sql.DataSource; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; - import org.springframework.batch.core.Step; import org.springframework.batch.core.StepExecutionListener; -import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.step.builder.StepBuilder; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration; +import org.springframework.boot.sql.init.dependency.DependsOnDatabaseInitialization; import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.cloud.common.security.CommonSecurityAutoConfiguration; +import org.springframework.cloud.dataflow.composedtaskrunner.configuration.DataFlowTestConfiguration; import org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties; import org.springframework.cloud.dataflow.rest.client.TaskOperations; import org.springframework.cloud.task.configuration.TaskConfigurer; import org.springframework.cloud.task.configuration.TaskProperties; import org.springframework.cloud.task.repository.TaskExplorer; +import org.springframework.cloud.task.repository.TaskNameResolver; import org.springframework.cloud.task.repository.TaskRepository; +import org.springframework.cloud.task.repository.support.SimpleTaskNameResolver; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.context.TestPropertySource; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.transaction.PlatformTransactionManager; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; /** * @author Glenn Renfro + * @author Corneil du Plessis */ -@ExtendWith(SpringExtension.class) -@ContextConfiguration(classes={org.springframework.cloud.dataflow.composedtaskrunner.ComposedTaskRunnerStepFactoryTests.StepFactoryConfiguration.class}) -public class ComposedTaskRunnerStepFactoryTests { +@SpringJUnitConfig(classes = {EmbeddedDataSourceConfiguration.class, + DataFlowTestConfiguration.class, StepBeanDefinitionRegistrar.class, + ComposedTaskRunnerConfiguration.class}) +@EnableAutoConfiguration(exclude = {CommonSecurityAutoConfiguration.class}) +@TestPropertySource(properties = {"graph=FOOBAR", "max-wait-time=1000", "increment-instance-enabled=true", "spring.cloud.task.name=footest"}) +class ComposedTaskRunnerStepFactoryTests { @Autowired ComposedTaskRunnerStepFactory stepFactory; @Test - public void testStep() throws Exception{ + void step() throws Exception { Step step = stepFactory.getObject(); - assertEquals("FOOBAR", step.getName()); - assertEquals(Integer.MAX_VALUE, step.getStartLimit()); + assertThat(step).isNotNull(); + assertThat(step.getName()).isEqualTo("FOOBAR_0"); + assertThat(step.getStartLimit()).isEqualTo(Integer.MAX_VALUE); } @Configuration @@ -80,11 +91,12 @@ public TaskProperties taskProperties() { } @Bean - public StepBuilderFactory steps(){ - return new
StepBuilderFactory(mock(JobRepository.class), mock(PlatformTransactionManager.class)); + public StepBuilder steps() { + return new StepBuilder("foo", mock(JobRepository.class)); } @Bean + @DependsOnDatabaseInitialization public TaskConfigurer taskConfigurer() { return new TaskConfigurer() { @Override @@ -106,12 +118,12 @@ public TaskExplorer getTaskExplorer() { public DataSource getTaskDataSource() { return mock(DataSource.class); } - }; - } - @Bean - public ComposedTaskRunnerStepFactory stepFactory(TaskProperties taskProperties) { - return new ComposedTaskRunnerStepFactory(new ComposedTaskProperties(), "FOOBAR", "BAR"); + @Override + public TaskNameResolver getTaskNameResolver() { + return new SimpleTaskNameResolver(); + } + }; } } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListenerTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListenerTests.java index 06a22c168e..ee12fb8319 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListenerTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListenerTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2020 the original author or authors. + * Copyright 2017-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,7 +16,7 @@ package org.springframework.cloud.dataflow.composedtaskrunner; -import java.util.Date; +import java.time.LocalDateTime; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -26,9 +26,8 @@ import org.springframework.batch.core.StepExecution; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskExplorer; -import org.springframework.test.util.ReflectionTestUtils; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -36,7 +35,7 @@ /** * @author Glenn Renfro */ -public class ComposedTaskStepExecutionListenerTests { +class ComposedTaskStepExecutionListenerTests { private TaskExplorer taskExplorer; @@ -45,51 +44,49 @@ public class ComposedTaskStepExecutionListenerTests { private ComposedTaskStepExecutionListener taskListener; @BeforeEach - public void setup() { + void setup() { this.taskExplorer = mock(TaskExplorer.class); this.stepExecution = getStepExecution(); - this.taskListener = - new ComposedTaskStepExecutionListener(this.taskExplorer); - ReflectionTestUtils.setField(this.taskListener, "taskExplorer", this.taskExplorer); + this.taskListener = new ComposedTaskStepExecutionListener(taskExplorer); } @Test - public void testSuccessfulRun() { + void successfulRun() { TaskExecution taskExecution = getDefaultTaskExecution(0, null); when(this.taskExplorer.getTaskExecution(anyLong())).thenReturn(taskExecution); - populateExecutionContext(111L); - assertEquals(ExitStatus.COMPLETED, this.taskListener.afterStep(this.stepExecution)); + populateExecutionContext(taskExecution.getTaskName(),111L); + 
assertThat(this.taskListener.afterStep(this.stepExecution)).isEqualTo(ExitStatus.COMPLETED); } @Test - public void testExitMessageRunSuccess() { + void exitMessageRunSuccess() { ExitStatus expectedTaskStatus = new ExitStatus("TEST_EXIT_MESSAGE"); TaskExecution taskExecution = getDefaultTaskExecution(0, expectedTaskStatus.getExitCode()); when(this.taskExplorer.getTaskExecution(anyLong())).thenReturn(taskExecution); - populateExecutionContext(111L); + populateExecutionContext(taskExecution.getTaskName(), 111L); - assertEquals(expectedTaskStatus, this.taskListener.afterStep(this.stepExecution)); + assertThat(this.taskListener.afterStep(this.stepExecution)).isEqualTo(expectedTaskStatus); } @Test - public void testExitMessageRunFail() { + void exitMessageRunFail() { ExitStatus expectedTaskStatus = new ExitStatus("TEST_EXIT_MESSAGE"); TaskExecution taskExecution = getDefaultTaskExecution(1, expectedTaskStatus.getExitCode()); when(this.taskExplorer.getTaskExecution(anyLong())).thenReturn(taskExecution); - populateExecutionContext(111L); + populateExecutionContext(taskExecution.getTaskName(), 111L); - assertEquals(expectedTaskStatus, this.taskListener.afterStep(this.stepExecution)); + assertThat(this.taskListener.afterStep(this.stepExecution)).isEqualTo(expectedTaskStatus); } @Test - public void testFailedRun() { + void failedRun() { TaskExecution taskExecution = getDefaultTaskExecution(1, null); when(this.taskExplorer.getTaskExecution(anyLong())).thenReturn(taskExecution); - populateExecutionContext(111L); + populateExecutionContext(taskExecution.getTaskName(), 111L); - assertEquals(ExitStatus.FAILED, this.taskListener.afterStep(this.stepExecution)); + assertThat(this.taskListener.afterStep(this.stepExecution)).isEqualTo(ExitStatus.FAILED); } // @Test(IllegalArgumentException.class) @@ -108,17 +105,18 @@ private StepExecution getStepExecution() { return new StepExecution(STEP_NAME, jobExecution); } - private void populateExecutionContext(Long taskExecutionId) { - this.stepExecution.getExecutionContext().put("task-execution-id", - taskExecutionId); + private void populateExecutionContext(String taskName, Long taskExecutionId) { + this.stepExecution.getExecutionContext().put("task-name", taskName); + this.stepExecution.getExecutionContext().put("task-execution-id", taskExecutionId); } private TaskExecution getDefaultTaskExecution (int exitCode, String exitMessage) { TaskExecution taskExecution = new TaskExecution(); + taskExecution.setTaskName("test-ctr"); taskExecution.setExitMessage(exitMessage); taskExecution.setExitCode(exitCode); - taskExecution.setEndTime(new Date()); + taskExecution.setEndTime(LocalDateTime.now()); return taskExecution; } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTaskletTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTaskletTests.java index 6a4da19354..4a10783c42 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTaskletTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTaskletTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2024 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,34 +16,45 @@ package org.springframework.cloud.dataflow.composedtaskrunner; +import java.time.LocalDateTime; import java.util.ArrayList; -import java.util.Date; +import java.util.Collections; import java.util.List; import javax.sql.DataSource; -import org.assertj.core.api.Assertions; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.datatype.jdk8.Jdk8Module; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentMatchers; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.StepContribution; import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.UnexpectedJobExecutionException; -import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; import org.springframework.batch.core.scope.context.ChunkContext; import org.springframework.batch.core.scope.context.StepContext; +import org.springframework.batch.item.ExecutionContext; import org.springframework.batch.repeat.RepeatStatus; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration; import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.boot.sql.init.dependency.DependsOnDatabaseInitialization; import org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties; +import org.springframework.cloud.dataflow.composedtaskrunner.support.ComposedTaskException; import org.springframework.cloud.dataflow.composedtaskrunner.support.TaskExecutionTimeoutException; +import org.springframework.cloud.dataflow.composedtaskrunner.support.UnexpectedTaskExecutionException; import org.springframework.cloud.dataflow.rest.client.DataFlowClientException; +import org.springframework.cloud.dataflow.rest.client.DataFlowOperations; import org.springframework.cloud.dataflow.rest.client.TaskOperations; +import org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource; +import org.springframework.cloud.dataflow.rest.support.jackson.Jackson2DataflowModule; +import org.springframework.cloud.task.batch.listener.support.JdbcTaskBatchDao; import org.springframework.cloud.task.configuration.TaskProperties; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskExplorer; @@ -56,29 +67,32 @@ import org.springframework.cloud.task.repository.support.TaskRepositoryInitializer; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import org.springframework.core.env.Environment; import org.springframework.hateoas.Link; +import org.springframework.hateoas.mediatype.hal.Jackson2HalModule; import org.springframework.hateoas.mediatype.vnderrors.VndErrors; +import org.springframework.security.oauth2.client.endpoint.OAuth2AccessTokenResponseClient; +import org.springframework.security.oauth2.client.endpoint.OAuth2ClientCredentialsGrantRequest; +import 
org.springframework.security.oauth2.client.registration.ClientRegistrationRepository; import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.web.client.ResourceAccessException; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.fail; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.api.Assertions.fail; import static org.mockito.Mockito.mock; /** * @author Glenn Renfro + * @author Corneil du Plessis */ -@ExtendWith(SpringExtension.class) -@ContextConfiguration(classes={EmbeddedDataSourceConfiguration.class, +@SpringJUnitConfig(classes = {EmbeddedDataSourceConfiguration.class, org.springframework.cloud.dataflow.composedtaskrunner.TaskLauncherTaskletTests.TestConfiguration.class}) -public class TaskLauncherTaskletTests { - +class TaskLauncherTaskletTests { + private final static Logger logger = LoggerFactory.getLogger(TaskLauncherTaskletTests.class); private static final String TASK_NAME = "testTask1_0"; @Autowired @@ -93,21 +107,31 @@ public class TaskLauncherTaskletTests { @Autowired private JdbcTaskExecutionDao taskExecutionDao; + @Autowired + private Environment environment; private TaskOperations taskOperations; private TaskRepository taskRepository; private TaskExplorer taskExplorer; + private ObjectMapper mapper; + @BeforeEach - public void setup() throws Exception{ + void setup() throws Exception{ + if (this.mapper == null) { + this.mapper = new ObjectMapper(); + this.mapper.registerModule(new Jdk8Module()); + this.mapper.registerModule(new Jackson2HalModule()); + this.mapper.registerModule(new JavaTimeModule()); + this.mapper.registerModule(new Jackson2DataflowModule()); + } this.taskRepositoryInitializer.setDataSource(this.dataSource); - this.taskRepositoryInitializer.afterPropertiesSet(); this.taskOperations = mock(TaskOperations.class); TaskExecutionDaoFactoryBean taskExecutionDaoFactoryBean = - new TaskExecutionDaoFactoryBean(this.dataSource); + new TaskExecutionDaoFactoryBean(this.dataSource); this.taskRepository = new SimpleTaskRepository(taskExecutionDaoFactoryBean); this.taskExplorer = new SimpleTaskExplorer(taskExecutionDaoFactoryBean); this.composedTaskProperties.setIntervalTimeBetweenChecks(500); @@ -115,109 +139,152 @@ public void setup() throws Exception{ @Test @DirtiesContext - public void testTaskLauncherTasklet() throws Exception{ + void taskLauncherTasklet() { createCompleteTaskExecution(0); TaskLauncherTasklet taskLauncherTasklet = getTaskExecutionTasklet(); ChunkContext chunkContext = chunkContext(); mockReturnValForTaskExecution(1L); execute(taskLauncherTasklet, null, chunkContext); - assertEquals(1L, chunkContext.getStepContext() + assertThat(chunkContext.getStepContext() .getStepExecution().getExecutionContext() - .get("task-execution-id")); + .get("task-execution-id")).isEqualTo(1L); mockReturnValForTaskExecution(2L); chunkContext = chunkContext(); createCompleteTaskExecution(0); taskLauncherTasklet = getTaskExecutionTasklet(); execute(taskLauncherTasklet, null, chunkContext); - 
assertEquals(2L, chunkContext.getStepContext() + assertThat(chunkContext.getStepContext() .getStepExecution().getExecutionContext() - .get("task-execution-id")); + .get("task-execution-id")).isEqualTo(2L); } @Test @DirtiesContext - public void testTaskLauncherTaskletWithTaskExecutionId() throws Exception{ - TaskLauncherTasklet taskLauncherTasklet = prepTaskLauncherTests(); + void invalidTaskOperations() { + TaskLauncherTasklet taskLauncherTasklet = new TestTaskLauncherTasklet( + null, + null, + this.taskExplorer, + this.composedTaskProperties, + TASK_NAME, + new TaskProperties(), + environment, + mapper + ); + assertThatThrownBy( + () -> execute(taskLauncherTasklet, null, chunkContext()) + ).isInstanceOf(ComposedTaskException.class) + .hasMessage( + "Unable to connect to Data Flow Server to execute task operations. " + + "Verify that Data Flow Server's tasks/definitions endpoint can be accessed."); + } + @Test + @DirtiesContext + void taskLauncherTaskletWithTaskExecutionId() { TaskProperties taskProperties = new TaskProperties(); taskProperties.setExecutionid(88L); mockReturnValForTaskExecution(2L); ChunkContext chunkContext = chunkContext(); createCompleteTaskExecution(0); - taskLauncherTasklet = getTaskExecutionTasklet(taskProperties); + TaskLauncherTasklet taskLauncherTasklet = getTaskExecutionTasklet(taskProperties); taskLauncherTasklet.setArguments(null); execute(taskLauncherTasklet, null, chunkContext); - assertEquals(2L, chunkContext.getStepContext() + assertThat(chunkContext.getStepContext() .getStepExecution().getExecutionContext() - .get("task-execution-id")); - assertEquals("--spring.cloud.task.parent-execution-id=88", ((List)chunkContext.getStepContext() + .get("task-execution-id")).isEqualTo(2L); + assertThat(((List) chunkContext.getStepContext() .getStepExecution().getExecutionContext() - .get("task-arguments")).get(0)); + .get("task-arguments")).get(0)).isEqualTo("--spring.cloud.task.parent-execution-id=88"); + } + + @Test + @DirtiesContext + void taskLauncherTaskletWithoutTaskExecutionId() { + + mockReturnValForTaskExecution(2L); + ChunkContext chunkContext = chunkContext(); + JobExecution jobExecution = new JobExecution(0L, new JobParameters()); + + createAndStartCompleteTaskExecution(0, jobExecution); + + TaskLauncherTasklet taskLauncherTasklet = getTaskExecutionTasklet(); + taskLauncherTasklet.setArguments(null); + StepExecution stepExecution = new StepExecution("stepName", jobExecution, 0L); + StepContribution contribution = new StepContribution(stepExecution); + execute(taskLauncherTasklet, contribution, chunkContext); + ExecutionContext executionContext = chunkContext.getStepContext().getStepExecution().getExecutionContext(); + logger.info("execution-context:{}", executionContext.entrySet()); + assertThat(executionContext.get("task-execution-id")).isEqualTo(2L); + assertThat(executionContext.get("task-arguments")).as("task-arguments not null").isNotNull(); + assertThat(((List) executionContext.get("task-arguments")).get(0)).isEqualTo("--spring.cloud.task.parent-execution-id=1"); } + @SuppressWarnings("unchecked") @Test @DirtiesContext - public void testTaskLauncherTaskletWithTaskExecutionIdWithPreviousParentID() throws Exception{ + void taskLauncherTaskletWithTaskExecutionIdWithPreviousParentID() { - TaskLauncherTasklet taskLauncherTasklet = prepTaskLauncherTests(); TaskProperties taskProperties = new TaskProperties(); taskProperties.setExecutionid(88L); mockReturnValForTaskExecution(2L); ChunkContext chunkContext = chunkContext(); createCompleteTaskExecution(0); - 
chunkContext.getStepContext() - .getStepExecution().getExecutionContext().put("task-arguments", new ArrayList()); - ((List)chunkContext.getStepContext() - .getStepExecution().getExecutionContext() - .get("task-arguments")).add("--spring.cloud.task.parent-execution-id=84"); - taskLauncherTasklet = getTaskExecutionTasklet(taskProperties); + ExecutionContext executionContext = chunkContext.getStepContext().getStepExecution().getExecutionContext(); + executionContext.put("task-arguments", new ArrayList()); + List taskArguments = (List) executionContext.get("task-arguments"); + assertThat(taskArguments).as("taskArguments").isNotNull(); + taskArguments.add("--spring.cloud.task.parent-execution-id=84"); + TaskLauncherTasklet taskLauncherTasklet = getTaskExecutionTasklet(taskProperties); taskLauncherTasklet.setArguments(null); execute(taskLauncherTasklet, null, chunkContext); - assertEquals(2L, chunkContext.getStepContext() - .getStepExecution().getExecutionContext() - .get("task-execution-id")); - assertEquals("--spring.cloud.task.parent-execution-id=88", ((List)chunkContext.getStepContext() - .getStepExecution().getExecutionContext() - .get("task-arguments")).get(0)); + executionContext = chunkContext.getStepContext().getStepExecution().getExecutionContext(); + taskArguments = (List) executionContext.get("task-arguments"); + assertThat(executionContext.get("task-execution-id")).isEqualTo(2L); + assertThat(taskArguments.get(0)).isEqualTo("--spring.cloud.task.parent-execution-id=88"); } - private TaskLauncherTasklet prepTaskLauncherTests() throws Exception{ - createCompleteTaskExecution(0); - TaskLauncherTasklet taskLauncherTasklet = - getTaskExecutionTasklet(); - ChunkContext chunkContext = chunkContext(); + @Test + @DirtiesContext + void taskLauncherTaskletStartTimeout() { mockReturnValForTaskExecution(1L); - execute(taskLauncherTasklet, null, chunkContext); - assertEquals(1L, chunkContext.getStepContext() - .getStepExecution().getExecutionContext() - .get("task-execution-id")); - assertNull(chunkContext.getStepContext() - .getStepExecution().getExecutionContext() - .get("task-arguments")); - return taskLauncherTasklet; + this.composedTaskProperties.setMaxStartWaitTime(500); + this.composedTaskProperties.setIntervalTimeBetweenChecks(1000); + TaskLauncherTasklet taskLauncherTasklet = getTaskExecutionTasklet(); + ChunkContext chunkContext = chunkContext(); + assertThatThrownBy(() -> execute(taskLauncherTasklet, null, chunkContext)) + .isInstanceOf(TaskExecutionTimeoutException.class) + .hasMessage("Timeout occurred during startup of task with Execution Id 1"); + + createCompleteTaskExecution(0); + this.composedTaskProperties.setMaxStartWaitTime(500); + this.composedTaskProperties.setIntervalTimeBetweenChecks(1000); + TaskLauncherTasklet taskLauncherTaskletNoTimeout = getTaskExecutionTasklet(); + execute(taskLauncherTaskletNoTimeout, null, chunkContext); + // expect no exception } @Test @DirtiesContext - public void testTaskLauncherTaskletTimeout() { + void taskLauncherTaskletTimeout() { mockReturnValForTaskExecution(1L); this.composedTaskProperties.setMaxWaitTime(500); this.composedTaskProperties.setIntervalTimeBetweenChecks(1000); TaskLauncherTasklet taskLauncherTasklet = getTaskExecutionTasklet(); ChunkContext chunkContext = chunkContext(); - Throwable exception = assertThrows(TaskExecutionTimeoutException.class, () -> execute(taskLauncherTasklet, null, chunkContext)); - Assertions.assertThat(exception.getMessage()).isEqualTo("Timeout occurred while " + - "processing task with Execution
Id 1"); + assertThatThrownBy(() -> execute(taskLauncherTasklet, null, chunkContext)) + .isInstanceOf(TaskExecutionTimeoutException.class) + .hasMessage("Timeout occurred while processing task with Execution Id 1"); } @Test @DirtiesContext - public void testInvalidTaskName() { + void invalidTaskName() { final String ERROR_MESSAGE = "Could not find task definition named " + TASK_NAME; - VndErrors errors = new VndErrors("message", ERROR_MESSAGE, new Link("ref")); + VndErrors errors = new VndErrors("message", ERROR_MESSAGE, Link.of("ref")); Mockito.doThrow(new DataFlowClientException(errors)) .when(this.taskOperations) .launch(ArgumentMatchers.anyString(), @@ -225,14 +292,14 @@ public void testInvalidTaskName() { ArgumentMatchers.any()); TaskLauncherTasklet taskLauncherTasklet = getTaskExecutionTasklet(); ChunkContext chunkContext = chunkContext(); - Throwable exception = assertThrows(DataFlowClientException.class, - () -> taskLauncherTasklet.execute(null, chunkContext)); - Assertions.assertThat(exception.getMessage()).isEqualTo(ERROR_MESSAGE); + assertThatThrownBy( + () -> taskLauncherTasklet.execute(null, chunkContext) + ).isInstanceOf(DataFlowClientException.class).hasMessage(ERROR_MESSAGE); } @Test @DirtiesContext - public void testNoDataFlowServer() { + void noDataFlowServer() { final String ERROR_MESSAGE = "I/O error on GET request for \"http://localhost:9393/": Connection refused; nested exception is java.net.ConnectException: Connection refused"; Mockito.doThrow(new ResourceAccessException(ERROR_MESSAGE)) @@ -241,25 +308,29 @@ public void testNoDataFlowServer() { ArgumentMatchers.any()); TaskLauncherTasklet taskLauncherTasklet = getTaskExecutionTasklet(); ChunkContext chunkContext = chunkContext(); - Throwable exception = assertThrows(ResourceAccessException.class, - () -> execute(taskLauncherTasklet, null, chunkContext)); - Assertions.assertThat(exception.getMessage()).isEqualTo(ERROR_MESSAGE); + assertThatThrownBy(() -> execute(taskLauncherTasklet, null, chunkContext)) + .isInstanceOf(ResourceAccessException.class) + .hasMessage(ERROR_MESSAGE); } @Test @DirtiesContext - public void testTaskLauncherTaskletFailure() { + void taskLauncherTaskletFailure() { mockReturnValForTaskExecution(1L); TaskLauncherTasklet taskLauncherTasklet = getTaskExecutionTasklet(); ChunkContext chunkContext = chunkContext(); - createCompleteTaskExecution(1); - Throwable exception = assertThrows(UnexpectedJobExecutionException.class, - () -> execute(taskLauncherTasklet, null, chunkContext)); - Assertions.assertThat(exception.getMessage()).isEqualTo("Task returned a non zero exit code."); + createCompleteTaskExecution(1, "This is the exit message of the task itself."); + assertThatThrownBy(() -> execute(taskLauncherTasklet, null, chunkContext)) + .isInstanceOf(UnexpectedTaskExecutionException.class) + .hasMessage("Task returned a non zero exit code.") + .matches(x -> ((UnexpectedTaskExecutionException) x).getExitCode() == 1) + .matches(x -> ((UnexpectedTaskExecutionException) x).getExitMessage() + .equals("This is the exit message of the task itself.")) + .matches(x -> ((UnexpectedTaskExecutionException) x).getEndTime() != null); } private RepeatStatus execute(TaskLauncherTasklet taskLauncherTasklet, StepContribution contribution, - ChunkContext chunkContext) throws Exception{ + ChunkContext chunkContext) { RepeatStatus status = taskLauncherTasklet.execute(contribution, chunkContext); if (!status.isContinuable()) { throw new IllegalStateException("Expected continuable status for the first execution."); 
@@ -270,63 +341,134 @@ private RepeatStatus execute(TaskLauncherTasklet taskLauncherTasklet, StepContri @Test @DirtiesContext - public void testTaskLauncherTaskletNullResult() throws Exception { - boolean isException = false; + void taskLauncherTaskletNullResult() { mockReturnValForTaskExecution(1L); TaskLauncherTasklet taskLauncherTasklet = getTaskExecutionTasklet(); ChunkContext chunkContext = chunkContext(); getCompleteTaskExecutionWithNull(); - Throwable exception = assertThrows(UnexpectedJobExecutionException.class, - () -> execute(taskLauncherTasklet, null, chunkContext)); - Assertions.assertThat(exception.getMessage()).isEqualTo("Task returned a null exit code."); + assertThatThrownBy(() -> execute(taskLauncherTasklet, null, chunkContext)) + .isInstanceOf(UnexpectedTaskExecutionException.class) + .hasMessage("Task returned a null exit code."); } @Test - public void testTaskOperationsConfiguredWithMissingPassword() { + void taskOperationsConfiguredWithMissingPassword() { try { final ComposedTaskProperties composedTaskProperties = new ComposedTaskProperties(); composedTaskProperties.setDataflowServerUsername("foo"); TaskLauncherTasklet taskLauncherTasklet = new TaskLauncherTasklet(null, null, this.taskExplorer, composedTaskProperties, - TASK_NAME, new TaskProperties()); + TASK_NAME, new TaskProperties(), environment, mapper); taskLauncherTasklet.taskOperations(); } catch (IllegalArgumentException e) { - assertEquals("A username may be specified only together with a password", e.getMessage()); + assertThat(e.getMessage()).isEqualTo("A username may be specified only together with a password"); return; } fail("Expected an IllegalArgumentException to be thrown"); } + @Test + @DirtiesContext + void taskLauncherTaskletIgnoreExitMessage() { + createCompleteTaskExecution(0); + + TaskLauncherTasklet taskLauncherTasklet = + getTaskExecutionTasklet(); + taskLauncherTasklet.setArguments(Collections.singletonList("--ignoreExitMessage=true")); + ChunkContext chunkContext = chunkContext(); + mockReturnValForTaskExecution(1L); + execute(taskLauncherTasklet, null, chunkContext); + assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .get("task-execution-id")).isEqualTo(1L); + assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .containsKey(TaskLauncherTasklet.IGNORE_EXIT_MESSAGE)).isTrue(); + } + + @Test + @DirtiesContext + void taskLauncherTaskletIgnoreExitMessageViaProperties() { + createCompleteTaskExecution(0); + + TaskLauncherTasklet taskLauncherTasklet = + getTaskExecutionTasklet(); + taskLauncherTasklet.setProperties(Collections.singletonMap("app.foo." 
+ TaskLauncherTasklet.IGNORE_EXIT_MESSAGE_PROPERTY, "true")); + ChunkContext chunkContext = chunkContext(); + mockReturnValForTaskExecution(1L); + execute(taskLauncherTasklet, null, chunkContext); + assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .get("task-execution-id")).isEqualTo(1L); + assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .containsKey(TaskLauncherTasklet.IGNORE_EXIT_MESSAGE)).isTrue(); + } @Test - public void testTaskOperationsConfiguredWithMissingUsername() { + @DirtiesContext + void taskLauncherTaskletIgnoreExitMessageViaCommandLineOverride() { + createCompleteTaskExecution(0); + + TaskLauncherTasklet taskLauncherTasklet = + getTaskExecutionTasklet(); + taskLauncherTasklet.setArguments(Collections.singletonList("--ignoreExitMessage=false")); + taskLauncherTasklet.setProperties(Collections.singletonMap("app.foo." + TaskLauncherTasklet.IGNORE_EXIT_MESSAGE_PROPERTY, "true")); + ChunkContext chunkContext = chunkContext(); + mockReturnValForTaskExecution(1L); + execute(taskLauncherTasklet, null, chunkContext); + assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .get("task-execution-id")).isEqualTo(1L); + assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .containsKey(TaskLauncherTasklet.IGNORE_EXIT_MESSAGE)).isTrue(); + assertThat((Boolean) chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .get(TaskLauncherTasklet.IGNORE_EXIT_MESSAGE)).isFalse(); + } + + + @Test + void taskOperationsConfiguredWithMissingUsername() { try { final ComposedTaskProperties composedTaskProperties = new ComposedTaskProperties(); composedTaskProperties.setDataflowServerPassword("bar"); TaskLauncherTasklet taskLauncherTasklet = new TaskLauncherTasklet(null, null, this.taskExplorer, composedTaskProperties, - TASK_NAME, new TaskProperties()); + TASK_NAME, new TaskProperties(), environment, mapper); taskLauncherTasklet.taskOperations(); } catch (IllegalArgumentException e) { - assertEquals("A password may be specified only together with a username", e.getMessage()); + assertThat(e.getMessage()).isEqualTo("A password may be specified only together with a username"); return; } fail("Expected an IllegalArgumentException to be thrown"); } + private void createCompleteTaskExecution(int exitCode, String... message) { + TaskExecution taskExecution = this.taskRepository.createTaskExecution(); + this.taskRepository.completeTaskExecution(taskExecution.getExecutionId(), + exitCode, LocalDateTime.now(), message != null && message.length > 0 ?
message[0] : ""); + } - private void createCompleteTaskExecution(int exitCode) { + private void createAndStartCompleteTaskExecution(int exitCode, JobExecution jobExecution) { TaskExecution taskExecution = this.taskRepository.createTaskExecution(); + JdbcTaskBatchDao taskBatchDao = new JdbcTaskBatchDao(this.dataSource); + taskBatchDao.saveRelationship(taskExecution, jobExecution); this.taskRepository.completeTaskExecution(taskExecution.getExecutionId(), - exitCode, new Date(), ""); + exitCode, LocalDateTime.now(), ""); } private TaskExecution getCompleteTaskExecutionWithNull() { TaskExecution taskExecution = this.taskRepository.createTaskExecution(); - taskExecutionDao.completeTaskExecution(taskExecution.getExecutionId(), null, new Date(), "hello", "goodbye"); + taskExecutionDao.completeTaskExecution(taskExecution.getExecutionId(), null, LocalDateTime.now(), + "hello", "goodbye"); return taskExecution; } @@ -337,7 +479,7 @@ private TaskLauncherTasklet getTaskExecutionTasklet() { private TaskLauncherTasklet getTaskExecutionTasklet(TaskProperties taskProperties) { TaskLauncherTasklet taskLauncherTasklet = new TaskLauncherTasklet(null, null, this.taskExplorer, this.composedTaskProperties, - TASK_NAME, taskProperties); + TASK_NAME, taskProperties, environment, mapper); ReflectionTestUtils.setField(taskLauncherTasklet, "taskOperations", this.taskOperations); return taskLauncherTasklet; } @@ -354,7 +496,7 @@ private ChunkContext chunkContext () } private void mockReturnValForTaskExecution(long executionId) { - Mockito.doReturn(executionId) + Mockito.doReturn(new LaunchResponseResource(executionId)) .when(this.taskOperations) .launch(ArgumentMatchers.anyString(), ArgumentMatchers.any(), @@ -362,11 +504,11 @@ private void mockReturnValForTaskExecution(long executionId) { } @Configuration - @EnableBatchProcessing @EnableConfigurationProperties(ComposedTaskProperties.class) public static class TestConfiguration { @Bean + @DependsOnDatabaseInitialization TaskRepositoryInitializer taskRepositoryInitializer() { return new TaskRepositoryInitializer(new TaskProperties()); } @@ -377,4 +519,26 @@ TaskExecutionDao taskExecutionDao(DataSource dataSource) { } } + + private static class TestTaskLauncherTasklet extends TaskLauncherTasklet { + public TestTaskLauncherTasklet( + ClientRegistrationRepository clientRegistrations, + OAuth2AccessTokenResponseClient clientCredentialsTokenResponseClient, + TaskExplorer taskExplorer, + ComposedTaskProperties composedTaskProperties, String taskName, + TaskProperties taskProperties, + Environment environment, + ObjectMapper mapper) { + super(clientRegistrations, clientCredentialsTokenResponseClient,taskExplorer,composedTaskProperties,taskName,taskProperties, environment, mapper); + } + + @Override + protected DataFlowOperations dataFlowOperations() { + DataFlowOperations dataFlowOperations = Mockito.mock(DataFlowOperations.class); + Mockito.doReturn(null) + .when(dataFlowOperations) + .taskOperations(); + return dataFlowOperations; + } + } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/configuration/ComposedRunnerVisitorConfiguration.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/configuration/ComposedRunnerVisitorConfiguration.java index 1126386af0..3b8ffff91f 100644 --- 
a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/configuration/ComposedRunnerVisitorConfiguration.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/configuration/ComposedRunnerVisitorConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2020 the original author or authors. + * Copyright 2017-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,9 +21,9 @@ import org.springframework.batch.core.StepContribution; import org.springframework.batch.core.StepExecution; import org.springframework.batch.core.StepExecutionListener; -import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; -import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; +import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.builder.StepBuilder; import org.springframework.batch.core.step.tasklet.Tasklet; import org.springframework.batch.repeat.RepeatStatus; import org.springframework.beans.factory.annotation.Autowired; @@ -35,6 +35,7 @@ import org.springframework.context.annotation.Configuration; import org.springframework.core.task.TaskExecutor; import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; +import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.annotation.Isolation; import org.springframework.transaction.interceptor.DefaultTransactionAttribute; import org.springframework.transaction.interceptor.TransactionAttribute; @@ -44,12 +45,14 @@ * @author Ilayaperumal Gopinathan */ @Configuration -@EnableBatchProcessing @EnableConfigurationProperties(ComposedTaskProperties.class) public class ComposedRunnerVisitorConfiguration { @Autowired - private StepBuilderFactory steps; + private JobRepository jobRepository; + + @Autowired + private PlatformTransactionManager transactionManager; @Autowired private ComposedTaskProperties composedTaskProperties; @@ -173,26 +176,28 @@ public ExitStatus afterStep(StepExecution stepExecution) { private Step createTaskletStepWithListener(final String taskName, StepExecutionListener stepExecutionListener) { - return this.steps.get(taskName) + StepBuilder stepBuilder = new StepBuilder(taskName, jobRepository); + return stepBuilder .tasklet(new Tasklet() { @Override public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { return RepeatStatus.FINISHED; } - }) + }, this.transactionManager) .transactionAttribute(getTransactionAttribute()) .listener(stepExecutionListener) .build(); } private Step createTaskletStep(final String taskName) { - return this.steps.get(taskName) + StepBuilder stepBuilder = new StepBuilder(taskName, jobRepository); + return stepBuilder .tasklet(new Tasklet() { @Override public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { return RepeatStatus.FINISHED; } - }) + }, transactionManager) .transactionAttribute(getTransactionAttribute()) .build(); } diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/properties/ComposedTaskPropertiesTests.java 
b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/properties/ComposedTaskPropertiesTests.java index 21dbd40c6b..ca437ef8cc 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/properties/ComposedTaskPropertiesTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/properties/ComposedTaskPropertiesTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,83 +25,112 @@ import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.test.context.runner.ApplicationContextRunner; +import org.springframework.cloud.dataflow.core.Base64Utils; import org.springframework.core.env.StandardEnvironment; import org.springframework.core.env.SystemEnvironmentPropertySource; +import org.springframework.security.oauth2.core.ClientAuthenticationMethod; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNull; /** * @author Glenn Renfro * @author Gunnar Hillert + * @author Corneil du Plessis */ -public class ComposedTaskPropertiesTests { +class ComposedTaskPropertiesTests { private final ApplicationContextRunner contextRunner = new ApplicationContextRunner(); @Test - public void testGettersAndSetters() throws URISyntaxException{ + void gettersAndSetters() throws URISyntaxException{ ComposedTaskProperties properties = new ComposedTaskProperties(); properties.setComposedTaskProperties("aaa"); properties.setComposedTaskArguments("bbb"); properties.setIntervalTimeBetweenChecks(12345); properties.setMaxWaitTime(6789); + properties.setMaxStartWaitTime(101112); properties.setDataflowServerUri(new URI("http://test/")); properties.setGraph("ddd"); properties.setDataflowServerUsername("foo"); properties.setDataflowServerPassword("bar"); properties.setDataflowServerAccessToken("foobar"); - assertEquals("aaa", properties.getComposedTaskProperties()); - assertEquals("bbb", properties.getComposedTaskArguments()); - assertEquals(12345, properties.getIntervalTimeBetweenChecks()); - assertEquals(6789, properties.getMaxWaitTime()); - assertEquals("http://test/", properties.getDataflowServerUri().toString()); - assertEquals("ddd", properties.getGraph()); - assertEquals("foo", properties.getDataflowServerUsername()); - assertEquals("bar", properties.getDataflowServerPassword()); - assertEquals("foobar", properties.getDataflowServerAccessToken()); + properties.setSkipTlsCertificateVerification(true); + assertThat(properties.getComposedTaskProperties()).isEqualTo("aaa"); + assertThat(properties.getComposedTaskArguments()).isEqualTo("bbb"); + assertThat(properties.getIntervalTimeBetweenChecks()).isEqualTo(12345); + assertThat(properties.getMaxWaitTime()).isEqualTo(6789); + assertThat(properties.getMaxStartWaitTime()).isEqualTo(101112); + assertThat(properties.getDataflowServerUri()).hasToString("http://test/"); + assertThat(properties.getGraph()).isEqualTo("ddd"); + assertThat(properties.getDataflowServerUsername()).isEqualTo("foo"); + 
assertThat(properties.getDataflowServerPassword()).isEqualTo("bar"); + assertThat(properties.getDataflowServerAccessToken()).isEqualTo("foobar"); + assertThat(properties.isSkipTlsCertificateVerification()).isTrue(); + assertThat(properties.isUuidInstanceEnabled()).isTrue(); + properties.setUuidInstanceEnabled(true); + assertThat(properties.isUuidInstanceEnabled()).isTrue(); } @Test - public void testDataflowServerURIDefaults() { + void dataflowServerURIDefaults() { ComposedTaskProperties properties = new ComposedTaskProperties(); - assertEquals("http://localhost:9393/", properties.getDataflowServerUri().toString()); + assertThat(properties.getDataflowServerUri()).hasToString("http://localhost:9393/"); } @Test - public void testThreadDefaults() { + void skipSslVerificationDefaults() { ComposedTaskProperties properties = new ComposedTaskProperties(); - assertEquals(ComposedTaskProperties.SPLIT_THREAD_CORE_POOL_SIZE_DEFAULT, properties.getSplitThreadCorePoolSize()); - assertEquals(ComposedTaskProperties.SPLIT_THREAD_KEEP_ALIVE_SECONDS_DEFAULT, properties.getSplitThreadKeepAliveSeconds()); - assertEquals(ComposedTaskProperties.SPLIT_THREAD_MAX_POOL_SIZE_DEFAULT, properties.getSplitThreadMaxPoolSize()); - assertEquals(ComposedTaskProperties.SPLIT_THREAD_QUEUE_CAPACITY_DEFAULT, properties.getSplitThreadQueueCapacity()); - assertEquals("http://localhost:9393/", properties.getDataflowServerUri().toString()); - assertFalse(properties.isSplitThreadAllowCoreThreadTimeout()); - assertFalse(properties.isSplitThreadWaitForTasksToCompleteOnShutdown()); - assertNull(properties.getDataflowServerUsername()); - assertNull(properties.getDataflowServerPassword()); + assertThat(properties.isSkipTlsCertificateVerification()).isFalse(); } @Test - public void testComposedTaskAppArguments() { + void threadDefaults() { + ComposedTaskProperties properties = new ComposedTaskProperties(); + assertThat(properties.getSplitThreadCorePoolSize()).isEqualTo(ComposedTaskProperties.SPLIT_THREAD_CORE_POOL_SIZE_DEFAULT); + assertThat(properties.getSplitThreadKeepAliveSeconds()).isEqualTo(ComposedTaskProperties.SPLIT_THREAD_KEEP_ALIVE_SECONDS_DEFAULT); + assertThat(properties.getSplitThreadMaxPoolSize()).isEqualTo(ComposedTaskProperties.SPLIT_THREAD_MAX_POOL_SIZE_DEFAULT); + assertThat(properties.getSplitThreadQueueCapacity()).isEqualTo(ComposedTaskProperties.SPLIT_THREAD_QUEUE_CAPACITY_DEFAULT); + assertThat(properties.getDataflowServerUri()).hasToString("http://localhost:9393/"); + assertThat(properties.isSplitThreadAllowCoreThreadTimeout()).isFalse(); + assertThat(properties.isSplitThreadWaitForTasksToCompleteOnShutdown()).isFalse(); + assertThat(properties.getDataflowServerUsername()).isNull(); + assertThat(properties.getDataflowServerPassword()).isNull(); + } + + @Test + void composedTaskAppArguments() { this.contextRunner .withInitializer(context -> { Map map = new HashMap<>(); map.put("composed-task-app-arguments.app.AAA", "arg1"); map.put("composed-task-app-arguments.app.AAA.1", "arg2"); map.put("composed-task-app-arguments.app.AAA.2", "arg3"); + map.put("composed-task-app-arguments." + Base64Utils.encode("app.*.3"), Base64Utils.encode("arg4")); + map.put("composed-task-app-arguments." 
+ Base64Utils.encode("app.*.4"), "arg5"); context.getEnvironment().getPropertySources().addLast(new SystemEnvironmentPropertySource( StandardEnvironment.SYSTEM_ENVIRONMENT_PROPERTY_SOURCE_NAME, map)); }) .withUserConfiguration(Config1.class) .run((context) -> { ComposedTaskProperties properties = context.getBean(ComposedTaskProperties.class); - assertThat(properties.getComposedTaskAppArguments()).hasSize(3); + assertThat(properties.getComposedTaskAppArguments()).hasSize(5); assertThat(properties.getComposedTaskAppArguments()).containsEntry("app.AAA", "arg1"); assertThat(properties.getComposedTaskAppArguments()).containsEntry("app.AAA.1", "arg2"); assertThat(properties.getComposedTaskAppArguments()).containsEntry("app.AAA.2", "arg3"); + assertThat(Base64Utils.decodeMap(properties.getComposedTaskAppArguments())).containsEntry("app.*.3", "arg4"); + assertThat(Base64Utils.decodeMap(properties.getComposedTaskAppArguments())).containsEntry("app.*.4", "arg5"); + }); + } + + @Test + void assignmentOfOauth2ClientCredentialsClientAuthenticationMethod(){ + this.contextRunner + .withSystemProperties("OAUTH2_CLIENT_CREDENTIALS_CLIENT_AUTHENTICATION_METHOD=client_secret_post") + .withUserConfiguration(Config1.class).run((context) -> { + ComposedTaskProperties properties = context.getBean(ComposedTaskProperties.class); + assertThat(properties.getOauth2ClientCredentialsClientAuthenticationMethod()) + .withFailMessage("The OAuth2 client credentials client authentication method couldn't be assigned correctly.") + .isEqualTo(ClientAuthenticationMethod.CLIENT_SECRET_POST); }); } diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/support/OnOAuth2ClientCredentialsEnabledTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/support/OnOAuth2ClientCredentialsEnabledTests.java index 707a96c31f..a1870ad360 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/support/OnOAuth2ClientCredentialsEnabledTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/support/OnOAuth2ClientCredentialsEnabledTests.java @@ -24,38 +24,39 @@ import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; -import static org.hamcrest.Matchers.equalTo; +import static org.assertj.core.api.Assertions.assertThat; /** * @author Gunnar Hillert + * @author Corneil du Plessis */ -public class OnOAuth2ClientCredentialsEnabledTests { +class OnOAuth2ClientCredentialsEnabledTests { private AnnotationConfigApplicationContext context; @AfterEach - public void teardown() { + void teardown() { if (this.context != null) { this.context.close(); } } @Test - public void noPropertySet() throws Exception { + void noPropertySet() throws Exception { this.context = load(Config.class); - org.hamcrest.MatcherAssert.assertThat(context.containsBean("myBean"), equalTo(false)); + assertThat(context.containsBean("myBean")).isEqualTo(false); } @Test - public void propertyClientId() throws Exception { + void propertyClientId() throws Exception { this.context = load(Config.class, "oauth2-client-credentials-client-id:12345"); - org.hamcrest.MatcherAssert.assertThat(context.containsBean("myBean"), equalTo(true)); + assertThat(context.containsBean("myBean")).isEqualTo(true); } @Test - public void clientIdOnlyWithNoValue() throws Exception 
{ + void clientIdOnlyWithNoValue() throws Exception { this.context = load(Config.class, "oauth2-client-credentials-client-id:"); - org.hamcrest.MatcherAssert.assertThat(context.containsBean("myBean"), equalTo(false)); + assertThat(context.containsBean("myBean")).isEqualTo(false); } private AnnotationConfigApplicationContext load(Class config, String... env) { diff --git a/spring-cloud-dataflow-configuration-metadata/pom.xml b/spring-cloud-dataflow-configuration-metadata/pom.xml index f4fa2e65a7..7082a44ea8 100644 --- a/spring-cloud-dataflow-configuration-metadata/pom.xml +++ b/spring-cloud-dataflow-configuration-metadata/pom.xml @@ -4,16 +4,23 @@ org.springframework.cloud spring-cloud-dataflow-parent - 2.8.0-SNAPSHOT + 3.0.0-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-configuration-metadata + spring-cloud-dataflow-configuration-metadata + Spring Cloud Data Flow Configuration Metadata + UTF-8 + true + 3.4.1 org.springframework.cloud spring-cloud-dataflow-container-registry + ${project.version} org.springframework.boot @@ -28,6 +35,10 @@ org.springframework spring-web + + io.projectreactor.netty + reactor-netty + com.fasterxml.jackson.core jackson-core @@ -42,7 +53,7 @@ org.springframework.boot - spring-boot-loader + spring-boot-loader-classic org.springframework.boot @@ -59,4 +70,45 @@ test + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.13.0 + + true + ${java.version} + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + diff --git a/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/ApplicationConfigurationMetadataResolver.java b/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/ApplicationConfigurationMetadataResolver.java index a3e449ba70..e6a1910610 100644 --- a/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/ApplicationConfigurationMetadataResolver.java +++ b/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/ApplicationConfigurationMetadataResolver.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2017 the original author or authors. + * Copyright 2015-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -63,4 +63,14 @@ public URLClassLoader createAppClassLoader(Resource app) { public abstract List listProperties(Resource metadataResource, boolean exhaustive); public abstract Map> listPortNames(Resource metadataResource); + + /** + * Return information about option groupings, which come from additional + * metadata files that describe which options belong together. + * Keys in the map are arbitrary group ids and values are the full option ids.
+ * + * @param metadataResource the metadata resource + * @return map of option groups + */ + public abstract Map> listOptionGroups(Resource metadataResource); } diff --git a/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/ApplicationConfigurationMetadataResolverAutoConfiguration.java b/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/ApplicationConfigurationMetadataResolverAutoConfiguration.java index b0d8dc2180..a2631ec097 100644 --- a/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/ApplicationConfigurationMetadataResolverAutoConfiguration.java +++ b/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/ApplicationConfigurationMetadataResolverAutoConfiguration.java @@ -16,12 +16,12 @@ package org.springframework.cloud.dataflow.configuration.metadata; +import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.cloud.dataflow.configuration.metadata.container.ContainerImageMetadataResolver; import org.springframework.cloud.dataflow.configuration.metadata.container.DefaultContainerImageMetadataResolver; import org.springframework.cloud.dataflow.container.registry.ContainerRegistryService; import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; /** * Automatically exposes an {@link ApplicationConfigurationMetadataResolver} if none is already registered. @@ -29,7 +29,7 @@ * @author Eric Bottard * @author Christian Tzolov */ -@Configuration +@AutoConfiguration public class ApplicationConfigurationMetadataResolverAutoConfiguration { @Bean diff --git a/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/BootApplicationConfigurationMetadataResolver.java b/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/BootApplicationConfigurationMetadataResolver.java index 9a591c5fda..01c182acbd 100644 --- a/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/BootApplicationConfigurationMetadataResolver.java +++ b/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/BootApplicationConfigurationMetadataResolver.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2020 the original author or authors. + * Copyright 2016-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
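The listOptionGroups contract added above is fed by metadata properties whose keys start with org.springframework.cloud.dataflow.configuration-properties.option-groups and whose values are comma-separated option ids; the resolver changes below read them from META-INF/dataflow-configuration-option-groups.properties in the app archive, or from container image labels. A self-contained sketch of that parsing (the group name "server" and the option ids are made-up examples):

	import java.util.Arrays;
	import java.util.HashMap;
	import java.util.HashSet;
	import java.util.Map;
	import java.util.Properties;
	import java.util.Set;

	import org.springframework.util.StringUtils;

	public class OptionGroupsParsingSketch {

		private static final String PREFIX = "org.springframework.cloud.dataflow.configuration-properties.option-groups";

		public static void main(String[] args) {
			// Simulates an entry from dataflow-configuration-option-groups.properties.
			Properties properties = new Properties();
			properties.setProperty(PREFIX + ".server", "server.uri, server.username");

			Map<String, Set<String>> groups = new HashMap<>();
			for (String key : properties.stringPropertyNames()) {
				if (key.startsWith(PREFIX)) {
					// Strip "<prefix>." to obtain the group id.
					String groupKey = key.substring(PREFIX.length() + 1);
					groups.put(groupKey, new HashSet<>(Arrays.asList(StringUtils
							.delimitedListToStringArray(properties.getProperty(key), ",", " "))));
				}
			}
			System.out.println(groups); // e.g. {server=[server.uri, server.username]} (set order may vary)
		}
	}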
@@ -74,13 +74,11 @@ public class BootApplicationConfigurationMetadataResolver extends ApplicationCon private static final String CONFIGURATION_METADATA_PATTERN = "classpath*:/META-INF/spring-configuration-metadata.json"; - // this is superseded by name prefixed with dataflow and will get removed in future - private static final String DEPRECATED_SPRING_CONFIGURATION_PROPERTIES = "classpath*:/META-INF/spring-configuration-metadata-whitelist.properties"; + private static final String VISIBLE_PROPERTIES = "classpath*:/META-INF/dataflow-configuration-metadata.properties"; - // this is superseded by VISIBLE_PROPERTIES - private static final String DEPRECATED_DATAFLOW_CONFIGURATION_PROPERTIES = "classpath*:/META-INF/dataflow-configuration-metadata-whitelist.properties"; + private static final String PORT_MAPPING_PROPERTIES = "classpath*:/META-INF/dataflow-configuration-port-mapping.properties"; - private static final String VISIBLE_PROPERTIES = "classpath*:/META-INF/dataflow-configuration-metadata.properties"; + private static final String OPTION_GROUPS_PROPERTIES = "classpath*:/META-INF/dataflow-configuration-option-groups.properties"; private static final String CONFIGURATION_PROPERTIES_CLASSES = "configuration-properties.classes"; @@ -90,6 +88,8 @@ public class BootApplicationConfigurationMetadataResolver extends ApplicationCon private static final String CONFIGURATION_PROPERTIES_OUTBOUND_PORTS = "configuration-properties.outbound-ports"; + private static final String CONFIGURATION_PROPERTIES_OPTION_GROUPS = "org.springframework.cloud.dataflow.configuration-properties.option-groups"; + private static final String CONTAINER_IMAGE_CONFIGURATION_METADATA_LABEL_NAME = "org.springframework.cloud.dataflow.spring-configuration-metadata.json"; private final Set globalVisibleProperties = new HashSet<>(); @@ -124,23 +124,9 @@ public BootApplicationConfigurationMetadataResolver(ClassLoader parent, private static Resource[] visibleConfigurationMetadataResources(ClassLoader classLoader) throws IOException { ResourcePatternResolver resourcePatternResolver = new PathMatchingResourcePatternResolver(classLoader); Resource[] configurationResources = resourcePatternResolver.getResources(VISIBLE_PROPERTIES); - - Resource[] deprecatedSpringConfigurationResources = resourcePatternResolver - .getResources(DEPRECATED_SPRING_CONFIGURATION_PROPERTIES); - if (deprecatedSpringConfigurationResources.length > 0) { - logger.warn("The use of " + DEPRECATED_SPRING_CONFIGURATION_PROPERTIES + " is a deprecated. Please use " - + VISIBLE_PROPERTIES + " instead."); - } - Resource[] deprecatedDataflowConfigurationResources = resourcePatternResolver - .getResources(DEPRECATED_DATAFLOW_CONFIGURATION_PROPERTIES); - if (deprecatedDataflowConfigurationResources.length > 0) { - logger.warn("The use of " + DEPRECATED_DATAFLOW_CONFIGURATION_PROPERTIES - + " is a deprecated. Please use " + VISIBLE_PROPERTIES + " instead."); - } - - return concatArrays(configurationResources, deprecatedSpringConfigurationResources, - deprecatedDataflowConfigurationResources); - + Resource[] portMappingResources = resourcePatternResolver.getResources(PORT_MAPPING_PROPERTIES); + Resource[] groupingResources = resourcePatternResolver.getResources(OPTION_GROUPS_PROPERTIES); + return concatArrays(configurationResources, portMappingResources, groupingResources); } private static Resource[] concatArrays(final Resource[]... 
arrays) { @@ -205,6 +191,31 @@ public Map> listPortNames(Resource app) { return Collections.emptyMap(); } + @Override + public Map> listOptionGroups(Resource app) { + try { + if (app != null) { + if (isDockerSchema(app.getURI())) { + return resolveOptionGroupsFromContainerImage(app.getURI()); + } + else { + Archive archive = resolveAsArchive(app); + return listOptionGroups(archive); + } + } + } + catch (Exception e) { + logger.warn("Failed to retrieve option groups for resource {} because of {}", + app, ExceptionUtils.getRootCauseMessage(e)); + if (logger.isDebugEnabled()) { + logger.debug("(Details) failed to retrieve option groups for resource:" + app, e); + } + return Collections.emptyMap(); + } + + return Collections.emptyMap(); + } + private boolean isDockerSchema(URI uri) { return uri != null && uri.getScheme() != null && uri.getScheme().contains("docker"); } @@ -262,6 +273,22 @@ private Map> resolvePortNamesFromContainerImage(URI imageUri return portsMap; } + private Map> resolveOptionGroupsFromContainerImage(URI imageUri) { + String imageName = imageUri.getSchemeSpecificPart(); + Map labels = this.containerImageMetadataResolver.getImageLabels(imageName); + if (CollectionUtils.isEmpty(labels)) { + return Collections.emptyMap(); + } + Map> groupingsMap = new HashMap<>(); + labels.entrySet().stream() + .filter(e -> e.getKey().startsWith(CONFIGURATION_PROPERTIES_OPTION_GROUPS)) + .forEach(e -> { + String gKey = e.getKey().substring(CONFIGURATION_PROPERTIES_OPTION_GROUPS.length() + 1); + groupingsMap.put(gKey, new HashSet<>(Arrays.asList(StringUtils.delimitedListToStringArray(e.getValue(), ",", " ")))); + }); + return groupingsMap; + } + public List listProperties(Archive archive, boolean exhaustive) { try (URLClassLoader moduleClassLoader = new BootClassLoaderFactory(archive, parent).createClassLoader()) { List result = new ArrayList<>(); @@ -318,7 +345,6 @@ private Map> listPortNames(Archive archive) { Set inboundPorts = new HashSet<>(); Set outboundPorts = new HashSet<>(); Map> portsMap = new HashMap<>(); - for (Resource resource : visibleConfigurationMetadataResources(moduleClassLoader)) { Properties properties = new Properties(); properties.load(resource.getInputStream()); @@ -340,6 +366,28 @@ private Map> listPortNames(Archive archive) { } } + private Map> listOptionGroups(Archive archive) { + try (URLClassLoader moduleClassLoader = new BootClassLoaderFactory(archive, parent).createClassLoader()) { + Map> groupingsMap = new HashMap<>(); + for (Resource resource : visibleConfigurationMetadataResources(moduleClassLoader)) { + Properties properties = new Properties(); + properties.load(resource.getInputStream()); + for (String key : properties.stringPropertyNames()) { + if (key.startsWith(CONFIGURATION_PROPERTIES_OPTION_GROUPS)) { + String value = properties.getProperty(key); + String gKey = key.substring(CONFIGURATION_PROPERTIES_OPTION_GROUPS.length() + 1); + groupingsMap.put(gKey, new HashSet<>(Arrays.asList(StringUtils.delimitedListToStringArray(value, ",", " ")))); + } + } + } + return groupingsMap; + } + catch (Exception e) { + throw new AppMetadataResolutionException( + "Exception trying to list configuration properties option groups for application " + archive, e); + } + } + @Override public URLClassLoader createAppClassLoader(Resource app) { try { diff --git a/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/BootClassLoaderFactory.java
b/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/BootClassLoaderFactory.java index cb51292ab9..25997e5ced 100644 --- a/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/BootClassLoaderFactory.java +++ b/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/BootClassLoaderFactory.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2020 the original author or authors. + * Copyright 2016-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -26,27 +26,26 @@ /** * Strategy interface for creating a ClassLoader that mimics the one used when a boot - * uber-jar runs. + * uber jar runs. * * @author Eric Bottard + * @author Chris Bono */ public class BootClassLoaderFactory { - private static final String BOOT_13_LIBS_LOCATION = "lib/"; + private static final String BOOT_LIBS_LOCATION = "BOOT-INF/lib/"; - private static final String BOOT_14_LIBS_LOCATION = "BOOT-INF/lib/"; - - private static final String BOOT_14_CLASSESS_LOCATION = "BOOT-INF/classes/"; + private static final String BOOT_CLASSES_LOCATION = "BOOT-INF/classes/"; private final Archive archive; private final ClassLoader parent; /** - * Create a new factory for dealing with the given boot uberjar archive. + * Create a new factory for dealing with the given boot uber archive. * - * @param archive a boot uberjar Archive - * @param parent the parent classloader to set for new created ClassLoaders + * @param archive a boot uber archive + * @param parent the parent classloader to set for created classloaders */ public BootClassLoaderFactory(Archive archive, ClassLoader parent) { this.archive = archive; @@ -54,31 +53,16 @@ public BootClassLoaderFactory(Archive archive, ClassLoader parent) { } public URLClassLoader createClassLoader() { - boolean useBoot14Layout = false; - for (Archive.Entry entry : archive) { - if (entry.getName().startsWith(BOOT_14_LIBS_LOCATION)) { - useBoot14Layout = true; - break; - } - } - - ClassLoaderExposingLauncher launcher = useBoot14Layout ?
new Boot14ClassLoaderExposingLauncher() - : new Boot13ClassLoaderExposingLauncher(); - - return launcher.createClassLoader(); + return new ClassLoaderExposingLauncher().createClassLoader(); } - private abstract class ClassLoaderExposingLauncher extends ExecutableArchiveLauncher { + private class ClassLoaderExposingLauncher extends ExecutableArchiveLauncher { + ClassLoaderExposingLauncher() { super(archive); } - @Override - protected ClassLoader createClassLoader(URL[] urls) throws Exception { - return new LaunchedURLClassLoader(urls, parent); - } - - public URLClassLoader createClassLoader() { + URLClassLoader createClassLoader() { try { return (URLClassLoader) createClassLoader(getClassPathArchivesIterator()); } @@ -87,30 +71,19 @@ public URLClassLoader createClassLoader() { } } - } - - private class Boot13ClassLoaderExposingLauncher extends ClassLoaderExposingLauncher { - - @Override - protected boolean isNestedArchive(Archive.Entry entry) { - return !entry.isDirectory() && entry.getName().startsWith(BOOT_13_LIBS_LOCATION); - } - @Override - protected void postProcessClassPathArchives(List archives) throws Exception { - archives.add(0, getArchive()); + protected ClassLoader createClassLoader(URL[] urls) { + return new LaunchedURLClassLoader(urls, parent); } - } - private class Boot14ClassLoaderExposingLauncher extends ClassLoaderExposingLauncher { @Override protected boolean isNestedArchive(Archive.Entry entry) { - return (!entry.isDirectory() && entry.getName().startsWith(BOOT_14_LIBS_LOCATION)) - || (entry.isDirectory() && entry.getName().equals(BOOT_14_CLASSESS_LOCATION)); + return (!entry.isDirectory() && entry.getName().startsWith(BOOT_LIBS_LOCATION)) + || (entry.isDirectory() && entry.getName().equals(BOOT_CLASSES_LOCATION)); } @Override - protected void postProcessClassPathArchives(List archives) throws Exception { + protected void postProcessClassPathArchives(List archives) { archives.add(0, getArchive()); } } diff --git a/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/container/DefaultContainerImageMetadataResolver.java b/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/container/DefaultContainerImageMetadataResolver.java index 0af21b76cb..44f93a8d81 100644 --- a/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/container/DefaultContainerImageMetadataResolver.java +++ b/spring-cloud-dataflow-configuration-metadata/src/main/java/org/springframework/cloud/dataflow/configuration/metadata/container/DefaultContainerImageMetadataResolver.java @@ -20,6 +20,7 @@ import java.util.Map; import org.springframework.cloud.dataflow.container.registry.ContainerRegistryException; +import org.springframework.cloud.dataflow.container.registry.ContainerRegistryProperties; import org.springframework.cloud.dataflow.container.registry.ContainerRegistryRequest; import org.springframework.cloud.dataflow.container.registry.ContainerRegistryService; import org.springframework.util.StringUtils; @@ -30,6 +31,7 @@ * * @author Christian Tzolov * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ public class DefaultContainerImageMetadataResolver implements ContainerImageMetadataResolver { @@ -39,6 +41,7 @@ public DefaultContainerImageMetadataResolver(ContainerRegistryService containerR this.containerRegistryService = containerRegistryService; } + @SuppressWarnings("unchecked") @Override public 
Map getImageLabels(String imageName) { @@ -48,12 +51,23 @@ public Map getImageLabels(String imageName) { ContainerRegistryRequest registryRequest = this.containerRegistryService.getRegistryRequest(imageName); - Map manifest = this.containerRegistryService.getImageManifest(registryRequest, Map.class); - - if (manifest != null && !isNotNullMap(manifest.get("config"))) { - throw new ContainerRegistryException( - String.format("Image [%s] has incorrect or missing manifest config element: %s", - imageName, manifest.toString())); + Map manifest = this.containerRegistryService.getImageManifest(registryRequest, Map.class); + + if (manifest != null && manifest.get("config") == null) { + // When both Docker and OCI images are stored in the repository, the response for an OCI image requested with the Docker manifest media type will not contain a config element. + // If we don't receive a config and schemaVersion is less than 2, retry using the OCI manifest media type. + String manifestMediaType = registryRequest.getRegistryConf().getManifestMediaType(); + if (asInt(manifest.get("schemaVersion")) < 2 + && !manifestMediaType.equals(ContainerRegistryProperties.OCI_IMAGE_MANIFEST_MEDIA_TYPE)) { + registryRequest.getRegistryConf() + .setManifestMediaType(ContainerRegistryProperties.OCI_IMAGE_MANIFEST_MEDIA_TYPE); + manifest = this.containerRegistryService.getImageManifest(registryRequest, Map.class); + } + if (manifest == null || manifest.get("config") == null) { + String message = String.format("Image [%s] has incorrect or missing manifest config element: %s", + imageName, manifest); + throw new ContainerRegistryException(message); + } } if (manifest != null) { String configDigest = ((Map) manifest.get("config")).get("digest"); @@ -85,12 +99,24 @@ public Map getImageLabels(String imageName) { (Map) configElement.get("Labels") : Collections.emptyMap(); } else { - throw new ContainerRegistryException( - String.format("Image [%s] is missing manifest", imageName)); + throw new ContainerRegistryException(String.format("Image [%s] is missing manifest", imageName)); + } + } + + private static int asInt(Object value) { + if (value instanceof Number number) { + return number.intValue(); + } + else if (value instanceof String string) { + return Integer.parseInt(string); + } + else if (value != null) { + return Integer.parseInt(value.toString()); } + return 0; } - private boolean isNotNullMap(Object object) { - return object != null && (object instanceof Map); + private static boolean isNotNullMap(Object object) { + return object instanceof Map; } } diff --git a/spring-cloud-dataflow-configuration-metadata/src/main/resources/META-INF/spring.factories b/spring-cloud-dataflow-configuration-metadata/src/main/resources/META-INF/spring.factories deleted file mode 100644 index b2de9e2d12..0000000000 --- a/spring-cloud-dataflow-configuration-metadata/src/main/resources/META-INF/spring.factories +++ /dev/null @@ -1,2 +0,0 @@ -org.springframework.boot.autoconfigure.EnableAutoConfiguration:\ -org.springframework.cloud.dataflow.configuration.metadata.ApplicationConfigurationMetadataResolverAutoConfiguration diff --git a/spring-cloud-dataflow-configuration-metadata/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports b/spring-cloud-dataflow-configuration-metadata/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports new file mode 100644 index 0000000000..5088d10aa1 --- /dev/null +++
b/spring-cloud-dataflow-configuration-metadata/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports @@ -0,0 +1 @@ +org.springframework.cloud.dataflow.configuration.metadata.ApplicationConfigurationMetadataResolverAutoConfiguration \ No newline at end of file diff --git a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/configuration/metadata/ApplicationConfigurationMetadataResolverAutoConfigurationTest.java b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/configuration/metadata/ApplicationConfigurationMetadataResolverAutoConfigurationTest.java index 180885ffaf..9df2fc0df4 100644 --- a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/configuration/metadata/ApplicationConfigurationMetadataResolverAutoConfigurationTest.java +++ b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/configuration/metadata/ApplicationConfigurationMetadataResolverAutoConfigurationTest.java @@ -23,8 +23,7 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.springframework.beans.factory.annotation.Autowired; @@ -41,23 +40,23 @@ import org.springframework.http.HttpMethod; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; -import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.TestPropertySource; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; import org.springframework.web.client.HttpClientErrorException; import org.springframework.web.client.RestTemplate; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyMap; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** * @author Christian Tzolov + * @author Corneil du Plessis */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(classes = ApplicationConfigurationMetadataResolverAutoConfigurationTest.TestConfig.class) +@SpringJUnitConfig(classes = ApplicationConfigurationMetadataResolverAutoConfigurationTest.TestConfig.class) @TestPropertySource(properties = { ".dockerconfigjson={\"auths\":{\"demo.repository.io\":{\"username\":\"testuser\",\"password\":\"testpassword\",\"auth\":\"YWRtaW46SGFyYm9yMTIzNDU=\"}" + ",\"demo2.repository.io\":{\"username\":\"testuser\",\"password\":\"testpassword\",\"auth\":\"YWRtaW46SGFyYm9yMTIzNDU=\"}}}", @@ -79,7 +78,7 @@ "spring.cloud.dataflow.container.registry-configurations[goharbor2].secret=Harbor12345", "spring.cloud.dataflow.container.registry-configurations[goharbor2].use-http-proxy=true" }) -public class ApplicationConfigurationMetadataResolverAutoConfigurationTest { +class ApplicationConfigurationMetadataResolverAutoConfigurationTest { @Autowired Map registryConfigurationMap; @@ -107,7 +106,7 @@ public class ApplicationConfigurationMetadataResolverAutoConfigurationTest { RestTemplate containerRestTemplateWithHttpProxy; @Test - public void registryConfigurationBeanCreationTest() { + void registryConfigurationBeanCreationTest() { 
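+ void registryConfigurationBeanCreationTest() {
+ // expects four configurations: two parsed from the .dockerconfigjson secret and two from the explicit goharbor registry-configurations properties above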
assertThat(registryConfigurationMap).hasSize(4); ContainerRegistryConfiguration secretConf = registryConfigurationMap.get("demo.repository.io"); @@ -120,8 +119,7 @@ public void registryConfigurationBeanCreationTest() { .describedAs("The explicit disable-ssl-verification=true property should augment the .dockerconfigjson based config") .isTrue(); assertThat(secretConf.getExtra()).isNotEmpty(); - assertThat(secretConf.getExtra().get(DockerOAuth2RegistryAuthorizer.DOCKER_REGISTRY_AUTH_URI_KEY)) - .isEqualTo("https://demo.repository.io/service/token?service=demo-registry&scope=repository:{repository}:pull"); + assertThat(secretConf.getExtra()).containsEntry(DockerOAuth2RegistryAuthorizer.DOCKER_REGISTRY_AUTH_URI_KEY, "https://demo.repository.io/service/token?service=demo-registry&scope=repository:{repository}:pull"); ContainerRegistryConfiguration secretConf2 = registryConfigurationMap.get("demo2.repository.io"); assertThat(secretConf2).isNotNull(); @@ -133,8 +131,7 @@ public void registryConfigurationBeanCreationTest() { .describedAs("The explicit disable-ssl-verification=true property should augment the .dockerconfigjson based config") .isTrue(); assertThat(secretConf2.getExtra()).isNotEmpty(); - assertThat(secretConf2.getExtra().get(DockerOAuth2RegistryAuthorizer.DOCKER_REGISTRY_AUTH_URI_KEY)) - .isEqualTo("https://demo2.repository.io/service/token?service=demo-registry&scope=repository:{repository}:pull"); + assertThat(secretConf2.getExtra()).containsEntry(DockerOAuth2RegistryAuthorizer.DOCKER_REGISTRY_AUTH_URI_KEY, "https://demo2.repository.io/service/token?service=demo-registry&scope=repository:{repository}:pull"); ContainerRegistryConfiguration goharborConf = registryConfigurationMap.get("demo.goharbor.io"); assertThat(goharborConf).isNotNull(); @@ -144,8 +141,7 @@ public void registryConfigurationBeanCreationTest() { assertThat(goharborConf.getSecret()).isEqualTo("Harbor12345"); assertThat(goharborConf.isDisableSslVerification()).isFalse(); assertThat(goharborConf.getExtra()).isNotEmpty(); - assertThat(goharborConf.getExtra().get(DockerOAuth2RegistryAuthorizer.DOCKER_REGISTRY_AUTH_URI_KEY)) - .isEqualTo("https://demo.goharbor.io/service/token?service=demo-registry2&scope=repository:{repository}:pull"); + assertThat(goharborConf.getExtra()).containsEntry(DockerOAuth2RegistryAuthorizer.DOCKER_REGISTRY_AUTH_URI_KEY, "https://demo.goharbor.io/service/token?service=demo-registry2&scope=repository:{repository}:pull"); ContainerRegistryConfiguration goharborConf2 = registryConfigurationMap.get("demo2.goharbor.io"); @@ -156,12 +152,11 @@ public void registryConfigurationBeanCreationTest() { assertThat(goharborConf2.getSecret()).isEqualTo("Harbor12345"); assertThat(goharborConf2.isDisableSslVerification()).isFalse(); assertThat(goharborConf2.getExtra()).isNotEmpty(); - assertThat(goharborConf2.getExtra().get(DockerOAuth2RegistryAuthorizer.DOCKER_REGISTRY_AUTH_URI_KEY)) - .isEqualTo("https://demo2.goharbor.io/service/token?service=demo-registry2&scope=repository:{repository}:pull"); + assertThat(goharborConf2.getExtra()).containsEntry(DockerOAuth2RegistryAuthorizer.DOCKER_REGISTRY_AUTH_URI_KEY, "https://demo2.goharbor.io/service/token?service=demo-registry2&scope=repository:{repository}:pull"); } @Test - public void containerImageMetadataResolverWithActiveSSL() throws URISyntaxException { + void containerImageMetadataResolverWithActiveSSL() throws URISyntaxException { assertThat(containerImageMetadataResolver).isNotNull(); 
Map labels = containerImageMetadataResolver.getImageLabels("demo.goharbor.io/test/image:1.0.0"); assertThat(labels).containsExactly(Collections.singletonMap("foo", "bar").entrySet().iterator().next()); @@ -183,7 +178,7 @@ public void containerImageMetadataResolverWithActiveSSL() throws URISyntaxExcept } @Test - public void containerImageMetadataResolverWithDisabledSSL() throws URISyntaxException { + void containerImageMetadataResolverWithDisabledSSL() throws URISyntaxException { assertThat(containerImageMetadataResolver).isNotNull(); Map labels = containerImageMetadataResolver.getImageLabels("demo.repository.io/disabledssl/image:1.0.0"); assertThat(labels).containsExactly(Collections.singletonMap("foo", "bar").entrySet().iterator().next()); @@ -215,10 +210,18 @@ public ContainerImageRestTemplateFactory containerImageRestTemplateFactory( @Qualifier("containerRestTemplate") RestTemplate containerRestTemplate, @Qualifier("containerRestTemplateWithHttpProxy") RestTemplate containerRestTemplateWithHttpProxy) { ContainerImageRestTemplateFactory containerImageRestTemplateFactory = Mockito.mock(ContainerImageRestTemplateFactory.class); - when(containerImageRestTemplateFactory.getContainerRestTemplate(eq(true), eq(false))).thenReturn(noSslVerificationContainerRestTemplate); - when(containerImageRestTemplateFactory.getContainerRestTemplate(eq(true), eq(true))).thenReturn(noSslVerificationContainerRestTemplateWithHttpProxy); - when(containerImageRestTemplateFactory.getContainerRestTemplate(eq(false), eq(false))).thenReturn(containerRestTemplate); - when(containerImageRestTemplateFactory.getContainerRestTemplate(eq(false), eq(true))).thenReturn(containerRestTemplateWithHttpProxy); + when(containerImageRestTemplateFactory.getContainerRestTemplate(eq(true), eq(false), + anyMap())) + .thenReturn(noSslVerificationContainerRestTemplate); + when(containerImageRestTemplateFactory.getContainerRestTemplate(eq(true), eq(true), + anyMap())) + .thenReturn(noSslVerificationContainerRestTemplateWithHttpProxy); + when(containerImageRestTemplateFactory.getContainerRestTemplate(eq(false), eq(false), + anyMap())) + .thenReturn(containerRestTemplate); + when(containerImageRestTemplateFactory.getContainerRestTemplate(eq(false), eq(true), + anyMap())) + .thenReturn(containerRestTemplateWithHttpProxy); return containerImageRestTemplateFactory; } diff --git a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/configuration/metadata/BootApplicationConfigurationMetadataResolverTests.java b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/configuration/metadata/BootApplicationConfigurationMetadataResolverTests.java index fbfdf61069..da329b0cb7 100644 --- a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/configuration/metadata/BootApplicationConfigurationMetadataResolverTests.java +++ b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/configuration/metadata/BootApplicationConfigurationMetadataResolverTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2020 the original author or authors. + * Copyright 2016-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -23,9 +23,9 @@ import java.util.Map; import java.util.Set; -import org.hamcrest.Matcher; -import org.junit.Before; -import org.junit.Test; +import org.assertj.core.api.Condition; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.mockito.Mock; import org.mockito.MockitoAnnotations; @@ -35,12 +35,7 @@ import org.springframework.core.io.ClassPathResource; import org.springframework.util.StreamUtils; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.hasItem; -import static org.hamcrest.Matchers.hasProperty; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.when; /** @@ -49,30 +44,31 @@ * @author Eric Bottard * @author Christian Tzolov * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ -public class BootApplicationConfigurationMetadataResolverTests { +class BootApplicationConfigurationMetadataResolverTests { @Mock private ContainerImageMetadataResolver containerImageMetadataResolver; private ApplicationConfigurationMetadataResolver resolver; - @Before - public void init() { + @BeforeEach + void init() { MockitoAnnotations.initMocks(this); resolver = new BootApplicationConfigurationMetadataResolver(containerImageMetadataResolver); } @Test - public void appDockerResourceEmptyLabels() { + void appDockerResourceEmptyLabels() { when(containerImageMetadataResolver.getImageLabels("test/test:latest")).thenReturn(new HashMap<>()); List properties = resolver .listProperties(new DockerResource("test/test:latest")); - assertThat(properties.size(), is(0)); + assertThat(properties).isEmpty(); } @Test - public void appDockerResource() throws IOException { + void appDockerResource() throws IOException { byte[] bytes = StreamUtils.copyToByteArray(new ClassPathResource( "apps/no-visible-properties/META-INF/spring-configuration-metadata.json", getClass()) .getInputStream()); @@ -82,11 +78,11 @@ public void appDockerResource() throws IOException { new String(bytes))); List properties = resolver .listProperties(new DockerResource("test/test:latest")); - assertThat(properties.size(), is(3)); + assertThat(properties).hasSize(3); } @Test - public void appDockerResourceBrokenFormat() { + void appDockerResourceBrokenFormat() { byte[] bytes = "Invalid metadata json content1".getBytes(); Map result = Collections.singletonMap( "org.springframework.cloud.dataflow.spring-configuration-metadata.json", @@ -94,82 +90,86 @@ public void appDockerResourceBrokenFormat() { when(containerImageMetadataResolver.getImageLabels("test/test:latest")).thenReturn(result); List properties = resolver .listProperties(new DockerResource("test/test:latest")); - assertThat(properties.size(), is(0)); + assertThat(properties).isEmpty(); } @Test - public void appSpecificVisiblePropsShouldBeVisible() { + void appSpecificVisiblePropsShouldBeVisible() { List properties = resolver .listProperties(new ClassPathResource("apps/filter-processor", getClass())); - assertThat(properties, hasItem(configPropertyIdentifiedAs("filter.expression"))); - assertThat(properties, hasItem(configPropertyIdentifiedAs("some.other.property.included.prefix.expresso2"))); + assertThat(properties) + .haveAtLeast(1, configPropertyIdentifiedAs("filter.expression")) + .haveAtLeast(1, configPropertyIdentifiedAs("some.other.property.included.prefix.expresso2")); } @Test - public void 
appSpecificVisibleLegacyPropsShouldBeVisible() { - List properties = resolver - .listProperties(new ClassPathResource("apps/filter-processor-legacy", getClass())); - assertThat(properties, hasItem(configPropertyIdentifiedAs("filter.expression"))); - assertThat(properties, hasItem(configPropertyIdentifiedAs("some.other.property.included.prefix.expresso2"))); - } - - @Test - public void appSpecificVisibleLegacyPropsShouldBeVisibleIfBothInPlace() { - // test resource files has both expresso2 and expresso3 in spring-configuration-metadata - // and as we prefer new format(expresso3 included) and it exists - // expresso2 from old format doesn't get read. - List properties = resolver - .listProperties(new ClassPathResource("apps/filter-processor-both", getClass())); - assertThat(properties, hasItem(configPropertyIdentifiedAs("filter.expression"))); - assertThat(properties, hasItem(configPropertyIdentifiedAs("some.other.property.included.prefix.expresso3"))); - assertThat(properties, hasItem(configPropertyIdentifiedAs("some.other.property.included.prefix.expresso2"))); - } - - @Test - public void otherPropertiesShouldOnlyBeVisibleInExtensiveCall() { + void otherPropertiesShouldOnlyBeVisibleInExtensiveCall() { List properties = resolver .listProperties(new ClassPathResource("apps/filter-processor", getClass())); - assertThat(properties, not(hasItem(configPropertyIdentifiedAs("some.prefix.hidden.by.default.secret")))); + assertThat(properties).doNotHave(configPropertyIdentifiedAs("some.prefix.hidden.by.default.secret")); properties = resolver.listProperties(new ClassPathResource("apps/filter-processor", getClass()), true); - assertThat(properties, hasItem(configPropertyIdentifiedAs("some.prefix.hidden.by.default.secret"))); + assertThat(properties).haveAtLeast(1, configPropertyIdentifiedAs("some.prefix.hidden.by.default.secret")); } @Test - public void shouldReturnEverythingWhenNoDescriptors() { + void shouldReturnEverythingWhenNoDescriptors() { List properties = resolver .listProperties(new ClassPathResource("apps/no-visible-properties", getClass())); List full = resolver .listProperties(new ClassPathResource("apps/no-visible-properties", getClass()), true); - assertThat(properties.size(), is(0)); - assertThat(full.size(), is(3)); + assertThat(properties).isEmpty(); + assertThat(full).hasSize(3); } @Test - public void deprecatedErrorPropertiesShouldNotBeVisible() { + void deprecatedErrorPropertiesShouldNotBeVisible() { List properties = resolver .listProperties(new ClassPathResource("apps/deprecated-error", getClass())); List full = resolver .listProperties(new ClassPathResource("apps/deprecated-error", getClass()), true); - assertThat(properties.size(), is(0)); - assertThat(full.size(), is(2)); + assertThat(properties).isEmpty(); + assertThat(full).hasSize(2); + } + + @Test + void shouldReturnPortMappingProperties() { + Map> portNames = resolver.listPortNames(new ClassPathResource("apps/filter-processor", getClass())); + assertThat(portNames).hasSize(2); + assertThat(portNames.get("inbound")).hasSize(3); + assertThat(portNames.get("inbound")).contains("in1", "in2", "in3"); + assertThat(portNames.get("outbound")).hasSize(2); + assertThat(portNames.get("outbound")).contains("out1", "out2"); + } + + @Test + void shouldReturnOptionGroupsProperties() { + Map> optionGroups = resolver.listOptionGroups(new ClassPathResource("apps/filter-processor", getClass())); + assertThat(optionGroups).hasSize(4); + assertThat(optionGroups.get("g1")).hasSize(3); + 
assertThat(optionGroups.get("g1")).contains("foo1.bar1", "foo1.bar2", "foo1.bar3"); + assertThat(optionGroups.get("g2")).isEmpty(); + assertThat(optionGroups.get("g1.sb1")).hasSize(1); + assertThat(optionGroups.get("g1.sb1")).contains("foo2.bar1"); + assertThat(optionGroups.get("g1.sb2")).hasSize(2); + assertThat(optionGroups.get("g1.sb2")).contains("foo3.bar1", "foo3.bar2"); } @Test - public void appDockerResourceWithInboundOutboundPortMapping() { + void appDockerResourceWithInboundOutboundPortMapping() { Map result = new HashMap<>(); result.put("configuration-properties.inbound-ports", "input1,input2, input3"); result.put("configuration-properties.outbound-ports", "output1, output2"); when(this.containerImageMetadataResolver.getImageLabels("test/test:latest")).thenReturn(result); Map> portNames = this.resolver.listPortNames(new DockerResource("test/test:latest")); - assertThat(portNames.size(), is(2)); - assertThat(portNames.get("inbound").size(), is(3)); - assertThat(portNames.get("inbound"), containsInAnyOrder("input1", "input2", "input3")); - assertThat(portNames.get("outbound").size(), is(2)); - assertThat(portNames.get("outbound"), containsInAnyOrder("output1", "output2")); + assertThat(portNames).hasSize(2); + assertThat(portNames.get("inbound")).hasSize(3); + assertThat(portNames.get("inbound")).contains("input1", "input2", "input3"); + assertThat(portNames.get("outbound")).hasSize(2); + assertThat(portNames.get("outbound")).contains("output1", "output2"); } - private Matcher configPropertyIdentifiedAs(String name) { - return hasProperty("id", is(name)); + private Condition configPropertyIdentifiedAs(String name) { + return new Condition<>(c -> name.equals(c.getId()), "id:" + name); } } diff --git a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/DefaultContainerImageMetadataResolverTest.java b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/DefaultContainerImageMetadataResolverTest.java index be870752fb..83e360af8c 100644 --- a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/DefaultContainerImageMetadataResolverTest.java +++ b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/DefaultContainerImageMetadataResolverTest.java @@ -23,8 +23,9 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentMatcher; import org.mockito.Mock; import org.mockito.MockitoAnnotations; @@ -34,16 +35,19 @@ import org.springframework.http.HttpHeaders; import org.springframework.http.HttpMethod; import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.util.StringUtils; import org.springframework.web.client.RestTemplate; import org.springframework.web.util.UriComponents; import org.springframework.web.util.UriComponentsBuilder; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType; import static org.mockito.ArgumentMatchers.any; import static 
org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.anyMap; +import static org.mockito.ArgumentMatchers.argThat; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -51,8 +55,9 @@ /** * @author Christian Tzolov * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ -public class DefaultContainerImageMetadataResolverTest { +class DefaultContainerImageMetadataResolverTest { @Mock private RestTemplate mockRestTemplate; @@ -64,11 +69,11 @@ public class DefaultContainerImageMetadataResolverTest { private ContainerRegistryService containerRegistryService; - @Before - public void init() { + @BeforeEach + void init() { MockitoAnnotations.initMocks(this); - when(containerImageRestTemplateFactory.getContainerRestTemplate(anyBoolean(), anyBoolean())).thenReturn(mockRestTemplate); + when(containerImageRestTemplateFactory.getContainerRestTemplate(anyBoolean(), anyBoolean(), anyMap())).thenReturn(mockRestTemplate); // DockerHub registry configuration by default. ContainerRegistryConfiguration dockerHubAuthConfig = new ContainerRegistryConfiguration(); @@ -88,17 +93,19 @@ public void init() { when(registryAuthorizer.getAuthorizationHeaders(any(ContainerImage.class), any())).thenReturn(new HttpHeaders()); this.containerRegistryService = new ContainerRegistryService(containerImageRestTemplateFactory, - new ContainerImageParser(), registryConfigurationMap, Arrays.asList(registryAuthorizer)); + new ContainerImageParser(), registryConfigurationMap, Collections.singletonList(registryAuthorizer)); } - @Test(expected = ContainerRegistryException.class) - public void getImageLabelsInvalidImageName() { - DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver(this.containerRegistryService); - resolver.getImageLabels(null); + @Test + void getImageLabelsInvalidImageName() { + assertThatExceptionOfType(ContainerRegistryException.class).isThrownBy(() -> { + DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver(this.containerRegistryService); + resolver.getImageLabels(null); + }); } @Test - public void getImageLabels() throws JsonProcessingException { + void getImageLabels() throws JsonProcessingException { DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver(this.containerRegistryService); @@ -109,12 +116,13 @@ public void getImageLabels() throws JsonProcessingException { "registry-1.docker.io", null, "test/image", "123"); Map labels = resolver.getImageLabels("test/image:latest"); - assertThat(labels.size(), is(1)); - assertThat(labels.get("boza"), is("koza")); + assertThat(labels) + .hasSize(1) + .containsEntry("boza", "koza"); } @Test - public void getImageLabelsFromPrivateRepository() throws JsonProcessingException { + void getImageLabelsFromPrivateRepository() throws JsonProcessingException { DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver(this.containerRegistryService); @@ -125,65 +133,74 @@ public void getImageLabelsFromPrivateRepository() throws JsonProcessingException "my-private-repository.com", "5000", "test/image", "123"); Map labels = resolver.getImageLabels("my-private-repository.com:5000/test/image:latest"); - assertThat(labels.size(), is(1)); - assertThat(labels.get("boza"), is("koza")); + assertThat(labels) + .hasSize(1) + .containsEntry("boza", "koza"); } - @Test(expected = 
ContainerRegistryException.class) - public void getImageLabelsMissingRegistryConfiguration() { - DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver(this.containerRegistryService); - resolver.getImageLabels("somehost:8083/test/image:latest"); + @Test + void getImageLabelsMissingRegistryConfiguration() { + assertThatExceptionOfType(ContainerRegistryException.class).isThrownBy(() -> { + DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver(this.containerRegistryService); + resolver.getImageLabels("somehost:8083/test/image:latest"); + }); } - @Test(expected = ContainerRegistryException.class) - public void getImageLabelsMissingRegistryAuthorizer() { - - DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver( + @Test + void getImageLabelsMissingRegistryAuthorizer() { + assertThatExceptionOfType(ContainerRegistryException.class).isThrownBy(() -> { + DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver( new ContainerRegistryService(containerImageRestTemplateFactory, - new ContainerImageParser(), registryConfigurationMap, Collections.emptyList())); + new ContainerImageParser(), registryConfigurationMap, Collections.emptyList())); - resolver.getImageLabels("test/image:latest"); + resolver.getImageLabels("test/image:latest"); + }); } - @Test(expected = ContainerRegistryException.class) - public void getImageLabelsMissingAuthorizationHeader() { - RegistryAuthorizer registryAuthorizer = mock(RegistryAuthorizer.class); + @Test + void getImageLabelsMissingAuthorizationHeader() { + assertThatExceptionOfType(ContainerRegistryException.class).isThrownBy(() -> { + RegistryAuthorizer registryAuthorizer = mock(RegistryAuthorizer.class); - when(registryAuthorizer.getType()).thenReturn(ContainerRegistryConfiguration.AuthorizationType.dockeroauth2); - when(registryAuthorizer.getAuthorizationHeaders(any(ContainerImage.class), any())).thenReturn(null); + when(registryAuthorizer.getType()).thenReturn(ContainerRegistryConfiguration.AuthorizationType.dockeroauth2); + when(registryAuthorizer.getAuthorizationHeaders(any(ContainerImage.class), any())).thenReturn(null); - DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver( + DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver( new ContainerRegistryService(containerImageRestTemplateFactory, new ContainerImageParser(), registryConfigurationMap, Arrays.asList(registryAuthorizer))); - resolver.getImageLabels("test/image:latest"); + resolver.getImageLabels("test/image:latest"); + }); } - @Test(expected = ContainerRegistryException.class) - public void getImageLabelsInvalidManifestResponse() { - - DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver(this.containerRegistryService); + @Test + void getImageLabelsInvalidManifestResponse() { + assertThatExceptionOfType(ContainerRegistryException.class).isThrownBy(() -> { + DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver(this.containerRegistryService); - Map manifestResponseWithoutConfig = Collections.emptyMap(); - mockManifestRestTemplateCall(manifestResponseWithoutConfig, "registry-1.docker.io", + Map manifestResponseWithoutConfig = Collections.emptyMap(); + mockManifestRestTemplateCall(manifestResponseWithoutConfig, "registry-1.docker.io", null, "test/image", "latest"); - 
resolver.getImageLabels("test/image:latest"); + resolver.getImageLabels("test/image:latest"); + }); } - @Test(expected = ContainerRegistryException.class) - public void getImageLabelsInvalidDigest() { - DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver(this.containerRegistryService); + @Test + void getImageLabelsInvalidDigest() { + assertThatExceptionOfType(ContainerRegistryException.class).isThrownBy(() -> { + DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver(this.containerRegistryService); - String emptyDigest = ""; - Map manifestResponse = Collections.singletonMap("config", Collections.singletonMap("digest", emptyDigest)); - mockManifestRestTemplateCall(manifestResponse, "registry-1.docker.io", null, + String emptyDigest = ""; + Map manifestResponse = Collections.singletonMap("config", Collections.singletonMap("digest", emptyDigest)); + mockManifestRestTemplateCall(manifestResponse, "registry-1.docker.io", null, "test/image", "latest"); - resolver.getImageLabels("test/image:latest"); + resolver.getImageLabels("test/image:latest"); + }); } @Test - public void getImageLabelsWithInvalidLabels() throws JsonProcessingException { + void getImageLabelsWithInvalidLabels() throws JsonProcessingException { DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver(this.containerRegistryService); @@ -195,7 +212,27 @@ public void getImageLabelsWithInvalidLabels() throws JsonProcessingException { "registry-1.docker.io", null, "test/image", "123"); Map labels = resolver.getImageLabels("test/image:latest"); - assertThat(labels.size(), is(0)); + assertThat(labels).isEmpty(); + } + + @Test + void getImageLabelsWithMixedOCIResponses() throws JsonProcessingException { + DefaultContainerImageMetadataResolver resolver = new MockedDefaultContainerImageMetadataResolver( + this.containerRegistryService); + String ociInCompatible = "{\"schemaVersion\": 1,\"name\": \"test/image\"}"; + String ociCompatible = "{\"schemaVersion\": 2,\"mediaType\": \"application/vnd.oci.image.manifest.v1+json\",\"config\":{\"mediaType\": \"application/vnd.oci.image.config.v1+json\",\"digest\": \"sha256:efc06d6096cc88697e477abb0b3479557e1bec688c36813383f1a8581f87d9f8\",\"size\": 34268}}"; + mockManifestRestTemplateCallAccepts(ociInCompatible, "my-private-repository.com", "5000", "test/image", + "latest", ContainerRegistryProperties.DOCKER_IMAGE_MANIFEST_MEDIA_TYPE); + mockManifestRestTemplateCallAccepts(ociCompatible, "my-private-repository.com", "5000", "test/image", "latest", + ContainerRegistryProperties.OCI_IMAGE_MANIFEST_MEDIA_TYPE); + String blobResponse = "{\"config\": {\"Labels\": {\"boza\": \"koza\"}}}"; + mockBlogRestTemplateCall(blobResponse, "my-private-repository.com", "5000", "test/image", + "sha256:efc06d6096cc88697e477abb0b3479557e1bec688c36813383f1a8581f87d9f8"); + + Map labels = resolver.getImageLabels("my-private-repository.com:5000/test/image:latest"); + assertThat(labels) + .isNotEmpty() + .containsEntry("boza", "koza"); } private void mockManifestRestTemplateCall(Map mapToReturn, String registryHost, @@ -234,6 +271,39 @@ private void mockBlogRestTemplateCall(String jsonResponse, String registryHost, .thenReturn(new ResponseEntity<>(new ObjectMapper().readValue(jsonResponse, Map.class), HttpStatus.OK)); } + private void mockManifestRestTemplateCallAccepts(String jsonResponse, String registryHost, String registryPort, + String repository, String tagOrDigest, String accepts) throws 
JsonProcessingException { + + UriComponents manifestUriComponents = UriComponentsBuilder.newInstance() + .scheme("https") + .host(registryHost) + .port(StringUtils.hasText(registryPort) ? registryPort : null) + .path("v2/{repository}/manifests/{reference}") + .build() + .expand(repository, tagOrDigest); + + MediaType mediaType = new MediaType(org.apache.commons.lang3.StringUtils.substringBefore(accepts, "/"), + org.apache.commons.lang3.StringUtils.substringAfter(accepts, "/")); + when(mockRestTemplate.exchange(eq(manifestUriComponents.toUri()), eq(HttpMethod.GET), + argThat(new HeaderAccepts(mediaType)), eq(Map.class))) + .thenReturn(new ResponseEntity<>(new ObjectMapper().readValue(jsonResponse, Map.class), HttpStatus.OK)); + } + + static class HeaderAccepts implements ArgumentMatcher> { + + private final MediaType accepts; + + public HeaderAccepts(MediaType accepts) { + this.accepts = accepts; + } + + @Override + public boolean matches(HttpEntity argument) { + return argument.getHeaders().getAccept().contains(accepts); + } + + } + private class MockedDefaultContainerImageMetadataResolver extends DefaultContainerImageMetadataResolver { public MockedDefaultContainerImageMetadataResolver(ContainerRegistryService containerRegistryService) { super(containerRegistryService); diff --git a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToRegistryConfigurationConverterTest.java b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToRegistryConfigurationConverterTest.java index be15caa06f..759f0326f1 100644 --- a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToRegistryConfigurationConverterTest.java +++ b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToRegistryConfigurationConverterTest.java @@ -21,8 +21,8 @@ import java.util.HashMap; import java.util.Map; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.mockito.Mock; import org.mockito.MockitoAnnotations; @@ -36,19 +36,18 @@ import org.springframework.web.client.HttpClientErrorException; import org.springframework.web.client.RestTemplate; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.anyMap; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.when; /** * @author Christian Tzolov + * @author Corneil du Plessis */ -public class DockerConfigJsonSecretToRegistryConfigurationConverterTest { +class DockerConfigJsonSecretToRegistryConfigurationConverterTest { @Mock private RestTemplate mockRestTemplate; @@ -58,15 +57,15 @@ public class DockerConfigJsonSecretToRegistryConfigurationConverterTest { private DockerConfigJsonSecretToRegistryConfigurationConverter converter; - @Before - public void init() { + @BeforeEach + void init() { MockitoAnnotations.initMocks(this); -
when(containerImageRestTemplateFactory.getContainerRestTemplate(anyBoolean(), anyBoolean())).thenReturn(mockRestTemplate); + when(containerImageRestTemplateFactory.getContainerRestTemplate(anyBoolean(), anyBoolean(), anyMap())).thenReturn(mockRestTemplate); converter = new DockerConfigJsonSecretToRegistryConfigurationConverter(new ContainerRegistryProperties(), containerImageRestTemplateFactory); } @Test - public void testConvertAnonymousRegistry() throws URISyntaxException { + void convertAnonymousRegistry() throws URISyntaxException { when(mockRestTemplate.exchange( eq(new URI("https://demo.repository.io/v2/")), eq(HttpMethod.GET), any(), eq(Map.class))) @@ -75,19 +74,20 @@ public void testConvertAnonymousRegistry() throws URISyntaxException { String b = "{\"auths\":{\"demo.repository.io\":{}}}"; Map result = converter.convert(b); - assertThat(result.size(), is(1)); - assertTrue(result.containsKey("demo.repository.io")); + assertThat(result) + .hasSize(1) + .containsKey("demo.repository.io"); ContainerRegistryConfiguration registryConfiguration = result.get("demo.repository.io"); - assertThat(registryConfiguration.getRegistryHost(), is("demo.repository.io")); - assertThat(registryConfiguration.getUser(), nullValue()); - assertThat(registryConfiguration.getSecret(), nullValue()); - assertThat(registryConfiguration.getAuthorizationType(), is(ContainerRegistryConfiguration.AuthorizationType.anonymous)); + assertThat(registryConfiguration.getRegistryHost()).isEqualTo("demo.repository.io"); + assertThat(registryConfiguration.getUser()).isNull(); + assertThat(registryConfiguration.getSecret()).isNull(); + assertThat(registryConfiguration.getAuthorizationType()).isEqualTo(ContainerRegistryConfiguration.AuthorizationType.anonymous); } @Test - public void testConvertBasicAuthRegistry() throws URISyntaxException { + void convertBasicAuthRegistry() throws URISyntaxException { when(mockRestTemplate.exchange( eq(new URI("https://demo.repository.io/v2/_catalog")), eq(HttpMethod.GET), any(), eq(Map.class))) @@ -96,19 +96,20 @@ public void testConvertBasicAuthRegistry() throws URISyntaxException { String b = "{\"auths\":{\"demo.repository.io\":{\"username\":\"testuser\",\"password\":\"testpassword\",\"auth\":\"YWRtaW46SGFyYm9yMTIzNDU=\"}}}"; Map result = converter.convert(b); - assertThat(result.size(), is(1)); - assertTrue(result.containsKey("demo.repository.io")); + assertThat(result) + .hasSize(1) + .containsKey("demo.repository.io"); ContainerRegistryConfiguration registryConfiguration = result.get("demo.repository.io"); - assertThat(registryConfiguration.getRegistryHost(), is("demo.repository.io")); - assertThat(registryConfiguration.getUser(), is("testuser")); - assertThat(registryConfiguration.getSecret(), is("testpassword")); - assertThat(registryConfiguration.getAuthorizationType(), is(ContainerRegistryConfiguration.AuthorizationType.basicauth)); + assertThat(registryConfiguration.getRegistryHost()).isEqualTo("demo.repository.io"); + assertThat(registryConfiguration.getUser()).isEqualTo("testuser"); + assertThat(registryConfiguration.getSecret()).isEqualTo("testpassword"); + assertThat(registryConfiguration.getAuthorizationType()).isEqualTo(ContainerRegistryConfiguration.AuthorizationType.basicauth); } @Test - public void testConvertDockerHubRegistry() throws URISyntaxException { + void convertDockerHubRegistry() throws URISyntaxException { HttpHeaders authenticateHeader = new HttpHeaders(); authenticateHeader.add("Www-Authenticate", "Bearer 
realm=\"https://demo.repository.io/service/token/",service=\"demo-registry\",scope=\"registry:category:pull\""); @@ -121,17 +122,17 @@ public void testConvertDockerHubRegistry() throws URISyntaxException { String b = "{\"auths\":{\"demo.repository.io\":{\"username\":\"testuser\",\"password\":\"testpassword\",\"auth\":\"YWRtaW46SGFyYm9yMTIzNDU=\"}}}"; Map result = converter.convert(b); - assertThat(result.size(), is(1)); - assertTrue(result.containsKey("demo.repository.io")); + assertThat(result) + .hasSize(1) + .containsKey("demo.repository.io"); ContainerRegistryConfiguration registryConfiguration = result.get("demo.repository.io"); - assertThat(registryConfiguration.getRegistryHost(), is("demo.repository.io")); - assertThat(registryConfiguration.getUser(), is("testuser")); - assertThat(registryConfiguration.getSecret(), is("testpassword")); - assertThat(registryConfiguration.getAuthorizationType(), is(ContainerRegistryConfiguration.AuthorizationType.dockeroauth2)); - assertThat(registryConfiguration.getExtra().get("registryAuthUri"), - is("https://demo.repository.io/service/token?service=demo-registry&scope=repository:{repository}:pull")); + assertThat(registryConfiguration.getRegistryHost()).isEqualTo("demo.repository.io"); + assertThat(registryConfiguration.getUser()).isEqualTo("testuser"); + assertThat(registryConfiguration.getSecret()).isEqualTo("testpassword"); + assertThat(registryConfiguration.getAuthorizationType()).isEqualTo(ContainerRegistryConfiguration.AuthorizationType.dockeroauth2); + assertThat(registryConfiguration.getExtra()).containsEntry("registryAuthUri", "https://demo.repository.io/service/token?service=demo-registry&scope=repository:{repository}:pull"); } diff --git a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DropAuthorizationHeaderOnContainerTestManual.java b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DropAuthorizationHeaderOnContainerTestManual.java new file mode 100644 index 0000000000..13e6aa32a5 --- /dev/null +++ b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DropAuthorizationHeaderOnContainerTestManual.java @@ -0,0 +1,96 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.cloud.dataflow.container.registry.authorization; + +import java.util.Collections; +import java.util.Map; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; + +import org.springframework.boot.test.autoconfigure.web.client.AutoConfigureWebClient; +import org.springframework.cloud.dataflow.configuration.metadata.ApplicationConfigurationMetadataResolverAutoConfiguration; +import org.springframework.cloud.dataflow.configuration.metadata.container.DefaultContainerImageMetadataResolver; +import org.springframework.cloud.dataflow.container.registry.ContainerRegistryAutoConfiguration; +import org.springframework.cloud.dataflow.container.registry.ContainerRegistryConfiguration; +import org.springframework.cloud.dataflow.container.registry.ContainerRegistryProperties; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Import; +import org.springframework.context.annotation.Primary; + +import static org.assertj.core.api.Assertions.assertThat; + +/** + * This test performs a manual check against a deployed container registry. + * In order to invoke this test, populate the fields of DropAuthorizationHeaderOnContainerTestManual + * named registryDomainName, registryUser, registrySecret and imageNameAndTag. + * + * The image should be one built with spring-boot:build-image or paketo so that it has a label named 'org.springframework.boot.version' + * For Docker Hub use: + * registryDomainName="registry-1.docker.io", + * registryUser="docker user" + * registrySecret="docker access token" + * imageNameAndTag="springcloudstream/s3-sink-rabbit:5.0.0" + * + * @author Corneil du Plessis + */ +public class DropAuthorizationHeaderOnContainerTestManual { + + private static final String registryDomainName = "registry-1.docker.io"; + private static final String registryUser = ""; + private static final String registrySecret = ""; + private static final String imageNameAndTag = "springcloudstream/s3-sink-rabbit:5.0.0"; + + private AnnotationConfigApplicationContext context; + + @AfterEach + void clean() { + if (context != null) { + context.close(); + } + context = null; + } + + @Test + void testContainerImageLabels() { + context = new AnnotationConfigApplicationContext(TestApplication.class); + DefaultContainerImageMetadataResolver imageMetadataResolver = context.getBean(DefaultContainerImageMetadataResolver.class); + Map<String, String> imageLabels = imageMetadataResolver.getImageLabels(registryDomainName + "/" + imageNameAndTag); + System.out.println("imageLabels:" + imageLabels.keySet()); + assertThat(imageLabels).containsKey("org.springframework.boot.version"); + } + + @Import({ContainerRegistryAutoConfiguration.class, ApplicationConfigurationMetadataResolverAutoConfiguration.class}) + @AutoConfigureWebClient + static class TestApplication { + + @Bean + @Primary + ContainerRegistryProperties containerRegistryProperties() { + ContainerRegistryProperties properties = new ContainerRegistryProperties(); + ContainerRegistryConfiguration registryConfiguration = new ContainerRegistryConfiguration(); + registryConfiguration.setRegistryHost(registryDomainName); + registryConfiguration.setAuthorizationType(ContainerRegistryConfiguration.AuthorizationType.dockeroauth2); + registryConfiguration.setUser(registryUser); + registryConfiguration.setSecret(registrySecret); +
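// Editor's note (assumption, not part of the change): with authorization-type
+ // dockeroauth2 the metadata resolver first fetches a bearer token from the
+ // registry's token endpoint (for Docker Hub typically https://auth.docker.io/token,
+ // discovered via the Www-Authenticate challenge) and only then requests the
+ // image manifest and config blob.
+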
properties.setRegistryConfigurations(Collections.singletonMap(registryDomainName, registryConfiguration)); + + return properties; + } + } +} diff --git a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DropAuthorizationHeaderOnSignedS3RequestRedirectStrategyTest.java b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DropAuthorizationHeaderOnSignedS3RequestRedirectStrategyTest.java index 0bcc0b49b7..80cfa743e8 100644 --- a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DropAuthorizationHeaderOnSignedS3RequestRedirectStrategyTest.java +++ b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DropAuthorizationHeaderOnSignedS3RequestRedirectStrategyTest.java @@ -19,9 +19,10 @@ import java.util.Collections; import java.util.Map; -import org.junit.After; -import org.junit.ClassRule; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; import org.springframework.boot.test.autoconfigure.web.client.AutoConfigureWebClient; import org.springframework.cloud.dataflow.configuration.metadata.ApplicationConfigurationMetadataResolverAutoConfiguration; @@ -39,17 +40,17 @@ /** * @author Adam J. Weigold + * @author Corneil du Plessis */ public class DropAuthorizationHeaderOnSignedS3RequestRedirectStrategyTest { - - @ClassRule + @RegisterExtension public final static S3SignedRedirectRequestServerResource s3SignedRedirectRequestServerResource = - new S3SignedRedirectRequestServerResource(); + new S3SignedRedirectRequestServerResource(); private AnnotationConfigApplicationContext context; - @After - public void clean() { + @AfterEach + void clean() { if (context != null) { context.close(); } @@ -57,14 +58,14 @@ public void clean() { } @Test - public void testRedirect() { + void redirect() { context = new AnnotationConfigApplicationContext(TestApplication.class); final DefaultContainerImageMetadataResolver imageMetadataResolver = - context.getBean(DefaultContainerImageMetadataResolver.class); + context.getBean(DefaultContainerImageMetadataResolver.class); Map imageLabels = imageMetadataResolver.getImageLabels("localhost:" + - s3SignedRedirectRequestServerResource.getS3SignedRedirectServerPort() + "/test/s3-redirect-image:1.0.0"); + s3SignedRedirectRequestServerResource.getS3SignedRedirectServerPort() + "/test/s3-redirect-image:1.0.0"); assertThat(imageLabels).containsOnly(entry("foo", "bar")); } @@ -78,14 +79,14 @@ ContainerRegistryProperties containerRegistryProperties() { ContainerRegistryProperties properties = new ContainerRegistryProperties(); ContainerRegistryConfiguration registryConfiguration = new ContainerRegistryConfiguration(); registryConfiguration.setRegistryHost( - String.format("localhost:%s", s3SignedRedirectRequestServerResource.getS3SignedRedirectServerPort())); + String.format("localhost:%s", s3SignedRedirectRequestServerResource.getS3SignedRedirectServerPort())); registryConfiguration.setAuthorizationType(ContainerRegistryConfiguration.AuthorizationType.dockeroauth2); registryConfiguration.setUser("admin"); registryConfiguration.setSecret("Harbor12345"); registryConfiguration.setDisableSslVerification(true); 
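// Editor's note (assumption): SSL verification is disabled here because the stub
// redirect server used by this test serves HTTPS with the self-signed PKCS12
// keystore (s3redirectrequestserver.p12) generated via keytool.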
registryConfiguration.setExtra(Collections.singletonMap( - DockerOAuth2RegistryAuthorizer.DOCKER_REGISTRY_AUTH_URI_KEY, - "https://localhost/" + s3SignedRedirectRequestServerResource.getS3SignedRedirectServerPort() + "/service/token")); + DockerOAuth2RegistryAuthorizer.DOCKER_REGISTRY_AUTH_URI_KEY, + "https://localhost/" + s3SignedRedirectRequestServerResource.getS3SignedRedirectServerPort() + "/service/token")); properties.setRegistryConfigurations(Collections.singletonMap("goharbor", registryConfiguration)); return properties; diff --git a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/S3SignedRedirectRequestServerResource.java b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/S3SignedRedirectRequestServerResource.java index 0ff2c01608..214f0ccfa3 100644 --- a/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/S3SignedRedirectRequestServerResource.java +++ b/spring-cloud-dataflow-configuration-metadata/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/S3SignedRedirectRequestServerResource.java @@ -16,54 +16,54 @@ package org.springframework.cloud.dataflow.container.registry.authorization; -import org.junit.rules.ExternalResource; +import org.junit.jupiter.api.extension.AfterEachCallback; +import org.junit.jupiter.api.extension.BeforeEachCallback; +import org.junit.jupiter.api.extension.ExtensionContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.boot.builder.SpringApplicationBuilder; import org.springframework.cloud.dataflow.container.registry.authorization.support.S3SignedRedirectRequestServerApplication; import org.springframework.context.ConfigurableApplicationContext; -import org.springframework.util.SocketUtils; +import org.springframework.test.util.TestSocketUtils; /** * @author Adam J. Weigold + * @author Corneil du Plessis */ -public class S3SignedRedirectRequestServerResource extends ExternalResource { +public class S3SignedRedirectRequestServerResource implements BeforeEachCallback, AfterEachCallback { - private static final Logger logger = LoggerFactory.getLogger(S3SignedRedirectRequestServerResource.class); + private static final Logger logger = LoggerFactory.getLogger(S3SignedRedirectRequestServerResource.class); - private int s3SignedRedirectServerPort; + private int s3SignedRedirectServerPort; - private ConfigurableApplicationContext application; + private ConfigurableApplicationContext application; - public S3SignedRedirectRequestServerResource() { - super(); - } - @Override - protected void before() throws Throwable { + @Override + public void beforeEach(ExtensionContext context) throws Exception { - this.s3SignedRedirectServerPort = SocketUtils.findAvailableTcpPort(); + this.s3SignedRedirectServerPort = TestSocketUtils.findAvailableTcpPort(); - logger.info("Setting S3 Signed Redirect Server port to " + this.s3SignedRedirectServerPort); + logger.info("Setting S3 Signed Redirect Server port to " + this.s3SignedRedirectServerPort); - // Docker requires HTTPS. 
Generated ssl keypair as follows: - // `keytool -genkeypair -keyalg RSA -keysize 2048 -storetype PKCS12 -keystore s3redirectrequestserver.p12 -validity 1000000` - this.application = new SpringApplicationBuilder(S3SignedRedirectRequestServerApplication.class).build() - .run("--server.port=" + s3SignedRedirectServerPort, - "--server.ssl.key-store=classpath:s3redirectrequestserver.p12", - "--server.ssl.key-store-password=foobar"); - logger.info("S3 Signed Redirect Server Server is UP!"); - } + // Docker requires HTTPS. Generated ssl keypair as follows: + // `keytool -genkeypair -keyalg RSA -keysize 2048 -storetype PKCS12 -keystore s3redirectrequestserver.p12 -validity 1000000` + this.application = new SpringApplicationBuilder(S3SignedRedirectRequestServerApplication.class).build() + .run("--server.port=" + s3SignedRedirectServerPort, + "--server.ssl.key-store=classpath:s3redirectrequestserver.p12", + "--server.ssl.key-store-password=foobar"); + logger.info("S3 Signed Redirect Server is UP!"); + } - @Override - protected void after() { - application.stop(); - } + @Override + public void afterEach(ExtensionContext context) throws Exception { + application.stop(); + } - public int getS3SignedRedirectServerPort() { - return s3SignedRedirectServerPort; - } + public int getS3SignedRedirectServerPort() { + return s3SignedRedirectServerPort; + } } diff --git 
a/spring-cloud-dataflow-configuration-metadata/src/test/resources/org/springframework/cloud/dataflow/configuration/metadata/apps/filter-processor-legacy/META-INF/spring-configuration-metadata-whitelist.properties b/spring-cloud-dataflow-configuration-metadata/src/test/resources/org/springframework/cloud/dataflow/configuration/metadata/apps/filter-processor-legacy/META-INF/spring-configuration-metadata-whitelist.properties deleted file mode 100644 index 7cd3dbb2a4..0000000000 --- a/spring-cloud-dataflow-configuration-metadata/src/test/resources/org/springframework/cloud/dataflow/configuration/metadata/apps/filter-processor-legacy/META-INF/spring-configuration-metadata-whitelist.properties +++ /dev/null @@ -1,2 +0,0 @@ -configuration-properties.classes=foo.bar.FilterProperties -configuration-properties.names=some.other.property.included.prefix.expresso2 diff --git a/spring-cloud-dataflow-configuration-metadata/src/test/resources/org/springframework/cloud/dataflow/configuration/metadata/apps/filter-processor-legacy/META-INF/spring-configuration-metadata.json b/spring-cloud-dataflow-configuration-metadata/src/test/resources/org/springframework/cloud/dataflow/configuration/metadata/apps/filter-processor-legacy/META-INF/spring-configuration-metadata.json deleted file mode 100644 index 21d7839a7a..0000000000 --- a/spring-cloud-dataflow-configuration-metadata/src/test/resources/org/springframework/cloud/dataflow/configuration/metadata/apps/filter-processor-legacy/META-INF/spring-configuration-metadata.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "groups": [ - { - "name": "filter", - "type": "foo.bar.FilterProperties", - "sourceType": "foo.bar.FilterProperties" - } - ], - "properties": [ - { - "name": "filter.expression", - "type": "org.springframework.expression.Expression", - "description": "A predicate to evaluate", - "sourceType": "foo.bar.FilterProperties", - "defaultValue": "true" - }, - { - "name": "some.other.property.included.prefix.expresso2", - "type": "org.springframework.cloud.dataflow.completion.Expresso", - "description": "A property of type enum and whose name starts like 'expression'", - "sourceType": "com.acme.SomeDifferentProperties" - }, - { - "name": "some.prefix.hidden.by.default.secret", - "type": "java.lang.String", - "description": "Some hidden option", - "sourceType": "com.acme.OtherProperties" - } - ], - "hints": [] -} diff --git a/spring-cloud-dataflow-configuration-metadata/src/test/resources/org/springframework/cloud/dataflow/configuration/metadata/apps/filter-processor/META-INF/dataflow-configuration-option-groups.properties b/spring-cloud-dataflow-configuration-metadata/src/test/resources/org/springframework/cloud/dataflow/configuration/metadata/apps/filter-processor/META-INF/dataflow-configuration-option-groups.properties new file mode 100644 index 0000000000..ded4896539 --- /dev/null +++ b/spring-cloud-dataflow-configuration-metadata/src/test/resources/org/springframework/cloud/dataflow/configuration/metadata/apps/filter-processor/META-INF/dataflow-configuration-option-groups.properties @@ -0,0 +1,7 @@ +org.springframework.cloud.dataflow.configuration-properties.option-groups.g1=foo1.bar1,\ + foo1.bar2,\ + foo1.bar3 +org.springframework.cloud.dataflow.configuration-properties.option-groups.g2= +org.springframework.cloud.dataflow.configuration-properties.option-groups.g1.sb1=foo2.bar1 +org.springframework.cloud.dataflow.configuration-properties.option-groups.g1.sb2=foo3.bar1,\ + foo3.bar2 diff --git 
a/spring-cloud-dataflow-configuration-metadata/src/test/resources/org/springframework/cloud/dataflow/configuration/metadata/apps/filter-processor/META-INF/dataflow-configuration-port-mapping.properties b/spring-cloud-dataflow-configuration-metadata/src/test/resources/org/springframework/cloud/dataflow/configuration/metadata/apps/filter-processor/META-INF/dataflow-configuration-port-mapping.properties new file mode 100644 index 0000000000..9b3025a7d1 --- /dev/null +++ b/spring-cloud-dataflow-configuration-metadata/src/test/resources/org/springframework/cloud/dataflow/configuration/metadata/apps/filter-processor/META-INF/dataflow-configuration-port-mapping.properties @@ -0,0 +1,3 @@ +configuration-properties.inbound-ports=in1,in2,in3 +configuration-properties.outbound-ports=out1,out2 + diff --git a/spring-cloud-dataflow-container-registry/pom.xml b/spring-cloud-dataflow-container-registry/pom.xml index 6069dd036c..9bd6153af3 100644 --- a/spring-cloud-dataflow-container-registry/pom.xml +++ b/spring-cloud-dataflow-container-registry/pom.xml @@ -4,11 +4,17 @@ org.springframework.cloud spring-cloud-dataflow-parent - 2.8.0-SNAPSHOT + 3.0.0-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-container-registry + spring-cloud-dataflow-container-registry + Spring Cloud Data Flow Container Registry + UTF-8 + true + 3.4.1 @@ -24,6 +30,10 @@ com.amazonaws aws-java-sdk-ecr + + org.apache.httpcomponents.client5 + httpclient5-fluent + org.springframework.cloud spring-cloud-deployer-resource-docker @@ -32,6 +42,10 @@ org.springframework spring-web + + io.projectreactor.netty + reactor-netty + com.fasterxml.jackson.core jackson-core @@ -44,10 +58,6 @@ org.springframework.boot spring-boot-starter - - org.springframework.boot - spring-boot-loader - org.springframework.boot spring-boot-configuration-metadata @@ -63,4 +73,45 @@ test + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.13.0 + + true + ${java.version} + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + diff --git a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerImage.java b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerImage.java index ec9e405428..22761ece42 100644 --- a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerImage.java +++ b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerImage.java @@ -96,6 +96,7 @@ enum RepositoryReferenceType {tag, digest, unknown} /** * Helper method that returns the full Registry host address (host:port) + * @return The registry host name */ public String getRegistryHost() { return this.hostname + (StringUtils.hasText(this.port) ? ":" + this.port : ""); @@ -103,6 +104,7 @@ public String getRegistryHost() { /** * Helper method that returns the full Repository name (e.g. namespace/registryName) without the tag or digest. + * @return The repository name. */ public String getRepository() { String ns = StringUtils.hasText(this.repositoryNamespace) ? 
this.repositoryNamespace + "/" : ""; diff --git a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerImageRestTemplateFactory.java b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerImageRestTemplateFactory.java index bf75ff4976..a4c8c21813 100644 --- a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerImageRestTemplateFactory.java +++ b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerImageRestTemplateFactory.java @@ -1,5 +1,5 @@ /* - * Copyright 2021 the original author or authors. + * Copyright 2021-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,29 +16,29 @@ package org.springframework.cloud.dataflow.container.registry; -import java.security.KeyManagementException; -import java.security.NoSuchAlgorithmException; -import java.security.cert.X509Certificate; import java.util.ArrayList; -import java.util.List; -import java.util.Objects; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; -import javax.net.ssl.SSLContext; -import javax.net.ssl.TrustManager; -import javax.net.ssl.X509TrustManager; +import javax.net.ssl.SSLException; -import org.apache.http.HttpHost; -import org.apache.http.client.config.CookieSpecs; -import org.apache.http.client.config.RequestConfig; -import org.apache.http.conn.ssl.NoopHostnameVerifier; -import org.apache.http.impl.client.HttpClientBuilder; -import org.apache.http.impl.client.HttpClients; +import io.netty.handler.codec.http.HttpHeaders; +import io.netty.handler.codec.http.HttpMethod; +import io.netty.handler.ssl.SslContext; +import io.netty.handler.ssl.SslContextBuilder; +import io.netty.handler.ssl.util.InsecureTrustManagerFactory; +import reactor.netty.http.client.HttpClient; +import reactor.netty.http.client.HttpClientRequest; +import reactor.netty.transport.ProxyProvider; import org.springframework.boot.web.client.RestTemplateBuilder; -import org.springframework.cloud.dataflow.container.registry.authorization.DropAuthorizationHeaderOnSignedS3RequestRedirectStrategy; import org.springframework.http.MediaType; -import org.springframework.http.client.HttpComponentsClientHttpRequestFactory; +import org.springframework.http.client.ClientHttpRequestFactory; +import org.springframework.http.client.ReactorNettyClientRequestFactory; import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter; import org.springframework.web.client.RestTemplate; @@ -74,49 +74,32 @@ * * * @author Christian Tzolov + * @author Cheng Guan Poh + * @author Corneil du Plessis */ public class ContainerImageRestTemplateFactory { + private static final String CUSTOM_REGISTRY = "custom-registry"; + private static final String AMZ_CREDENTIAL = "X-Amz-Credential"; + private static final String AUTHORIZATION_HEADER = "Authorization"; + private static final String AZURECR_URI_SUFFIX = "azurecr.io"; + private static final String BASIC_AUTH = "Basic"; + private final RestTemplateBuilder restTemplateBuilder; private final ContainerRegistryProperties properties; /** - * Depends on the disablesSslVerification and useHttpProxy a 4 different 
RestTemplate configurations might be + * Depending on the skipSslVerification and withHttpProxy flags and the extra map, multiple configurations might be * used at the same time for interacting with different container registries. - * The cache map allows reusing the RestTemplates for given useHttpProxy and disablesSslVerification combination. + * The cache map allows reusing the RestTemplates for a given withHttpProxy, skipSslVerification, and extra map combination. */ private final ConcurrentHashMap<CacheKey, RestTemplate> restTemplateCache; /** - * Unique key for any useHttpProxy and disablesSslVerification combination. + * Unique key for any withHttpProxy, skipSslVerification, and extra map combination. - private static class CacheKey { - private final boolean disablesSslVerification; - private final boolean useHttpProxy; - - public CacheKey(boolean disablesSslVerification, boolean useHttpProxy) { - this.disablesSslVerification = disablesSslVerification; - this.useHttpProxy = useHttpProxy; - } - - static CacheKey of(boolean disablesSslVerification, boolean useHttpProxy) { - return new CacheKey(disablesSslVerification, useHttpProxy); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CacheKey cacheKey = (CacheKey) o; - return disablesSslVerification == cacheKey.disablesSslVerification && useHttpProxy == cacheKey.useHttpProxy; - } - - @Override - public int hashCode() { - return Objects.hash(disablesSslVerification, useHttpProxy); - } - } + record CacheKey(boolean skipSslVerification, boolean withHttpProxy, Map<String, String> extra) {} public ContainerImageRestTemplateFactory(RestTemplateBuilder restTemplateBuilder, ContainerRegistryProperties properties) { this.restTemplateBuilder = restTemplateBuilder; @@ -124,80 +107,136 @@ public ContainerImageRestTemplateFactory(RestTemplateBuilder restTemplateBuilder this.restTemplateCache = new ConcurrentHashMap<>(); } + /** + * Obtain a configured RestTemplate for interacting with a container registry. + * @param skipSslVerification indicates we want to trust all certificates. + * @param withHttpProxy indicates we want to use the configured proxy. + * @return A configured RestTemplate with the given SSL and proxy settings. + */ public RestTemplate getContainerRestTemplate(boolean skipSslVerification, boolean withHttpProxy) { + return this.getContainerRestTemplate(skipSslVerification, withHttpProxy, Collections.emptyMap()); + } + + /** + * Obtain a configured RestTemplate for interacting with a container registry. + * @param skipSslVerification indicates that we want to trust all certificates. + * @param withHttpProxy indicates we want to use the configured proxy host and port. + * @param extra extra settings; adding the entry custom-registry=registry-domain causes Authorization headers to be dropped on redirects to that registry. + * @return A configured RestTemplate with the given SSL, proxy, and extra settings. 
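+ * <p>Usage sketch (editor's addition; the host value is hypothetical):
+ * <pre>
+ * RestTemplate first = factory.getContainerRestTemplate(true, false, Map.of("custom-registry", "registry.example.com"));
+ * RestTemplate second = factory.getContainerRestTemplate(true, false, Map.of("custom-registry", "registry.example.com"));
+ * // first == second: equal (skipSslVerification, withHttpProxy, extra) combinations map to the same CacheKey
+ * </pre>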
+ */ + public RestTemplate getContainerRestTemplate(boolean skipSslVerification, boolean withHttpProxy, Map<String, String> extra) { + var cacheKey = new CacheKey(skipSslVerification, withHttpProxy, new HashMap<>(extra)); try { - CacheKey cacheKey = CacheKey.of(skipSslVerification, withHttpProxy); - if (!this.restTemplateCache.containsKey(cacheKey)) { - RestTemplate restTemplate = createContainerRestTemplate(skipSslVerification, withHttpProxy); - this.restTemplateCache.putIfAbsent(cacheKey, restTemplate); - } - return this.restTemplateCache.get(cacheKey); + return this.restTemplateCache.computeIfAbsent(cacheKey, (key) -> createContainerRestTemplate(key.skipSslVerification(), key.withHttpProxy(), key.extra())); } catch (Exception e) { - throw new ContainerRegistryException( - "Failed to create Container Image RestTemplate for disableSsl:" - + skipSslVerification + ", httpProxy:" + withHttpProxy, e); + throw new ContainerRegistryException("Failed to create Container Image RestTemplate for disableSsl:" + skipSslVerification + ", httpProxy:" + withHttpProxy, e); } } - private RestTemplate createContainerRestTemplate(boolean skipSslVerification, boolean withHttpProxy) - throws NoSuchAlgorithmException, KeyManagementException { - - if (!skipSslVerification) { - // Create a RestTemplate that uses custom request factory - return this.initRestTemplate(HttpClients.custom(), withHttpProxy); - } - - // Trust manager that blindly trusts all SSL certificates. - TrustManager[] trustAllCerts = new TrustManager[] { - new X509TrustManager() { - public java.security.cert.X509Certificate[] getAcceptedIssuers() { - return new X509Certificate[0]; - } + private RestTemplate createContainerRestTemplate(boolean skipSslVerification, boolean withHttpProxy, Map<String, String> extra) { + HttpClient client = httpClientBuilder(skipSslVerification, extra); + return initRestTemplate(client, withHttpProxy, extra); + } - public void checkClientTrusted(java.security.cert.X509Certificate[] certs, String authType) { - } + /** + * Amazon, Azure and Custom Container Registry services require special treatment for the Authorization headers when + * HTTP requests are forwarded to 3rd party services. + * + * Amazon: + * The Amazon S3 API supports two Authentication Methods (https://amzn.to/2Dg9sga): + * (1) HTTP Authorization header and (2) Query string parameters (often referred to as a pre-signed URL). + * + * But only one auth mechanism is allowed at a time. If the HTTP request contains both an Authorization header and + * pre-signed URL parameters, an error is thrown. + * + * Container Registries often use AmazonS3 as a backend object store. If an HTTP Authorization header + * is used to authenticate with the Container Registry and this registry then redirects the request to S3 storage + * using pre-signed URL authentication, the redirection will fail. + * + * The solution is to implement an HTTP redirect strategy that removes the original Authorization headers when the request is + * redirected toward an Amazon signed URL. + * + * Azure: + * Azure has the same type of issue as S3, so the header needs to be dropped as well. + * (https://docs.microsoft.com/en-us/azure/container-registry/container-registry-faq#authentication-information-is-not-given-in-the-correct-format-on-direct-rest-api-calls) + * + * Custom: + * A custom Container Registry may have the same type of issue as S3, so the header needs to be dropped as well. 
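+ *
+ * Editor's sketch of the guarded redirect flow (hypothetical URLs):
+ * GET https://registry.example.com/v2/myapp/blobs/sha256:... is sent with "Authorization: Bearer ...",
+ * the registry answers 302 with Location: https://bucket.s3.amazonaws.com/...?X-Amz-Credential=...,
+ * and the follow-up GET is issued without the Authorization header.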
+ */ + private HttpClient httpClientBuilder(boolean skipSslVerification, Map<String, String> extra) { - public void checkServerTrusted(java.security.cert.X509Certificate[] certs, String authType) { - } + try { + SslContextBuilder builder = skipSslVerification + ? SslContextBuilder.forClient().trustManager(InsecureTrustManagerFactory.INSTANCE) + : SslContextBuilder.forClient(); + SslContext sslContext = builder.build(); + HttpClient client = HttpClient.create().secure(sslContextSpec -> sslContextSpec.sslContext(sslContext)); + + return client.followRedirect(true, (entries, httpClientRequest) -> { + if (shouldRemoveAuthorization(httpClientRequest, extra)) { + HttpHeaders httpHeaders = httpClientRequest.requestHeaders(); + removeAuthorization(httpHeaders); + removeAuthorization(entries); + httpClientRequest.headers(httpHeaders); } - }; - SSLContext sslContext = SSLContext.getInstance("SSL"); - // Install trust manager to SSL Context. - sslContext.init(null, trustAllCerts, new java.security.SecureRandom()); - - // Create a RestTemplate that uses custom request factory - return initRestTemplate( - HttpClients.custom() - .setSSLContext(sslContext) - .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE), - withHttpProxy); + }); + } + catch (SSLException e) { + throw new RuntimeException(e); + } + } + + private boolean shouldRemoveAuthorization(HttpClientRequest request, Map<String, String> extra) { + HttpMethod method = request.method(); + if (!method.equals(HttpMethod.GET) && !method.equals(HttpMethod.HEAD)) { + return false; + } + if (request.uri().contains(AMZ_CREDENTIAL)) { + return true; + } + if (request.uri().contains(AZURECR_URI_SUFFIX)) { + return request.requestHeaders() + .entries() + .stream() + .anyMatch(entry -> entry.getKey().equalsIgnoreCase(AUTHORIZATION_HEADER) + && entry.getValue().contains(BASIC_AUTH)); + } + return extra.containsKey(CUSTOM_REGISTRY) && request.uri().contains(extra.get(CUSTOM_REGISTRY)); } - private RestTemplate initRestTemplate(HttpClientBuilder clientBuilder, boolean withHttpProxy) { + private static void removeAuthorization(HttpHeaders headers) { + Set<String> authHeaders = headers.entries() + .stream() + .filter(entry -> entry.getKey().equalsIgnoreCase(AUTHORIZATION_HEADER)).map(Map.Entry::getKey).collect(Collectors.toSet()); + authHeaders.forEach(authHeader -> headers.remove(authHeader)); + } - clientBuilder.setDefaultRequestConfig(RequestConfig.custom().setCookieSpec(CookieSpecs.STANDARD).build()); + private RestTemplate initRestTemplate(HttpClient client, boolean withHttpProxy, Map<String, String> extra) { // Set the HTTP proxy if configured. if (withHttpProxy) { if (!properties.getHttpProxy().isEnabled()) { throw new ContainerRegistryException("Registry Configuration uses an HttpProxy but none is configured!"); } - HttpHost proxy = new HttpHost(properties.getHttpProxy().getHost(), properties.getHttpProxy().getPort()); - clientBuilder.setProxy(proxy); + client = client.proxy(typeSpec -> typeSpec + .type(ProxyProvider.Proxy.HTTP) + .host(properties.getHttpProxy().getHost()) + .port(properties.getHttpProxy().getPort())); } - HttpComponentsClientHttpRequestFactory customRequestFactory = - new HttpComponentsClientHttpRequestFactory( - clientBuilder - .setRedirectStrategy(new DropAuthorizationHeaderOnSignedS3RequestRedirectStrategy()) - .build()); + ClientHttpRequestFactory customRequestFactory = new ReactorNettyClientRequestFactory(client); // DockerHub response's media-type is application/octet-stream although the content is in JSON. 
- // Therefore extend the MappingJackson2HttpMessageConverter media-types to include application/octet-stream. + // Similarly, the GitHub CR response's media-type is always text/plain although the content is in JSON. + // Therefore we extend the MappingJackson2HttpMessageConverter media-types to + // include application/octet-stream and text/plain. MappingJackson2HttpMessageConverter octetSupportJsonConverter = new MappingJackson2HttpMessageConverter(); - List<MediaType> mediaTypeList = new ArrayList<>(octetSupportJsonConverter.getSupportedMediaTypes()); + ArrayList<MediaType> mediaTypeList = new ArrayList<>(octetSupportJsonConverter.getSupportedMediaTypes()); + mediaTypeList.add(MediaType.APPLICATION_JSON); mediaTypeList.add(MediaType.APPLICATION_OCTET_STREAM); + mediaTypeList.add(MediaType.TEXT_PLAIN); octetSupportJsonConverter.setSupportedMediaTypes(mediaTypeList); return restTemplateBuilder diff --git a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryAutoConfiguration.java b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryAutoConfiguration.java index 9074624dcb..08eeddba2e 100644 --- a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryAutoConfiguration.java +++ b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryAutoConfiguration.java @@ -24,6 +24,7 @@ import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.web.client.RestTemplateBuilder; @@ -34,7 +35,6 @@ import org.springframework.cloud.dataflow.container.registry.authorization.DockerOAuth2RegistryAuthorizer; import org.springframework.cloud.dataflow.container.registry.authorization.RegistryAuthorizer; import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; import org.springframework.util.CollectionUtils; import org.springframework.util.StringUtils; @@ -44,8 +44,8 @@ * * @author Christian Tzolov * @author Ilayaperumal Gopinathan */ -@Configuration -@EnableConfigurationProperties({ ContainerRegistryProperties.class }) +@AutoConfiguration +@EnableConfigurationProperties({ContainerRegistryProperties.class}) public class ContainerRegistryAutoConfiguration { private static final Logger logger = LoggerFactory.getLogger(ContainerRegistryAutoConfiguration.class); @@ -149,7 +149,7 @@ public Map registryConfigurationMap(Cont } } - logger.info("Final Registry Configurations: " + registryConfigurationMap); + logger.debug("Final Registry Configurations: " + registryConfigurationMap); return registryConfigurationMap; } diff --git a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryConfiguration.java b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryConfiguration.java index d332ab512e..6cb7450861 100644 --- a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryConfiguration.java +++ 
b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryConfiguration.java @@ -23,38 +23,38 @@ /** * Configurations specific for each target Container Registry provider/instance. - * + *

* The Docker Hub configuration is set by default. Additional registries can be configured through the * {@link ContainerRegistryProperties#getRegistryConfigurations()} properties like this: * - * - * Configure Arifactory/JFrog private container registry: - * - spring.cloud.dataflow.container.registry-configurations[springsourcejfrog].registry-host=springsource-docker-private-local.jfrog.io - * - spring.cloud.dataflow.container.registry-configurations[springsourcejfrog].authorization-type=basicauth - * - spring.cloud.dataflow.container.registry-configurations[springsourcejfrog].user=[artifactory user] - * - spring.cloud.dataflow.container.registry-configurations[springsourcejfrog].secret=[artifactory encryptedkey] - * - * Configure Amazon ECR private registry: - * - spring.cloud.dataflow.container.registry-configurations[myamazonaws].registry-host=283191309520.dkr.ecr.us-west-1.amazonaws.com - * - spring.cloud.dataflow.container.registry-configurations[myamazonaws].authorization-type=awsecr - * - spring.cloud.dataflow.container.registry-configurations[myamazonaws].user=[your AWS accessKey] - * - spring.cloud.dataflow.container.registry-configurations[myamazonaws].secret=[your AWS secretKey] - * - spring.cloud.dataflow.container.registry-configurations[myamazonaws].extra[region]=us-west-1 - * - spring.cloud.dataflow.container.registry-configurations[myamazonaws].extra[registryIds]=283191309520 - * - * Configure Azure private container registry - * - spring.cloud.dataflow.container.registry-configurations[tzolovazureregistry].registry-host=tzolovazureregistry.azurecr.io - * - spring.cloud.dataflow.container.registry-configurations[tzolovazureregistry].authorization-type=basicauth - * - spring.cloud.dataflow.container.registry-configurations[tzolovazureregistry].user=[your Azure registry username] - * - spring.cloud.dataflow.container.registry-configurations[tzolovazureregistry].secret=[your Azure registry access password] - * - * Harbor Registry. Same as DockerHub but with different registryAuthUri - * - spring.cloud.dataflow.container.registry-configurations[harbor].registry-host=demo.goharbor.io - * - spring.cloud.dataflow.container.registry-configurations[harbor].authorization-type=dockeroauth2 - * - spring.cloud.dataflow.container.registry-configurations[harbor].user=admin - * - spring.cloud.dataflow.container.registry-configurations[harbor].secret=Harbor12345 - * - spring.cloud.dataflow.container.registry-configurations[harbor].extra[registryAuthUri]=https://demo.goharbor.io/service/token?service=harbor-registry&scope=repository:{repository}:pull - * + * {@code + * Configure Artifactory/JFrog private container registry: + * - spring.cloud.dataflow.container.registry-configurations[springsourcejfrog].registry-host=springsource-docker-private-local.jfrog.io + * - spring.cloud.dataflow.container.registry-configurations[springsourcejfrog].authorization-type=basicauth + * - spring.cloud.dataflow.container.registry-configurations[springsourcejfrog].user=[artifactory user] + * - spring.cloud.dataflow.container.registry-configurations[springsourcejfrog].secret=[artifactory encryptedkey] + *

+ * Configure Amazon ECR private registry: + * - spring.cloud.dataflow.container.registry-configurations[myamazonaws].registry-host=283191309520.dkr.ecr.us-west-1.amazonaws.com + * - spring.cloud.dataflow.container.registry-configurations[myamazonaws].authorization-type=awsecr + * - spring.cloud.dataflow.container.registry-configurations[myamazonaws].user=[your AWS accessKey] + * - spring.cloud.dataflow.container.registry-configurations[myamazonaws].secret=[your AWS secretKey] + * - spring.cloud.dataflow.container.registry-configurations[myamazonaws].extra[region]=us-west-1 + * - spring.cloud.dataflow.container.registry-configurations[myamazonaws].extra[registryIds]=283191309520 + *

+ * Configure Azure private container registry + * - spring.cloud.dataflow.container.registry-configurations[tzolovazureregistry].registry-host=tzolovazureregistry.azurecr.io + * - spring.cloud.dataflow.container.registry-configurations[tzolovazureregistry].authorization-type=basicauth + * - spring.cloud.dataflow.container.registry-configurations[tzolovazureregistry].user=[your Azure registry username] + * - spring.cloud.dataflow.container.registry-configurations[tzolovazureregistry].secret=[your Azure registry access password] + *

+ * Harbor Registry. Same as DockerHub but with different registryAuthUri + * - spring.cloud.dataflow.container.registry-configurations[harbor].registry-host=demo.goharbor.io + * - spring.cloud.dataflow.container.registry-configurations[harbor].authorization-type=dockeroauth2 + * - spring.cloud.dataflow.container.registry-configurations[harbor].user=admin + * - spring.cloud.dataflow.container.registry-configurations[harbor].secret=Harbor12345 + * - spring.cloud.dataflow.container.registry-configurations[harbor].extra[registryAuthUri]=https://demo.goharbor.io/service/token?service=harbor-registry&scope=repository:repository-name:pull + * } * * @author Christian Tzolov */ @@ -92,7 +92,7 @@ public enum AuthorizationType { /** * Container Registry Host (and optional port). Must be unique per registry. - * + *

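* (Editor's example: for an image reference such as demo.repository.io/myorg/myapp:1.0.0,
* the configuration is looked up under the key "demo.repository.io".)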
* Used as a key to map a container image to the target registry where it is stored. */ private String registryHost; @@ -102,6 +102,7 @@ public enum AuthorizationType { * (determined by the {@link #authorizationType}) to authorize the registry access. */ private String user; + private String secret; /** @@ -197,14 +198,14 @@ public void setUseHttpProxy(boolean useHttpProxy) { @Override public String toString() { return "ContainerRegistryConfiguration{" + - "registryHost='" + registryHost + '\'' + - ", user='" + user + '\'' + - ", secret='****'" + '\'' + - ", authorizationType=" + authorizationType + - ", manifestMediaType='" + manifestMediaType + '\'' + - ", disableSslVerification='" + disableSslVerification + '\'' - +", useHttpProxy='" + useHttpProxy + '\'' + - ", extra=" + extra + - '}'; + "registryHost='" + registryHost + '\'' + + ", user='" + user + '\'' + + ", secret='****'" + '\'' + + ", authorizationType=" + authorizationType + + ", manifestMediaType='" + manifestMediaType + '\'' + + ", disableSslVerification='" + disableSslVerification + '\'' + + ", useHttpProxy='" + useHttpProxy + '\'' + + ", extra=" + extra + + '}'; } } diff --git a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryService.java b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryService.java index 3105c7f0ff..5719fe41d0 100644 --- a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryService.java +++ b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryService.java @@ -40,6 +40,7 @@ * * @author Ilayaperumal Gopinathan * @author Christian Tzolov + * @author Corneil du Plessis */ public class ContainerRegistryService { @@ -109,14 +110,16 @@ public List getTags(String registryName, String repositoryName) { .build().expand(repositoryName); RestTemplate requestRestTemplate = this.containerImageRestTemplateFactory.getContainerRestTemplate( - containerRegistryConfiguration.isDisableSslVerification(), containerRegistryConfiguration.isUseHttpProxy()); + containerRegistryConfiguration.isDisableSslVerification(), + containerRegistryConfiguration.isUseHttpProxy(), + containerRegistryConfiguration.getExtra()); ResponseEntity<Map> manifest = requestRestTemplate.exchange(manifestUriComponents.toUri(), HttpMethod.GET, new HttpEntity<>(httpHeaders), Map.class); return (List) manifest.getBody().get(TAGS_FIELD); } catch (Exception e) { - logger.error(String.format("Exception getting tag information for the %s from %s", repositoryName, registryName)); + logger.error("Exception getting tag information for the {} from {}", repositoryName, registryName); } return null; } @@ -145,14 +148,16 @@ public Map getRepositories(String registryName) { RestTemplate requestRestTemplate = this.containerImageRestTemplateFactory.getContainerRestTemplate( - containerRegistryConfiguration.isDisableSslVerification(), containerRegistryConfiguration.isUseHttpProxy()); + containerRegistryConfiguration.isDisableSslVerification(), + containerRegistryConfiguration.isUseHttpProxy(), + containerRegistryConfiguration.getExtra()); ResponseEntity<Map> manifest = requestRestTemplate.exchange(manifestUriComponents.toUri(), HttpMethod.GET, new HttpEntity<>(httpHeaders), Map.class); return manifest.getBody(); } catch (Exception e) { - logger.error(String.format("Exception getting 
repositories from %s", registryName)); + logger.error("Exception getting repositories from {}", registryName); } return null; } @@ -184,7 +189,7 @@ public ContainerRegistryRequest getRegistryRequest(String imageName) { } RestTemplate requestRestTemplate = this.containerImageRestTemplateFactory.getContainerRestTemplate( - registryConf.isDisableSslVerification(), registryConf.isUseHttpProxy()); + registryConf.isDisableSslVerification(), registryConf.isUseHttpProxy(), registryConf.getExtra()); return new ContainerRegistryRequest(containerImage, registryConf, authHttpHeaders, requestRestTemplate); } @@ -218,14 +223,14 @@ public T getImageBlob(ContainerRegistryRequest registryRequest, String confi // Docker Registry HTTP V2 API pull config blob UriComponents blobUriComponents = UriComponentsBuilder.newInstance() - .scheme(HTTPS_SCHEME) - .host(containerImage.getHostname()) - .port(StringUtils.hasText(containerImage.getPort()) ? containerImage.getPort() : null) - .path(IMAGE_BLOB_DIGEST_PATH) - .build().expand(containerImage.getRepository(), configDigest); + .scheme(HTTPS_SCHEME) + .host(containerImage.getHostname()) + .port(StringUtils.hasText(containerImage.getPort()) ? containerImage.getPort() : null) + .path(IMAGE_BLOB_DIGEST_PATH) + .build().expand(containerImage.getRepository(), configDigest); ResponseEntity blob = registryRequest.getRestTemplate().exchange(blobUriComponents.toUri(), - HttpMethod.GET, new HttpEntity<>(httpHeaders), responseClassType); + HttpMethod.GET, new HttpEntity<>(httpHeaders), responseClassType); return blob.getBody(); } diff --git a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToRegistryConfigurationConverter.java b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToRegistryConfigurationConverter.java index cbacdf57c3..2a8afd5da5 100644 --- a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToRegistryConfigurationConverter.java +++ b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToRegistryConfigurationConverter.java @@ -16,6 +16,7 @@ package org.springframework.cloud.dataflow.container.registry.authorization; +import java.net.URI; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -43,43 +44,46 @@ /** * @author Christian Tzolov + * @author Corneil du Plessis */ public class DockerConfigJsonSecretToRegistryConfigurationConverter implements Converter> { private static final Logger logger = LoggerFactory.getLogger(DockerConfigJsonSecretToRegistryConfigurationConverter.class); + public static final String BEARER_REALM_ATTRIBUTE = "Bearer realm"; + public static final String SERVICE_ATTRIBUTE = "service"; + public static final String HTTPS_INDEX_DOCKER_IO_V_1 = "https://index.docker.io/v1/"; + public static final String DOCKER_IO = "docker.io"; + public static final String REGISTRY_1_DOCKER_IO = "registry-1.docker.io"; - // private final RestTemplate restTemplate; - private final ContainerImageRestTemplateFactory containerImageRestTemplate; + private final ContainerImageRestTemplateFactory containerImageRestTemplateFactory; private final Map httpProxyPerHost; private final boolean replaceDefaultDockerRegistryServer; - public 
DockerConfigJsonSecretToRegistryConfigurationConverter(ContainerRegistryProperties properties, - ContainerImageRestTemplateFactory containerImageRestTemplate) { - + public DockerConfigJsonSecretToRegistryConfigurationConverter( + ContainerRegistryProperties properties, + ContainerImageRestTemplateFactory containerImageRestTemplateFactory) { this.replaceDefaultDockerRegistryServer = properties.isReplaceDefaultDockerRegistryServer(); - // Retrieve registry configurations, explicitly declared via properties. this.httpProxyPerHost = properties.getRegistryConfigurations().entrySet().stream() - .collect(Collectors.toMap(e -> e.getValue().getRegistryHost(), e -> e.getValue().isUseHttpProxy())); - this.containerImageRestTemplate = containerImageRestTemplate; + .collect(Collectors.toMap(e -> e.getValue().getRegistryHost(), e -> e.getValue().isUseHttpProxy())); + this.containerImageRestTemplateFactory = containerImageRestTemplateFactory; } /** * The .dockerconfigjson value has the following format: * - * {"auths":{"demo.goharbor.io":{"username":"admin","password":"Harbor12345","auth":"YWRtaW46SGFyYm9yMTIzNDU="}}} + * {"auths":{"demo.goharbor.io":{"username":"admin","password":"Harbor12345","auth":"YWRtaW46SGFyYm9yMTIzNDU="}}} * - * + *

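* Editor's sketch (hypothetical host and credentials):
* converter.convert("{\"auths\":{\"registry.example.com\":{\"username\":\"user\",\"password\":\"pw\"}}}")
* returns a single-entry map keyed by "registry.example.com" whose configuration carries that user and secret.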
* The map key is the registry host name and the value contains the username and password to access this registry. * * @param dockerconfigjson the secret content to convert into a RegistryConfiguration map. - * * @return The (host-name, registry-configuration) map constructed from the dockerconfigjson content. */ @Override @@ -98,17 +102,15 @@ public Map convert(String dockerconfigjs rc.setSecret((String) registryMap.get("password")); Optional<String> tokenAccessUrl = getDockerTokenServiceUri(rc.getRegistryHost(), - true, this.httpProxyPerHost.getOrDefault(rc.getRegistryHost(), false)); + true, this.httpProxyPerHost.getOrDefault(rc.getRegistryHost(), false)); if (tokenAccessUrl.isPresent()) { rc.setAuthorizationType(ContainerRegistryConfiguration.AuthorizationType.dockeroauth2); rc.getExtra().put(DockerOAuth2RegistryAuthorizer.DOCKER_REGISTRY_AUTH_URI_KEY, tokenAccessUrl.get()); - } - else { + } else { if (StringUtils.isEmpty(rc.getUser()) && StringUtils.isEmpty(rc.getSecret())) { rc.setAuthorizationType(ContainerRegistryConfiguration.AuthorizationType.anonymous); - } - else { + } else { rc.setAuthorizationType(ContainerRegistryConfiguration.AuthorizationType.basicauth); } } @@ -118,8 +120,7 @@ public Map convert(String dockerconfigjs registryConfigurationMap.put(rc.getRegistryHost(), rc); } return registryConfigurationMap; - } - catch (Exception e) { + } catch (Exception e) { logger.error("Failed to parse the Secrets in dockerconfigjson"); } } @@ -134,7 +135,7 @@ public Map convert(String dockerconfigjs * To be able to reuse docker registry secrets for the purpose of imagePullSecrets and SCDF Container Metadata retrieval, * by default the `https://index.docker.io/v1/` and `docker.io` docker-server values found in any mounted dockerconfigjson secret * are replaced by `registry-1.docker.io`. - * + *

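* (Editor's note: registry-1.docker.io is the host that actually serves the Docker Registry
* V2 API for Docker Hub, so requests made with the same secret resolve against the right endpoint.)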
* You can override this behaviour by setting replaceDefaultDockerRegistryServer to false. * * @param dockerConfigJsonRegistryHost Docker-Server property value as extracted from the dockerconfigjson. @@ -142,40 +143,61 @@ public Map convert(String dockerconfigjs */ private String replaceDefaultDockerRegistryServerUrl(String dockerConfigJsonRegistryHost) { return (this.replaceDefaultDockerRegistryServer && (DOCKER_IO.equals(dockerConfigJsonRegistryHost) - || HTTPS_INDEX_DOCKER_IO_V_1.equals(dockerConfigJsonRegistryHost))) ? - REGISTRY_1_DOCKER_IO : dockerConfigJsonRegistryHost; + || HTTPS_INDEX_DOCKER_IO_V_1.equals(dockerConfigJsonRegistryHost))) ? + REGISTRY_1_DOCKER_IO : dockerConfigJsonRegistryHost; } /** * Best effort to construct a valid Docker OAuth2 token authorization uri from the HTTP 401 Error response. - * + *

* Hit the http://registry-host/v2/ endpoint and parse the authorization error (401) response. * If a Www-Authenticate response header exists and contains "Bearer realm" and "service" attributes, then use * them to construct the Token Endpoint URI. - * + *

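* Example (editor's sketch, hypothetical registry): a 401 response carrying
* Www-Authenticate: Bearer realm="https://auth.example.com/token",service="registry.example.com"
* yields the token endpoint https://auth.example.com/token?service=registry.example.com&scope=repository:{repository}:pull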
* Returns an empty Optional for non-401 errors or invalid Www-Authenticate content. - * + *

* Applicable only for dockeroauth2 authorization-type. * * @param registryHost Container Registry host to retrieve the tokenServiceUri for. + * @param disableSSl Disable SSL verification. + * @param useHttpProxy Enable the use of the HTTP proxy. * @return Returns the Token Endpoint Url or an empty Optional. */ public Optional<String> getDockerTokenServiceUri(String registryHost, boolean disableSSl, boolean useHttpProxy) { try { - RestTemplate restTemplate = this.containerImageRestTemplate.getContainerRestTemplate(disableSSl, useHttpProxy); - restTemplate.exchange( - UriComponentsBuilder.newInstance().scheme("https").host(registryHost).path("v2/").build().toUri(), - HttpMethod.GET, new HttpEntity<>(new HttpHeaders()), Map.class); + RestTemplate restTemplate = this.containerImageRestTemplateFactory.getContainerRestTemplate(disableSSl, useHttpProxy, Collections.emptyMap()); + String host = registryHost; + Integer port = null; + if (registryHost.contains(":")) { + int colon = registryHost.lastIndexOf(":"); + String portString = registryHost.substring(colon + 1); + try { + int intPort = Integer.parseInt(portString); + if (Integer.toString(intPort).equals(portString) && intPort > 0 && intPort <= 65535) { + port = intPort; + host = registryHost.substring(0, colon); + } + } catch (NumberFormatException x) { + // not a valid integer port + } + } + UriComponentsBuilder uriComponentsBuilder = UriComponentsBuilder.newInstance().scheme("https").host(host); + if (port != null) { + uriComponentsBuilder.port(port); + } + uriComponentsBuilder.path("v2/"); + URI uri = uriComponentsBuilder.build().toUri(); + logger.info("getDockerTokenServiceUri:" + uri); + restTemplate.exchange(uri, HttpMethod.GET, new HttpEntity<>(new HttpHeaders()), Map.class); return Optional.empty(); - } - catch (HttpClientErrorException httpError) { + } catch (HttpClientErrorException httpError) { - if (httpError.getRawStatusCode() != 401) { + if (httpError.getStatusCode().value() != 401) { return Optional.empty(); } if (httpError.getResponseHeaders() == null - || !httpError.getResponseHeaders().containsKey(HttpHeaders.WWW_AUTHENTICATE)) { + || !httpError.getResponseHeaders().containsKey(HttpHeaders.WWW_AUTHENTICATE)) { return Optional.empty(); } @@ -188,18 +210,18 @@ public Optional getDockerTokenServiceUri(String registryHost, boolean di // Extract the "Bearer realm" and "service" attributes from the Www-Authenticate value Map<String, String> wwwAuthenticateAttributes = Stream.of(wwwAuthenticate.get(0).split(",")) - .map(s -> s.split("=")) - .collect(Collectors.toMap(b -> b[0], b -> b[1])); + .map(s -> s.split("=")) + .collect(Collectors.toMap(b -> b[0], b -> b[1])); if (CollectionUtils.isEmpty(wwwAuthenticateAttributes) - || !wwwAuthenticateAttributes.containsKey(BEARER_REALM_ATTRIBUTE) - || !wwwAuthenticateAttributes.containsKey(SERVICE_ATTRIBUTE)) { + || !wwwAuthenticateAttributes.containsKey(BEARER_REALM_ATTRIBUTE) + || !wwwAuthenticateAttributes.containsKey(SERVICE_ATTRIBUTE)) { logger.warn("Invalid Www-Authenticate: {} for container registry {}", wwwAuthenticate, registryHost); return Optional.empty(); } String tokenServiceUri = String.format("%s?service=%s&scope=repository:{repository}:pull", - wwwAuthenticateAttributes.get(BEARER_REALM_ATTRIBUTE), wwwAuthenticateAttributes.get(SERVICE_ATTRIBUTE)); + wwwAuthenticateAttributes.get(BEARER_REALM_ATTRIBUTE), wwwAuthenticateAttributes.get(SERVICE_ATTRIBUTE)); // remove redundant quotes. 
diff --git a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerOAuth2RegistryAuthorizer.java b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerOAuth2RegistryAuthorizer.java
index 1fbe38d823..3d2b9f02bf 100644
--- a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerOAuth2RegistryAuthorizer.java
+++ b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerOAuth2RegistryAuthorizer.java
@@ -122,7 +122,9 @@ public HttpHeaders getAuthorizationHeaders(ContainerImage containerImage, Contai
 	}
 
 	private RestTemplate getRestTemplate(ContainerRegistryConfiguration registryConfiguration) {
-		return this.containerImageRestTemplate.getContainerRestTemplate(registryConfiguration.isDisableSslVerification(),
-				registryConfiguration.isUseHttpProxy());
+		return this.containerImageRestTemplate.getContainerRestTemplate(
+				registryConfiguration.isDisableSslVerification(),
+				registryConfiguration.isUseHttpProxy(),
+				registryConfiguration.getExtra());
 	}
 }
diff --git a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DropAuthorizationHeaderOnSignedS3RequestRedirectStrategy.java b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DropAuthorizationHeaderOnSignedS3RequestRedirectStrategy.java
deleted file mode 100644
index 596e152583..0000000000
--- a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/DropAuthorizationHeaderOnSignedS3RequestRedirectStrategy.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * Copyright 2020-2020 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.springframework.cloud.dataflow.container.registry.authorization;
-
-import java.net.URI;
-import java.util.Arrays;
-
-import org.apache.commons.lang3.StringUtils;
-import org.apache.http.Header;
-import org.apache.http.HttpRequest;
-import org.apache.http.HttpResponse;
-import org.apache.http.ProtocolException;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpHead;
-import org.apache.http.client.methods.HttpRequestBase;
-import org.apache.http.client.methods.HttpUriRequest;
-import org.apache.http.impl.client.DefaultRedirectStrategy;
-import org.apache.http.protocol.HttpContext;
-
-/**
- * The Amazon S3 API supports two Authentication Methods (https://amzn.to/2Dg9sga):
- * (1) HTTP Authorization header and (2) Query string parameters (often referred to as a pre-signed URL).
- *
- * But only one auth mechanism is allowed at a time. If the http request contains both an Authorization header and
- * an pre-signed URL parameters then an error is thrown.
- *
- * Container Registries often use AmazonS3 as a backend object store. If HTTP Authorization header
- * is used to authenticate with the Container Registry and then this registry redirect the request to a S3 storage
- * using pre-signed URL authentication, the redirection will fail.
- *
- * Solution is to implement a HTTP redirect strategy that removes the original Authorization headers when the request is
- * redirected toward an Amazon signed URL.
- *
- * @author Adam J. Weigold
- */
-public class DropAuthorizationHeaderOnSignedS3RequestRedirectStrategy extends DefaultRedirectStrategy {
-
-	private static final String AMZ_CREDENTIAL = "X-Amz-Credential";
-
-	private static final String AUTHORIZATION_HEADER = "Authorization";
-
-	@Override
-	public HttpUriRequest getRedirect(final HttpRequest request, final HttpResponse response,
-			final HttpContext context) throws ProtocolException {
-
-		HttpUriRequest httpUriRequest = super.getRedirect(request, response, context);
-
-		final String query = httpUriRequest.getURI().getQuery();
-
-		if (StringUtils.isNoneEmpty(query) && query.contains(AMZ_CREDENTIAL)) {
-			final String method = request.getRequestLine().getMethod();
-			if (StringUtils.isNoneEmpty(method)
-					&& (method.equalsIgnoreCase(HttpHead.METHOD_NAME) || method.equalsIgnoreCase(HttpGet.METHOD_NAME))) {
-				return new DropAuthorizationHeaderHttpRequestBase(httpUriRequest.getURI(), method);
-			}
-		}
-
-		return httpUriRequest;
-	}
-
-	/**
-	 * Overrides all header setter methods to filter out the Authorization headers.
-	 */
-	static class DropAuthorizationHeaderHttpRequestBase extends HttpRequestBase {
-
-		private final String method;
-
-		DropAuthorizationHeaderHttpRequestBase(URI uri, String method) {
-			super();
-			setURI(uri);
-			this.method = method;
-		}
-
-		@Override
-		public String getMethod() {
-			return this.method;
-		}
-
-		@Override
-		public void addHeader(Header header) {
-			if (!header.getName().equalsIgnoreCase(AUTHORIZATION_HEADER)) {
-				super.addHeader(header);
-			}
-		}
-
-		@Override
-		public void addHeader(String name, String value) {
-			if (!name.equalsIgnoreCase(AUTHORIZATION_HEADER)) {
-				super.addHeader(name, value);
-			}
-		}
-
-		@Override
-		public void setHeader(Header header) {
-			if (!header.getName().equalsIgnoreCase(AUTHORIZATION_HEADER)) {
-				super.setHeader(header);
-			}
-		}
-
-		@Override
-		public void setHeader(String name, String value) {
-			if (!name.equalsIgnoreCase(AUTHORIZATION_HEADER)) {
-				super.setHeader(name, value);
-			}
-		}
-
-		@Override
-		public void setHeaders(Header[] headers) {
-			Header[] filteredHeaders = Arrays.stream(headers)
-					.filter(header -> !header.getName().equalsIgnoreCase(AUTHORIZATION_HEADER))
-					.toArray(Header[]::new);
-			super.setHeaders(filteredHeaders);
-		}
-	}
-}
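Note (illustration, not part of the patch): the deleted class existed because S3 accepts exactly one authentication mechanism per request, so a registry redirect to a pre-signed S3 URL must not also carry the registry's Authorization header. A hedged sketch of that rule using only the JDK's HttpClient; the pre-signed URL below is a made-up placeholder:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class PresignedRedirectDemo {
	public static void main(String[] args) throws Exception {
		HttpClient client = HttpClient.newBuilder()
				.followRedirects(HttpClient.Redirect.NEVER) // handle the registry's redirect manually
				.build();
		// The query string already authenticates the request (X-Amz-Credential / X-Amz-Signature),
		// so the follow-up request is built fresh, without the Authorization header.
		URI presigned = URI.create("https://example-bucket.s3.amazonaws.com/blob"
				+ "?X-Amz-Credential=EXAMPLEKEY&X-Amz-Signature=EXAMPLESIG");
		HttpRequest toS3 = HttpRequest.newBuilder(presigned).GET().build();
		HttpResponse<byte[]> blob = client.send(toS3, HttpResponse.BodyHandlers.ofByteArray());
		System.out.println(blob.statusCode());
	}
}
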
diff --git a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/RegistryAuthorizer.java b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/RegistryAuthorizer.java
index b3fdb7249a..03899c135c 100644
--- a/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/RegistryAuthorizer.java
+++ b/spring-cloud-dataflow-container-registry/src/main/java/org/springframework/cloud/dataflow/container/registry/authorization/RegistryAuthorizer.java
@@ -50,6 +50,7 @@ public interface RegistryAuthorizer {
 	/**
 	 * @param registryConfiguration configuration such as credentials and additional information required to obtain the
 	 *        authorized headers.
+	 * @param configProperties configuration properties for obtaining the authorized headers.
 	 * @return Returns HTTP headers, configured with authorization credentials or tokens that would allow access
 	 *        the target Registry.
 	 */
diff --git a/spring-cloud-dataflow-container-registry/src/main/resources/META-INF/spring.factories b/spring-cloud-dataflow-container-registry/src/main/resources/META-INF/spring.factories
deleted file mode 100644
index a22b2b6b1e..0000000000
--- a/spring-cloud-dataflow-container-registry/src/main/resources/META-INF/spring.factories
+++ /dev/null
@@ -1,2 +0,0 @@
-org.springframework.boot.autoconfigure.EnableAutoConfiguration:\
-org.springframework.cloud.dataflow.container.registry.ContainerRegistryAutoConfiguration
diff --git a/spring-cloud-dataflow-container-registry/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports b/spring-cloud-dataflow-container-registry/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports
new file mode 100644
index 0000000000..b1d6a52edb
--- /dev/null
+++ b/spring-cloud-dataflow-container-registry/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports
@@ -0,0 +1 @@
+org.springframework.cloud.dataflow.container.registry.ContainerRegistryAutoConfiguration
\ No newline at end of file
diff --git a/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/ContainerImageParserTests.java b/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/ContainerImageParserTests.java
index 6897b8f6d0..971421844e 100644
--- a/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/ContainerImageParserTests.java
+++ b/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/ContainerImageParserTests.java
@@ -17,26 +17,27 @@
 package org.springframework.cloud.dataflow.container.registry;
 
-import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
 
 import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType;
 
 /**
  * @author Christian Tzolov
+ * @author Corneil du Plessis
 */
-public class ContainerImageParserTests {
+class ContainerImageParserTests {
 
-	private ContainerImageParser containerImageNameParser =
+	private final ContainerImageParser containerImageNameParser =
 			new ContainerImageParser("test-domain.io", "tag654", "official-repo-name");
 
 	@Test
-	public void testParseWithoutDefaults2() {
+	void parseWithoutDefaults2() {
 		ContainerImage containerImageName =
-				containerImageNameParser.parse("dev.registry.pivotal.io/p-scdf-for-kubernetes/spring-cloud-dataflow-composed-task-runner@sha256:c838be82e886b0db98ed847487ec6bf94f12e511ebe5659bd5fbe43597a4b734");
+				containerImageNameParser.parse("dev.registry.tanzu.vmware.com/p-scdf-for-kubernetes/spring-cloud-dataflow-composed-task-runner@sha256:c838be82e886b0db98ed847487ec6bf94f12e511ebe5659bd5fbe43597a4b734");
 
-		assertThat(containerImageName.getHostname()).isEqualTo("dev.registry.pivotal.io");
+		assertThat(containerImageName.getHostname()).isEqualTo("dev.registry.tanzu.vmware.com");
 		assertThat(containerImageName.getRepositoryNamespace()).isEqualTo("p-scdf-for-kubernetes");
 		assertThat(containerImageName.getRepositoryName()).isEqualTo("spring-cloud-dataflow-composed-task-runner");
 		assertThat(containerImageName.getRepositoryTag()).isNull();
@@ -44,14 +45,14 @@ public void testParseWithoutDefaults2() {
 		assertThat(containerImageName.getRepositoryDigest()).isEqualTo("sha256:c838be82e886b0db98ed847487ec6bf94f12e511ebe5659bd5fbe43597a4b734");
 		assertThat(containerImageName.getRepositoryReferenceType()).isEqualTo(ContainerImage.RepositoryReferenceType.digest);
 
-		assertThat(containerImageName.getRegistryHost()).isEqualTo("dev.registry.pivotal.io");
+		assertThat(containerImageName.getRegistryHost()).isEqualTo("dev.registry.tanzu.vmware.com");
 		assertThat(containerImageName.getRepository()).isEqualTo("p-scdf-for-kubernetes/spring-cloud-dataflow-composed-task-runner");
 
-		assertThat(containerImageName.getCanonicalName()).isEqualTo("dev.registry.pivotal.io/p-scdf-for-kubernetes/spring-cloud-dataflow-composed-task-runner@sha256:c838be82e886b0db98ed847487ec6bf94f12e511ebe5659bd5fbe43597a4b734");
+		assertThat(containerImageName.getCanonicalName()).isEqualTo("dev.registry.tanzu.vmware.com/p-scdf-for-kubernetes/spring-cloud-dataflow-composed-task-runner@sha256:c838be82e886b0db98ed847487ec6bf94f12e511ebe5659bd5fbe43597a4b734");
 	}
 
 	@Test
-	public void testParseWithoutDefaults() {
+	void parseWithoutDefaults() {
 		ContainerImage containerImageName =
 				containerImageNameParser.parse("springsource-docker-private-local.jfrog.io:80/scdf/stream/spring-cloud-dataflow-acceptance-image-drivers173:123");
@@ -70,7 +71,7 @@ public void testParseWithoutDefaults() {
 	}
 
 	@Test
-	public void testParseWithoutDigest() {
+	void parseWithoutDigest() {
 		ContainerImage containerImageName =
 				containerImageNameParser.parse("springsource-docker-private-local.jfrog.io:80/scdf/stream/spring-cloud-dataflow-acceptance-image-drivers173@sha256:d44e9ac4c4bf53fb0b5424c35c85230a28eb03f24a2ade5bb7f2cc1462846401");
@@ -89,7 +90,7 @@ public void testParseWithoutDigest() {
 	}
 
 	@Test
-	public void testParseWithDefaults() {
+	void parseWithDefaults() {
 		ContainerImage containerImageName = containerImageNameParser.parse("simple-repo-name");
 
 		assertThat(containerImageName.getHostname()).isEqualTo("test-domain.io");
@@ -104,14 +105,14 @@ public void testParseWithDefaults() {
 	}
 
 	@Test
-	public void testInvalidRegistryHostName() {
-		Assertions.assertThrows(IllegalArgumentException.class, () ->
+	void invalidRegistryHostName() {
+		assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() ->
 				containerImageNameParser.parse("6666#.6:80/scdf/spring-image:123"));
 	}
 
 	@Test
-	public void testInvalidRegistryPart() {
-		Assertions.assertThrows(IllegalArgumentException.class, () ->
+	void invalidRegistryPart() {
+		assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() ->
 				containerImageNameParser.parse("localhost:80bla/scdf/spring-image:123"));
 	}
 }
diff --git a/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryConfigurationPropertiesTest.java b/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryConfigurationPropertiesTest.java
index 7d7a6b79ac..e1fdb48de8 100644
--- a/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryConfigurationPropertiesTest.java
+++ b/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/ContainerRegistryConfigurationPropertiesTest.java
@@ -19,7 +19,7 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import org.springframework.boot.context.properties.EnableConfigurationProperties;
 import org.springframework.boot.test.context.runner.ApplicationContextRunner;
@@ -30,13 +30,14 @@
 
 /**
  * @author Christian Tzolov
+ * @author Corneil du Plessis
 */
-public class ContainerRegistryConfigurationPropertiesTest {
+class ContainerRegistryConfigurationPropertiesTest {
 
 	private final ApplicationContextRunner contextRunner = new ApplicationContextRunner();
 
 	@Test
-	public void registryConfigurationProperties() {
+	void registryConfigurationProperties() {
 		this.contextRunner
 				.withInitializer(context -> {
 					Map map = new HashMap<>();
@@ -81,8 +82,8 @@ public void registryConfigurationProperties() {
 			assertThat(myamazonawsConf.getSecret()).isEqualTo("myawspassword");
 			assertThat(myamazonawsConf.isDisableSslVerification()).isFalse();
 			assertThat(myamazonawsConf.getExtra()).hasSize(2);
-			assertThat(myamazonawsConf.getExtra().get("region")).isEqualTo("us-west-1");
-			assertThat(myamazonawsConf.getExtra().get("registryIds")).isEqualTo("283191309520");
+			assertThat(myamazonawsConf.getExtra()).containsEntry("region", "us-west-1");
+			assertThat(myamazonawsConf.getExtra()).containsEntry("registryIds", "283191309520");
 		});
 	}
diff --git a/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToContainerRegistryConfigurationConverterTest.java b/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToContainerRegistryConfigurationConverterTest.java
index 2c9eb3eb25..e0f255987d 100644
--- a/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToContainerRegistryConfigurationConverterTest.java
+++ b/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/DockerConfigJsonSecretToContainerRegistryConfigurationConverterTest.java
@@ -21,8 +21,8 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mock;
 import org.mockito.MockitoAnnotations;
@@ -36,19 +36,18 @@
 import org.springframework.web.client.HttpClientErrorException;
 import org.springframework.web.client.RestTemplate;
 
-import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.nullValue;
-import static org.junit.Assert.assertThat;
-import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.assertj.core.api.Assertions.assertThat;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.anyBoolean;
+import static org.mockito.ArgumentMatchers.anyMap;
 import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.Mockito.when;
 
 /**
  * @author Christian Tzolov
+ * @author Corneil du Plessis
 */
-public class DockerConfigJsonSecretToContainerRegistryConfigurationConverterTest {
+class DockerConfigJsonSecretToContainerRegistryConfigurationConverterTest {
 
 	@Mock
 	private RestTemplate mockRestTemplate;
@@ -58,15 +57,15 @@ public class DockerConfigJsonSecretToContainerRegistryConfigurationConverterTest
 
 	private DockerConfigJsonSecretToRegistryConfigurationConverter converter;
 
-	@Before
-	public void init() {
+	@BeforeEach
+	void init() {
 		MockitoAnnotations.initMocks(this);
-		when(containerImageRestTemplateFactory.getContainerRestTemplate(anyBoolean(), anyBoolean())).thenReturn(mockRestTemplate);
+		when(containerImageRestTemplateFactory.getContainerRestTemplate(anyBoolean(), anyBoolean(), anyMap())).thenReturn(mockRestTemplate);
 		converter = new DockerConfigJsonSecretToRegistryConfigurationConverter(new ContainerRegistryProperties(), containerImageRestTemplateFactory);
 	}
 
 	@Test
-	public void testConvertAnonymousRegistry() throws URISyntaxException {
+	void convertAnonymousRegistry() throws URISyntaxException {
 		when(mockRestTemplate.exchange(
 				eq(new URI("https://demo.repository.io/v2/_catalog")), eq(HttpMethod.GET), any(), eq(Map.class)))
@@ -75,19 +74,20 @@ public void testConvertAnonymousRegistry() throws URISyntaxException {
 		String b = "{\"auths\":{\"demo.repository.io\":{}}}";
 		Map result = converter.convert(b);
-		assertThat(result.size(), is(1));
-		assertTrue(result.containsKey("demo.repository.io"));
+		assertThat(result)
+				.hasSize(1)
+				.containsKey("demo.repository.io");
 
 		ContainerRegistryConfiguration registryConfiguration = result.get("demo.repository.io");
 
-		assertThat(registryConfiguration.getRegistryHost(), is("demo.repository.io"));
-		assertThat(registryConfiguration.getUser(), nullValue());
-		assertThat(registryConfiguration.getSecret(), nullValue());
-		assertThat(registryConfiguration.getAuthorizationType(), is(ContainerRegistryConfiguration.AuthorizationType.anonymous));
+		assertThat(registryConfiguration.getRegistryHost()).isEqualTo("demo.repository.io");
+		assertThat(registryConfiguration.getUser()).isNull();
+		assertThat(registryConfiguration.getSecret()).isNull();
+		assertThat(registryConfiguration.getAuthorizationType()).isEqualTo(ContainerRegistryConfiguration.AuthorizationType.anonymous);
 	}
 
 	@Test
-	public void testConvertBasicAuthRegistry() throws URISyntaxException {
+	void convertBasicAuthRegistry() throws URISyntaxException {
 		when(mockRestTemplate.exchange(
 				eq(new URI("https://demo.repository.io/v2/_catalog")), eq(HttpMethod.GET), any(), eq(Map.class)))
@@ -96,19 +96,42 @@ public void testConvertBasicAuthRegistry() throws URISyntaxException {
 		String b = "{\"auths\":{\"demo.repository.io\":{\"username\":\"testuser\",\"password\":\"testpassword\",\"auth\":\"YWRtaW46SGFyYm9yMTIzNDU=\"}}}";
 		Map result = converter.convert(b);
-		assertThat(result.size(), is(1));
-		assertTrue(result.containsKey("demo.repository.io"));
+		assertThat(result)
+				.hasSize(1)
+				.containsKey("demo.repository.io");
 
 		ContainerRegistryConfiguration registryConfiguration = result.get("demo.repository.io");
 
-		assertThat(registryConfiguration.getRegistryHost(), is("demo.repository.io"));
-		assertThat(registryConfiguration.getUser(), is("testuser"));
-		assertThat(registryConfiguration.getSecret(), is("testpassword"));
-		assertThat(registryConfiguration.getAuthorizationType(), is(ContainerRegistryConfiguration.AuthorizationType.basicauth));
+		assertThat(registryConfiguration.getRegistryHost()).isEqualTo("demo.repository.io");
+		assertThat(registryConfiguration.getUser()).isEqualTo("testuser");
+		assertThat(registryConfiguration.getSecret()).isEqualTo("testpassword");
+		assertThat(registryConfiguration.getAuthorizationType()).isEqualTo(ContainerRegistryConfiguration.AuthorizationType.basicauth);
 	}
 
 	@Test
-	public void testConvertDockerHubRegistry() throws URISyntaxException {
+	void convertWithPort() throws URISyntaxException {
+
+		when(mockRestTemplate.exchange(
+				eq(new URI("https://demo.repository.io/v2/_catalog")), eq(HttpMethod.GET), any(), eq(Map.class)))
+				.thenReturn(new ResponseEntity<>(new HashMap<>(), HttpStatus.OK));
+
+		String b = "{\"auths\":{\"demo.repository.io:5050\":{\"username\":\"testuser\",\"password\":\"testpassword\",\"auth\":\"YWRtaW46SGFyYm9yMTIzNDU=\"}}}";
+		Map result = converter.convert(b);
+
+		assertThat(result)
+				.hasSize(1)
+				.containsKey("demo.repository.io:5050");
+
+		ContainerRegistryConfiguration registryConfiguration = result.get("demo.repository.io:5050");
+
+		assertThat(registryConfiguration.getRegistryHost()).isEqualTo("demo.repository.io:5050");
+		assertThat(registryConfiguration.getUser()).isEqualTo("testuser");
+		assertThat(registryConfiguration.getSecret()).isEqualTo("testpassword");
+		assertThat(registryConfiguration.getAuthorizationType()).isEqualTo(ContainerRegistryConfiguration.AuthorizationType.basicauth);
+	}
+
+	@Test
+	void convertDockerHubRegistry() throws URISyntaxException {
 
 		HttpHeaders authenticateHeader = new HttpHeaders();
 		authenticateHeader.add("Www-Authenticate",
 				"Bearer realm=\"https://demo.repository.io/service/token/",service=\"demo-registry\",scope=\"registry:category:pull\"");
@@ -121,17 +144,18 @@ public void testConvertDockerHubRegistry() throws URISyntaxException {
 		String b = "{\"auths\":{\"demo.repository.io\":{\"username\":\"testuser\",\"password\":\"testpassword\",\"auth\":\"YWRtaW46SGFyYm9yMTIzNDU=\"}}}";
 		Map result = converter.convert(b);
-		assertThat(result.size(), is(1));
-		assertTrue(result.containsKey("demo.repository.io"));
+		assertThat(result)
+				.hasSize(1)
+				.containsKey("demo.repository.io");
 
 		ContainerRegistryConfiguration registryConfiguration = result.get("demo.repository.io");
 
-		assertThat(registryConfiguration.getRegistryHost(), is("demo.repository.io"));
-		assertThat(registryConfiguration.getUser(), is("testuser"));
-		assertThat(registryConfiguration.getSecret(), is("testpassword"));
-		assertThat(registryConfiguration.getAuthorizationType(), is(ContainerRegistryConfiguration.AuthorizationType.dockeroauth2));
-		assertThat(registryConfiguration.getExtra().get("registryAuthUri"),
-				is("https://demo.repository.io/service/token?service=demo-registry&scope=repository:{repository}:pull"));
+		assertThat(registryConfiguration.getRegistryHost()).isEqualTo("demo.repository.io");
+		assertThat(registryConfiguration.getUser()).isEqualTo("testuser");
+		assertThat(registryConfiguration.getSecret()).isEqualTo("testpassword");
+		assertThat(registryConfiguration.getAuthorizationType())
+				.isEqualTo(ContainerRegistryConfiguration.AuthorizationType.dockeroauth2);
+		assertThat(registryConfiguration.getExtra()).containsEntry("registryAuthUri", "https://demo.repository.io/service/token?service=demo-registry&scope=repository:{repository}:pull");
 	}
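Note (illustration, not part of the patch): the new convertWithPort test ties into the host:port handling added to getDockerTokenServiceUri earlier in this patch. A usage fragment, assuming a converter wired as in the test's init(); values are illustrative:

import java.util.Map;

import org.springframework.cloud.dataflow.container.registry.ContainerRegistryConfiguration;

// The registry key keeps its port verbatim, while the token-endpoint probe
// splits host and port so the port lands in the URI port, not the hostname.
Map<String, ContainerRegistryConfiguration> result = converter.convert(
		"{\"auths\":{\"demo.repository.io:5050\":{\"username\":\"u\",\"password\":\"p\"}}}");
ContainerRegistryConfiguration cfg = result.get("demo.repository.io:5050");
// cfg.getRegistryHost() -> "demo.repository.io:5050"
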
diff --git a/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/support/S3SignedRedirectRequestController.java b/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/support/S3SignedRedirectRequestController.java
index a3a5f2fe67..02f1ad5587 100644
--- a/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/support/S3SignedRedirectRequestController.java
+++ b/spring-cloud-dataflow-container-registry/src/test/java/org/springframework/cloud/dataflow/container/registry/authorization/support/S3SignedRedirectRequestController.java
@@ -17,6 +17,7 @@
 package org.springframework.cloud.dataflow.container.registry.authorization.support;
 
 import java.util.Collections;
+import java.util.Locale;
 import java.util.Map;
 
 import org.springframework.cloud.dataflow.container.registry.ContainerRegistryProperties;
@@ -43,7 +44,7 @@ public ResponseEntity> getToken() {
 
 	@RequestMapping("/v2/test/s3-redirect-image/manifests/1.0.0")
 	public ResponseEntity getManifests(@RequestHeader("Authorization") String token) {
-		if (!"bearer my_token_999".equals(token.trim().toLowerCase())) {
+		if (!"bearer my_token_999".equals(token.trim().toLowerCase(Locale.ROOT))) {
 			return new ResponseEntity<>(HttpStatus.BAD_REQUEST);
 		}
 		return buildFromString("{\"config\": {\"digest\": \"signed_redirect_digest\"} }");
@@ -51,7 +52,7 @@ public ResponseEntity getManifests(@RequestHeader("Authorization") Str
 
 	@RequestMapping("/v2/test/s3-redirect-image/blobs/signed_redirect_digest")
 	public ResponseEntity> getBlobRedirect(@RequestHeader("Authorization") String token) {
-		if (!"bearer my_token_999".equals(token.trim().toLowerCase())) {
+		if (!"bearer my_token_999".equals(token.trim().toLowerCase(Locale.ROOT))) {
 			return new ResponseEntity<>(HttpStatus.BAD_REQUEST);
 		}
 		HttpHeaders redirectHeaders = new HttpHeaders();
diff --git a/spring-cloud-dataflow-core-dsl/pom.xml b/spring-cloud-dataflow-core-dsl/pom.xml
index 028444ce4b..6f37da19f4 100644
--- a/spring-cloud-dataflow-core-dsl/pom.xml
+++ b/spring-cloud-dataflow-core-dsl/pom.xml
@@ -4,10 +4,17 @@
 		org.springframework.cloud
 		spring-cloud-dataflow-parent
-		2.8.0-SNAPSHOT
+		3.0.0-SNAPSHOT
+		../spring-cloud-dataflow-parent
 	spring-cloud-dataflow-core-dsl
+	spring-cloud-dataflow-core-dsl
+	Spring Cloud Data Flow Core DSL
 	jar
+	
+		true
+		3.4.1
+	
@@ -23,4 +30,36 @@
 			test
 	
+	
+		
+			
+				org.apache.maven.plugins
+				maven-javadoc-plugin
+				${maven-javadoc-plugin.version}
+				
+					
+						javadoc
+						
+							jar
+						
+						package
+					
+				
+			
+			
+				org.apache.maven.plugins
+				maven-source-plugin
+				3.3.0
+				
+					
+						source
+						
+							jar
+						
+						package
+					
+				
+			
+		
+	
diff --git a/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/StreamParser.java b/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/StreamParser.java
index 092bb3bf44..6156fedc24 100644
--- a/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/StreamParser.java
+++ b/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/StreamParser.java
@@ -315,6 +315,7 @@ protected String getTokenData(Token token) {
 	 * Expected format: {@code ':' identifier [ '.' identifier ]*}
 	 *
 	 *
+	 * @param canDefault allows the user to peek ahead to parse a reference when working with colons in the syntax.
 	 * @return {@code DestinationNode} representing the destination reference
 	 */
 	protected DestinationNode eatDestinationReference(boolean canDefault) {
@@ -366,7 +367,7 @@ protected DestinationNode eatDestinationReference(boolean canDefault) {
 	 *
 	 *
 	 * Expected formats: {@code appList: app (| app)*} A stream may end in an app (if it is
 	 * a sink) or be followed by a sink destination.
-	 *
+	 * @param preceedingSourceChannelSpecified indicator to parser about state of stream.
 	 * @return a list of {@code AppNode}
 	 */
 	protected List eatAppList(boolean preceedingSourceChannelSpecified) {
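Note (illustration, not part of the patch): a worked example of the destination-reference format (':' identifier ['.' identifier]*), built directly on the AST node the same way NodeTests does further down:

// ":foo.bar.bazz" names the destination "foo.bar.bazz"
DestinationNode node = new DestinationNode(0, 0, "foo.bar.bazz", null);
node.getDestinationName(); // -> "foo.bar.bazz"
node.toString();           // -> ":foo.bar.bazz"
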
diff --git a/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/TaskVisitor.java b/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/TaskVisitor.java
index 41db7e373c..1473184399 100644
--- a/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/TaskVisitor.java
+++ b/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/TaskVisitor.java
@@ -20,7 +20,7 @@
 * Basic visitor pattern for a parsed task. Provide a concrete implementation to
 * participate in the visit and pass it to a parsed TaskNode. A simple task only has one
 * sequence, for example: {@code appA && appB && appC}. In this situation
- * preVisit(int) and postVisit(int) will only be called with 0. A more
+ * complex situation would be:
 *
 *
@@ -32,7 +32,7 @@
  *
  * This includes two sequences - as in two separate definitions. The primary definition
  * references other definitions where it would be too messy to inline them. In this case
- * preVisit(int) would be called for both 0 and 1.
+ * {@link #preVisit(FlowNode)} would be called.
  *
  * @author Andy Clement
  */
@@ -112,7 +112,7 @@ public void postVisit(TaskAppNode taskApp) {
 	}
 
 	/**
-	 * After visit(TaskAppNode) and before postVisit(TaskAppNode) the
+	 * After {@link #visit(TaskAppNode)} and before {@link #postVisit(TaskAppNode)} the
 	 * transitions (if there are any) are visited for that task app.
 	 *
 	 * @param transition the transition
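Note (illustration, not part of the patch): a minimal concrete-visitor sketch for the javadoc above. It assumes TaskNode exposes an accept(TaskVisitor) entry point, as "pass it to a parsed TaskNode" implies, and uses the TaskParser constructor seen in TaskParserTests below:

import java.util.ArrayList;
import java.util.List;

class AppNameCollector extends TaskVisitor {
	final List<String> names = new ArrayList<>();

	@Override
	public void visit(TaskAppNode taskApp) {
		names.add(taskApp.getName()); // called once per app; its transitions are visited in between
	}
}

TaskNode node = new TaskParser("demo", "appA && appB && appC", true, true).parse();
AppNameCollector collector = new AppNameCollector();
node.accept(collector); // collector.names -> [appA, appB, appC]
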
diff --git a/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/Tokens.java b/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/Tokens.java
index 2525e7a7d1..d9f5cd1426 100644
--- a/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/Tokens.java
+++ b/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/Tokens.java
@@ -18,6 +18,7 @@
 
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 
 /**
  * Class that converts an expression into a list of {@link Token tokens}. Furthermore,
@@ -218,8 +219,8 @@ public Token eat(TokenKind expectedKind) {
 			raiseException(expression.length(), DSLMessage.OOD);
 		}
 		if (t.kind != expectedKind) {
-			raiseException(t.startPos, DSLMessage.NOT_EXPECTED_TOKEN, expectedKind.toString().toLowerCase(),
-					(t.data == null) ? new String(t.getKind().tokenChars).toLowerCase() : t.data);
+			raiseException(t.startPos, DSLMessage.NOT_EXPECTED_TOKEN, expectedKind.toString().toLowerCase(Locale.ROOT),
+					(t.data == null) ? new String(t.getKind().tokenChars).toLowerCase(Locale.ROOT) : t.data);
 		}
 		return t;
 	}
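Note (illustration, not part of the patch): why Locale.ROOT here and in the S3 test controller above. String.toLowerCase() uses the JVM's default locale, so under a Turkish locale "IDENTIFIER" lower-cases to "ıdentıfıer" (dotless ı) and token-name comparisons or error messages silently change. A quick demonstration:

import java.util.Locale;

public class LocaleLowerCaseDemo {
	public static void main(String[] args) {
		System.out.println("IDENTIFIER".toLowerCase(new Locale("tr", "TR"))); // ıdentıfıer
		System.out.println("IDENTIFIER".toLowerCase(Locale.ROOT));            // identifier
	}
}
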
diff --git a/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/TransitionNode.java b/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/TransitionNode.java
index 58f0558585..e92946b277 100644
--- a/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/TransitionNode.java
+++ b/spring-cloud-dataflow-core-dsl/src/main/java/org/springframework/cloud/dataflow/core/dsl/TransitionNode.java
@@ -18,12 +18,12 @@
 
 /**
  * An AST node representing a transition found in a parsed task specification. A
- * transition is expressed in the form "{@code STATE->TARGET}". If STATE is
+ * transition is expressed in the form "{@code STATE->TARGET}". If {@code STATE} is
  * unquoted it is considered a reference to the exit code of the preceding app (where
- * * means 'any exit code'). If STATE is quoted it is considered a
- * reference to the exit status of the preceding app (where '*' means 'any exit
- * status'). TARGET can be either a reference to a label, :foo, or a single app
- * name Foo.
+ * {@code *} means 'any exit code'). If {@code STATE} is quoted it is considered a
+ * reference to the exit status of the preceding app (where {@code '*'} means 'any exit
+ * status'). TARGET can be either a reference to a label, {@code :foo}, or a single app
+ * name {@code Foo}.
  *
  * @author Andy Clement
  */
@@ -134,8 +134,6 @@ public String getStatusToCheckInDSLForm() {
 	}
 
 	/**
-	 * The target is either an app or a reference. If it is an app then call
-	 * getTargetApp otherwise call getTargetReference.
 	 *
 	 * @return true if the target is an app
 	 */
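Note (illustration, not part of the patch): a DSL-level example of the transition rules in the javadoc above, mirroring the expectations in TaskParserTests further down (task name "test"; TaskParser usage as in those tests):

// unquoted statuses match exit codes, quoted statuses match exit statuses,
// * / '*' are the corresponding wildcards, and targets are labels (:x) or app names
TaskNode node = new TaskParser("test", "AAA 'failed' -> BBB * -> CCC", true, true).parse();
node.toExecutableDSL(); // -> "test-AAA 'failed'->test-BBB *->test-CCC"
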
diff --git a/spring-cloud-dataflow-core-dsl/src/test/java/org/springframework/cloud/dataflow/core/dsl/NodeTests.java b/spring-cloud-dataflow-core-dsl/src/test/java/org/springframework/cloud/dataflow/core/dsl/NodeTests.java
index ff25f306d9..5e31bba3a9 100644
--- a/spring-cloud-dataflow-core-dsl/src/test/java/org/springframework/cloud/dataflow/core/dsl/NodeTests.java
+++ b/spring-cloud-dataflow-core-dsl/src/test/java/org/springframework/cloud/dataflow/core/dsl/NodeTests.java
@@ -17,33 +17,35 @@
 
 import java.util.Collections;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
-import static org.junit.Assert.assertEquals;
+import static org.assertj.core.api.Assertions.assertThat;
 
 /**
  * @author Oleg Zhurakousky
  * @author Andy Clement
+ * @author Corneil du Plessis
  */
-public class NodeTests {
+class NodeTests {
 
 	@Test
-	public void testDestinationNodeDestinationName(){
+	void destinationNodeDestinationName(){
 		DestinationNode node = new DestinationNode(0, 0, "foo.bar.bazz", null);
-		assertEquals("foo.bar.bazz", node.getDestinationName());
+		assertThat(node.getDestinationName()).isEqualTo("foo.bar.bazz");
 	}
 
 	@Test
-	public void testDestinationNodeToString(){
+	void destinationNodeToString(){
 		ArgumentNode an1 = new ArgumentNode("foo", "bar", 0, 4);
 		ArgumentNode an2 = new ArgumentNode("abc", "'xyz'", 0, 4);
 		DestinationNode node = new DestinationNode(0, 4, "foo.bar.bazz", new ArgumentNode[]{an1, an2});
 		System.out.println(node.stringify());
-		assertEquals(":foo.bar.bazz", node.toString());
+		assertThat(node.toString()).isEqualTo(":foo.bar.bazz");
 	}
 
-	@Test // see https://github.com/spring-cloud/spring-cloud-dataflow/issues/1568
-	public void testStreamNodesToString(){
+	// see https://github.com/spring-cloud/spring-cloud-dataflow/issues/1568
+	@Test
+	void streamNodesToString(){
 		ArgumentNode an1 = new ArgumentNode("foo", "bar", 0, 4);
 		ArgumentNode an2 = new ArgumentNode("abc", "'xyz'", 0, 4);
 		AppNode appNode = new AppNode(null, "bar", 0, 2, new ArgumentNode[]{an1, an2});
@@ -53,6 +55,6 @@ public void testStreamNodesToString(){
 		DestinationNode sinkDNode = new DestinationNode(0, 0, "sink.bar.bazz", null);
 		SinkDestinationNode sink = new SinkDestinationNode(sinkDNode, 4);
 		StreamNode sNode = new StreamNode(null, "myStream", Collections.singletonList(appNode), source, sink);
-		assertEquals("myStream = :source.bar.bazz > bar --foo=bar --abc='xyz' > :sink.bar.bazz", sNode.toString());
+		assertThat(sNode.toString()).isEqualTo("myStream = :source.bar.bazz > bar --foo=bar --abc='xyz' > :sink.bar.bazz");
 	}
 }
diff --git a/spring-cloud-dataflow-core-dsl/src/test/java/org/springframework/cloud/dataflow/core/dsl/TaskParserTests.java b/spring-cloud-dataflow-core-dsl/src/test/java/org/springframework/cloud/dataflow/core/dsl/TaskParserTests.java
index 0638067930..2797ce6711 100644
--- a/spring-cloud-dataflow-core-dsl/src/test/java/org/springframework/cloud/dataflow/core/dsl/TaskParserTests.java
+++ b/spring-cloud-dataflow-core-dsl/src/test/java/org/springframework/cloud/dataflow/core/dsl/TaskParserTests.java
@@ -22,19 +22,16 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.junit.Ignore;
-import org.junit.Test;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
 
 import org.springframework.cloud.dataflow.core.dsl.graph.Graph;
 import org.springframework.cloud.dataflow.core.dsl.graph.Link;
 import org.springframework.cloud.dataflow.core.dsl.graph.Node;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.fail;
+import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
 
 /**
  * Test the parser and visitor infrastructure. Check it accepts expected data and
@@ -45,82 +42,87 @@
  * @author David Turanski
  * @author Michael Minella
  * @author Eric Bottard
+ * @author Corneil du Plessis
  */
-public class TaskParserTests {
+class TaskParserTests {
 
 	private TaskNode ctn;
 
 	private TaskAppNode appNode;
 
 	@Test
-	public void oneApp() {
+	void oneApp() {
 		TaskNode taskNode = parse("foo");
-		assertFalse(taskNode.isComposed());
+		assertThat(taskNode.isComposed()).isFalse();
 		TaskAppNode appNode = taskNode.getTaskApp();
-		assertEquals("foo", appNode.getName());
-		assertEquals(0, appNode.getArguments().length);
-		assertEquals(0, appNode.startPos);
-		assertEquals(3, appNode.endPos);
+		assertThat(appNode.getName()).isEqualTo("foo");
+		assertThat(appNode.getArguments().length).isEqualTo(0);
+		assertThat(appNode.startPos).isEqualTo(0);
+		assertThat(appNode.endPos).isEqualTo(3);
 	}
 
 	@Test
-	public void hyphenatedAppName() {
+	void hyphenatedAppName() {
 		appNode = parse("gemfire-cq").getTaskApp();
-		assertEquals("gemfire-cq:0>10", appNode.stringify(true));
+		assertThat(appNode.stringify(true)).isEqualTo("gemfire-cq:0>10");
 	}
 
 	@Test
-	public void oneAppWithParam() {
+	void oneAppWithParam() {
 		appNode = parse("foo --name=value").getTaskApp();
-		assertEquals("foo --name=value:0>16", appNode.stringify(true));
+		assertThat(appNode.stringify(true)).isEqualTo("foo --name=value:0>16");
 	}
 
 	@Test
-	public void oneAppWithTwoParams() {
+	void oneAppWithTwoParams() {
 		appNode = parse("foo --name=value --x=y").getTaskApp();
 
-		assertEquals("foo", appNode.getName());
+		assertThat(appNode.getName()).isEqualTo("foo");
 		ArgumentNode[] args = appNode.getArguments();
-		assertNotNull(args);
-		assertEquals(2, args.length);
-		assertEquals("name", args[0].getName());
-		assertEquals("value", args[0].getValue());
-		assertEquals("x", args[1].getName());
-		assertEquals("y", args[1].getValue());
+		assertThat(args).isNotNull();
+		assertThat(args.length).isEqualTo(2);
+		assertThat(args[0].getName()).isEqualTo("name");
+		assertThat(args[0].getValue()).isEqualTo("value");
+		assertThat(args[1].getName()).isEqualTo("x");
+		assertThat(args[1].getValue()).isEqualTo("y");
 
-		assertEquals("foo --name=value --x=y:0>22", appNode.stringify(true));
+		assertThat(appNode.stringify(true)).isEqualTo("foo --name=value --x=y:0>22");
 	}
 
 	@Test
-	public void testParameters() {
+	void parameters() {
 		String module = "gemfire-cq --query='Select * from /Stocks where symbol=''VMW''' --regionName=foo --foo=bar";
 		TaskAppNode gemfireApp = parse(module).getTaskApp();
 		Map parameters = gemfireApp.getArgumentsAsMap();
-		assertEquals(3, parameters.size());
-		assertEquals("Select * from /Stocks where symbol='VMW'", parameters.get("query"));
-		assertEquals("foo", parameters.get("regionName"));
-		assertEquals("bar", parameters.get("foo"));
+		assertThat(parameters)
+				.hasSize(3)
+				.containsEntry("query", "Select * from /Stocks where symbol='VMW'")
+				.containsEntry("regionName", "foo")
+				.containsEntry("foo", "bar");
 
 		module = "test";
 		parameters = parse(module).getTaskApp().getArgumentsAsMap();
-		assertEquals(0, parameters.size());
+		assertThat(parameters).isEmpty();
 
 		module = "foo --x=1 --y=two ";
 		parameters = parse(module).getTaskApp().getArgumentsAsMap();
-		assertEquals(2, parameters.size());
-		assertEquals("1", parameters.get("x"));
-		assertEquals("two", parameters.get("y"));
+		assertThat(parameters)
+				.hasSize(2)
+				.containsEntry("x", "1")
+				.containsEntry("y", "two");
 
 		module = "foo --x=1a2b --y=two ";
 		parameters = parse(module).getTaskApp().getArgumentsAsMap();
-		assertEquals(2, parameters.size());
-		assertEquals("1a2b", parameters.get("x"));
-		assertEquals("two", parameters.get("y"));
+		assertThat(parameters)
+				.hasSize(2)
+				.containsEntry("x", "1a2b")
+				.containsEntry("y", "two");
 
 		module = "foo --x=2";
 		parameters = parse(module).getTaskApp().getArgumentsAsMap();
-		assertEquals(1, parameters.size());
-		assertEquals("2", parameters.get("x"));
+		assertThat(parameters)
+				.hasSize(1)
+				.containsEntry("x", "2");
 
 		module = "--foo = bar";
 		try {
@@ -133,7 +135,7 @@ public void testParameters() {
 	}
 
 	@Test
-	public void testInvalidApps() {
+	void invalidApps() {
 		String config = "foo--x=13";
 		TaskParser parser = new TaskParser("t", config, true, true);
 		try {
@@ -146,23 +148,23 @@ public void testInvalidApps() {
 	}
 
 	@Test
-	public void expressions_xd159() {
+	void expressions_xd159() {
 		appNode = parse("transform --expression=--payload").getTaskApp();
 		Map props = appNode.getArgumentsAsMap();
-		assertEquals("--payload", props.get("expression"));
+		assertThat(props).containsEntry("expression", "--payload");
 	}
 
 	@Test
-	public void expressions_xd159_2() {
+	void expressions_xd159_2() {
 		// need quotes around an argument value with a space in it
 		checkForParseError("transform --expression=new StringBuilder(payload).reverse()", DSLMessage.TASK_MORE_INPUT,
 				27);
 		appNode = parse("transform --expression='new StringBuilder(payload).reverse()'").getTaskApp();
-		assertEquals("new StringBuilder(payload).reverse()", appNode.getArgumentsAsMap().get("expression"));
+		assertThat(appNode.getArgumentsAsMap()).containsEntry("expression", "new StringBuilder(payload).reverse()");
 	}
 
 	@Test
-	public void ensureTaskNamesValid_xd1344() {
+	void ensureTaskNamesValid_xd1344() {
 		// Similar rules to a java identifier but also allowed '-' after the first char
 		checkForIllegalTaskName("foo.bar", "task");
 		checkForIllegalTaskName("-bar", "task");
@@ -175,20 +177,20 @@ public void ensureTaskNamesValid_xd1344() {
 	}
 
 	@Test
-	public void expressions_xd159_3() {
+	void expressions_xd159_3() {
 		appNode = parse("transform --expression='new StringBuilder(payload).reverse()'").getTaskApp();
 		Map props = appNode.getArgumentsAsMap();
-		assertEquals("new StringBuilder(payload).reverse()", props.get("expression"));
+		assertThat(props).containsEntry("expression", "new StringBuilder(payload).reverse()");
 	}
 
 	@Test
-	public void expressions_xd159_4() {
+	void expressions_xd159_4() {
 		appNode = parse("transform --expression=\"'Hello, world!'\"").getTaskApp();
 		Map props = appNode.getArgumentsAsMap();
-		assertEquals("'Hello, world!'", props.get("expression"));
+		assertThat(props).containsEntry("expression", "'Hello, world!'");
 		appNode = parse("transform --expression='''Hello, world!'''").getTaskApp();
 		props = appNode.getArgumentsAsMap();
-		assertEquals("'Hello, world!'", props.get("expression"));
+		assertThat(props).containsEntry("expression", "'Hello, world!'");
 		// Prior to the change for XD-1613, this error should point to the comma:
 		// checkForParseError("foo | transform --expression=''Hello, world!'' | bar",
 		// DSLMessage.UNEXPECTED_DATA, 37);
@@ -197,35 +199,35 @@ public void expressions_xd159_4() {
 	}
 
 	@Test
-	public void expressions_gh1() {
+	void expressions_gh1() {
 		appNode = parse("filter --expression=\"payload == 'foo'\"").getTaskApp();
 		Map props = appNode.getArgumentsAsMap();
-		assertEquals("payload == 'foo'", props.get("expression"));
+		assertThat(props).containsEntry("expression", "payload == 'foo'");
 	}
 
 	@Test
-	public void expressions_gh1_2() {
+	void expressions_gh1_2() {
 		appNode = parse("filter --expression='new Foo()'").getTaskApp();
 		Map props = appNode.getArgumentsAsMap();
-		assertEquals("new Foo()", props.get("expression"));
+		assertThat(props).containsEntry("expression", "new Foo()");
 	}
 
 	@Test
-	public void errorCases01() {
+	void errorCases01() {
 		checkForParseError(".", DSLMessage.EXPECTED_APPNAME, 0, ".");
-		assertEquals("a-_", parse("foo", "a-_", true).getTaskApp().getName());
-		assertEquals("a_b", parse("foo", "a_b", true).getTaskApp().getName());
+		assertThat(parse("foo", "a-_", true).getTaskApp().getName()).isEqualTo("a-_");
+		assertThat(parse("foo", "a_b", true).getTaskApp().getName()).isEqualTo("a_b");
 		checkForParseError(";", DSLMessage.EXPECTED_APPNAME, 0, ";");
 	}
 
 	@Test
-	public void errorCases04() {
+	void errorCases04() {
 		checkForParseError("foo bar=yyy", DSLMessage.TASK_MORE_INPUT, 4, "bar");
 		checkForParseError("foo bar", DSLMessage.TASK_MORE_INPUT, 4, "bar");
 	}
 
 	@Test
-	public void shortArgValues_2499() {
+	void shortArgValues_2499() {
 		// This is the expected result when an argument value is missing:
 		checkForParseError("aaa --bbb= --ccc=ddd", DSLMessage.EXPECTED_ARGUMENT_VALUE, 11);
 		// From AbstractTokenizer.isArgValueIdentifierTerminator these are the 'special chars' that should
@@ -242,49 +244,49 @@ public void shortArgValues_2499() {
 	}
 
 	@Test
-	public void errorCases05() {
+	void errorCases05() {
 		checkForParseError("foo --", DSLMessage.OOD, 6);
 		checkForParseError("foo --bar", DSLMessage.OOD, 9);
 		checkForParseError("foo --bar=", DSLMessage.OOD, 10);
 	}
 
 	@Test
-	public void errorCases06() {
+	void errorCases06() {
 		// Exception thrown by tokenizer, which doesn't know that the app name is missing
 		checkForParseError("|", DSLMessage.TASK_DOUBLE_OR_REQUIRED, 0);
 	}
 
 	// Parameters must be constructed via adjacent tokens
 	@Test
-	public void needAdjacentTokensForParameters() {
+	void needAdjacentTokensForParameters() {
 		checkForParseError("foo -- name=value", DSLMessage.NO_WHITESPACE_BEFORE_ARG_NAME, 7);
 		checkForParseError("foo --name =value", DSLMessage.NO_WHITESPACE_BEFORE_ARG_EQUALS, 11);
 		checkForParseError("foo --name= value", DSLMessage.NO_WHITESPACE_BEFORE_ARG_VALUE, 12);
 	}
 
 	@Test
-	public void testComposedOptionNameErros() {
+	void composedOptionNameErros() {
 		checkForParseError("foo --name.=value", DSLMessage.NOT_EXPECTED_TOKEN, 11);
 		checkForParseError("foo --name .sub=value", DSLMessage.NO_WHITESPACE_IN_DOTTED_NAME, 11);
 		checkForParseError("foo --name. sub=value", DSLMessage.NO_WHITESPACE_IN_DOTTED_NAME, 12);
 	}
 
 	@Test
-	public void testXD2416() {
+	void xd2416() {
 		appNode = parse("transform --expression='payload.replace(\"abc\", \"\")'").getTaskApp();
-		assertEquals(appNode.getArgumentsAsMap().get("expression"), "payload.replace(\"abc\", \"\")");
+		assertThat(appNode.getArgumentsAsMap()).containsEntry("expression", "payload.replace(\"abc\", \"\")");
 
 		appNode = parse("transform --expression='payload.replace(\"abc\", '''')'").getTaskApp();
-		assertEquals(appNode.getArgumentsAsMap().get("expression"), "payload.replace(\"abc\", '')");
+		assertThat(appNode.getArgumentsAsMap()).containsEntry("expression", "payload.replace(\"abc\", '')");
 	}
 
 	@Test
-	public void testUnbalancedSingleQuotes() {
+	void unbalancedSingleQuotes() {
 		checkForParseError("timestamp --format='YYYY", DSLMessage.NON_TERMINATING_QUOTED_STRING, 19);
 	}
 
 	@Test
-	public void testUnbalancedDoubleQuotes() {
+	void unbalancedDoubleQuotes() {
 		checkForParseError("timestamp --format=\"YYYY", DSLMessage.NON_TERMINATING_DOUBLE_QUOTED_STRING, 19);
 	}
 
@@ -294,115 +296,105 @@ private void checkForIllegalTaskName(String taskName, String taskDef) {
 			fail("expected to fail but parsed " + appNode.stringify());
 		}
 		catch (ParseException e) {
-			assertEquals(DSLMessage.ILLEGAL_TASK_NAME, e.getMessageCode());
-			assertEquals(0, e.getPosition());
-			assertEquals(taskName, e.getInserts()[0]);
+			assertThat(e.getMessageCode()).isEqualTo(DSLMessage.ILLEGAL_TASK_NAME);
+			assertThat(e.getPosition()).isEqualTo(0);
+			assertThat(e.getInserts()[0]).isEqualTo(taskName);
 		}
 	}
 
 	@Test
-	public void executableDsl() {
+	void executableDsl() {
 		TaskNode ctn = parse("foo", "appA && appB", true);
 		List taskApps = ctn.getTaskApps();
-		assertEquals("appA", taskApps.get(0).getName());
-		assertEquals("foo-appA", taskApps.get(0).getExecutableDSLName());
-		assertEquals("appB", taskApps.get(1).getName());
-		assertEquals("foo-appB", taskApps.get(1).getExecutableDSLName());
+		assertThat(taskApps.get(0).getName()).isEqualTo("appA");
+		assertThat(taskApps.get(0).getExecutableDSLName()).isEqualTo("foo-appA");
+		assertThat(taskApps.get(1).getName()).isEqualTo("appB");
+		assertThat(taskApps.get(1).getExecutableDSLName()).isEqualTo("foo-appB");
 
 		ctn = parse("bar", "appC && goo: appC", true);
 		taskApps = ctn.getTaskApps();
-		assertEquals("appC", taskApps.get(0).getName());
-		assertEquals("bar-appC", taskApps.get(0).getExecutableDSLName());
-		assertEquals("appC", taskApps.get(1).getName());
-		assertEquals("bar-goo", taskApps.get(1).getExecutableDSLName());
+		assertThat(taskApps.get(0).getName()).isEqualTo("appC");
+		assertThat(taskApps.get(0).getExecutableDSLName()).isEqualTo("bar-appC");
+		assertThat(taskApps.get(1).getName()).isEqualTo("appC");
+		assertThat(taskApps.get(1).getExecutableDSLName()).isEqualTo("bar-goo");
 
 		// flows
-		assertEquals("foo-appA", parse("foo", "appA", true).toExecutableDSL());
-		assertEquals("foo-appA && foo-appB", parse("foo", "appA && appB", true).toExecutableDSL());
-		assertEquals("foo-appA && foo-appB && foo-appC", parse("foo", "appA && appB && appC", true).toExecutableDSL());
+		assertThat(parse("foo", "appA", true).toExecutableDSL()).isEqualTo("foo-appA");
+		assertThat(parse("foo", "appA && appB", true).toExecutableDSL()).isEqualTo("foo-appA && foo-appB");
+		assertThat(parse("foo", "appA && appB && appC", true).toExecutableDSL()).isEqualTo("foo-appA && foo-appB && foo-appC");
 
 		assertTaskApps("foo", "appA", "foo-appA");
 		assertTaskApps("foo", "appA && appB", "foo-appA", "foo-appB");
 		assertTaskApps("foo", "appA && appB && appC", "foo-appA", "foo-appB", "foo-appC");
 
 		// arguments
-		assertEquals("foo-appA", parse("foo", "appA --p1=v1 --p2=v2", true).toExecutableDSL());
-		assertEquals("foo-appA && foo-appB", parse("foo", "appA --p2=v2 && appB --p3=v3", true).toExecutableDSL());
+		assertThat(parse("foo", "appA --p1=v1 --p2=v2", true).toExecutableDSL()).isEqualTo("foo-appA");
+		assertThat(parse("foo", "appA --p2=v2 && appB --p3=v3", true).toExecutableDSL()).isEqualTo("foo-appA && foo-appB");
 		assertTaskApps("foo", "appA --p1=v2", "foo-appA:p1=v2");
 		assertTaskApps("foo", "appA --p1=v2 && goo: appB --p2=v2", "foo-appA:p1=v2", "foo-goo:p2=v2");
 		assertTaskApps("foo", "appA 0->x:appA --p1=v1", "foo-appA", "foo-x:p1=v1");
 
 		// labels
-		assertEquals("bar-goo", parse("bar", "goo:appA", true).toExecutableDSL());
-		assertEquals("fo-aaa && fo-bbb", parse("fo", "aaa: appA && bbb: appA", true).toExecutableDSL());
+		assertThat(parse("bar", "goo:appA", true).toExecutableDSL()).isEqualTo("bar-goo");
+		assertThat(parse("fo", "aaa: appA && bbb: appA", true).toExecutableDSL()).isEqualTo("fo-aaa && fo-bbb");
 
 		assertTaskApps("bar", "goo:appA", "bar-goo");
 		assertTaskApps("bar", "appA && goo: appA", "bar-appA", "bar-goo");
 
 		// transitions
-		assertEquals("foo-appA 'c'->foo-appC && foo-appB",
-				parse("foo", "appA 'c'->appC && appB", true).toExecutableDSL());
-		assertEquals("foo-appA 'c'->foo-appC 'd'->foo-appD && foo-appB",
-				parse("foo", "appA 'c'->appC 'd'->appD && " + "appB", true).toExecutableDSL());
-		assertEquals("foo-appA 1->foo-appC 2->foo-appD && foo-appB",
-				parse("foo", "appA 1->appC 2->appD && appB", true).toExecutableDSL());
-		assertEquals("foo-aaa 1->foo-appC 2->:aaa", parse("foo", "aaa: appA 1->appC 2->:aaa", true).toExecutableDSL());
+		assertThat(parse("foo", "appA 'c'->appC && appB", true).toExecutableDSL()).isEqualTo("foo-appA 'c'->foo-appC && foo-appB");
+		assertThat(parse("foo", "appA 'c'->appC 'd'->appD && " + "appB", true).toExecutableDSL()).isEqualTo("foo-appA 'c'->foo-appC 'd'->foo-appD && foo-appB");
+		assertThat(parse("foo", "appA 1->appC 2->appD && appB", true).toExecutableDSL()).isEqualTo("foo-appA 1->foo-appC 2->foo-appD && foo-appB");
+		assertThat(parse("foo", "aaa: appA 1->appC 2->:aaa", true).toExecutableDSL()).isEqualTo("foo-aaa 1->foo-appC 2->:aaa");
 
 		// splits
-		assertEquals("", parse("foo", "", true).toExecutableDSL());
-		assertEquals("",
-				parse("foo", "", true).toExecutableDSL());
-		assertEquals("< || foo-appB>",
-				parse("foo", "< || " + "appB>", true).toExecutableDSL());
-		assertEquals("< || foo-appB>",
-				parse("foo", "< || appB>", true).toExecutableDSL());
+		assertThat(parse("foo", "", true).toExecutableDSL()).isEqualTo("");
+		assertThat(parse("foo", "", true).toExecutableDSL()).isEqualTo("");
+		assertThat(parse("foo", "< || " + "appB>", true).toExecutableDSL()).isEqualTo("< || foo-appB>");
+		assertThat(parse("foo", "< || appB>", true).toExecutableDSL()).isEqualTo("< || foo-appB>");
 
 		// splits and flows
-		assertEquals("foo-AAA && foo-FFF 'FAILED'->foo-EEE &&  && foo-DDD",
-				parse("foo", "AAA && " + "FFF 'FAILED' -> EEE &&  && DDD", true).toExecutableDSL());
+		assertThat(parse("foo", "AAA && " + "FFF 'FAILED' -> EEE &&  && DDD", true).toExecutableDSL()).isEqualTo("foo-AAA && foo-FFF 'FAILED'->foo-EEE &&  && foo-DDD");
 		assertTaskApps("foo", "AAA && FFF 'FAILED' -> EEE &&  && DDD", "foo-AAA", "foo-FFF", "foo-EEE",
 				"foo-BBB", "foo-CCC", "foo-DDD");
-		assertEquals(" && ", parse(" && ", true).toExecutableDSL());
-		assertEquals(" && ",
-				parse(" && ", true).toExecutableDSL());
-		assertEquals(" && test-D", parse(" && D", true).toExecutableDSL());
-		assertEquals(">", parse(">", true).toExecutableDSL());
-		assertEquals(">", parse(">", true).toExecutableDSL());
+		assertThat(parse(" && ", true).toExecutableDSL()).isEqualTo(" && ");
+		assertThat(parse(" && ", true).toExecutableDSL()).isEqualTo(" && ");
+		assertThat(parse(" && D", true).toExecutableDSL()).isEqualTo(" && test-D");
+		assertThat(parse(">", true).toExecutableDSL()).isEqualTo(">");
+		assertThat(parse(">", true).toExecutableDSL()).isEqualTo(">");
 
 		ctn = parse("AAA 0->BBB");
-		List transitions = ((TaskAppNode) ((FlowNode) ctn.getSequences().get(0)).getSeriesElement(0))
+		List transitions = ((TaskAppNode) ctn.getSequences().get(0).getSeriesElement(0))
 				.getTransitions();
-		assertEquals("0", transitions.get(0).getStatusToCheckInDSLForm());
+		assertThat(transitions.get(0).getStatusToCheckInDSLForm()).isEqualTo("0");
 
 		ctn = parse("AAA '0'->BBB");
 		transitions = ((TaskAppNode) ((FlowNode) ctn.getSequences().get(0)).getSeriesElement(0)).getTransitions();
-		assertEquals("'0'", transitions.get(0).getStatusToCheckInDSLForm());
+		assertThat(transitions.get(0).getStatusToCheckInDSLForm()).isEqualTo("'0'");
 
 		ctn = parse("AAA *->BBB '*'->CCC");
 		transitions = ((TaskAppNode) ((FlowNode) ctn.getSequences().get(0)).getSeriesElement(0)).getTransitions();
-		assertEquals("*", transitions.get(0).getStatusToCheckInDSLForm());
-		assertEquals("'*'", transitions.get(1).getStatusToCheckInDSLForm());
+		assertThat(transitions.get(0).getStatusToCheckInDSLForm()).isEqualTo("*");
+		assertThat(transitions.get(1).getStatusToCheckInDSLForm()).isEqualTo("'*'");
 
-		assertEquals("test-AAA 'failed'->test-BBB *->test-CCC",
-				parse("AAA 'failed' -> BBB * -> CCC").toExecutableDSL());
-		assertEquals("test-AAA 'failed'->test-BBB '*'->test-CCC",
-				parse("AAA 'failed' -> BBB '*' -> CCC").toExecutableDSL());
-		assertEquals("test-AAA 1->test-BBB 2->test-CCC", parse("AAA 1 -> BBB 2 -> CCC").toExecutableDSL());
+		assertThat(parse("AAA 'failed' -> BBB * -> CCC").toExecutableDSL()).isEqualTo("test-AAA 'failed'->test-BBB *->test-CCC");
+		assertThat(parse("AAA 'failed' -> BBB '*' -> CCC").toExecutableDSL()).isEqualTo("test-AAA 'failed'->test-BBB '*'->test-CCC");
+		assertThat(parse("AAA 1 -> BBB 2 -> CCC").toExecutableDSL()).isEqualTo("test-AAA 1->test-BBB 2->test-CCC");
 	}
 
 	@Test
-	public void isComposedTask() {
+	void isComposedTask() {
 		ctn = parse("appA 'foo' -> appB");
-		assertTrue(ctn.isComposed());
-		assertNull(ctn.getTaskApp());
+		assertThat(ctn.isComposed()).isTrue();
+		assertThat(ctn.getTaskApp()).isNull();
 		assertGraph("[0:START][1:appA][2:appB][3:END][0-1][foo:1-2][1-3][2-3]", "appA 'foo' -> appB");
 		ctn = parse("appA");
-		assertFalse(ctn.isComposed());
-		assertNotNull(ctn.getTaskApp());
+		assertThat(ctn.isComposed()).isFalse();
+		assertThat(ctn.getTaskApp()).isNotNull();
 	}
 
 	@Test
-	public void basics() {
+	void basics() {
 		Tokens tokens = new TaskTokenizer().getTokens("App1");
 		assertToken(TokenKind.IDENTIFIER, "App1", 0, 4, tokens.next());
 		tokens = new TaskTokenizer().getTokens("App1 && App2");
@@ -418,7 +410,7 @@ public void basics() {
 	}
 
 	@Test
-	public void tokenStreams() {
+	void tokenStreams() {
 		Tokens tokens = new TaskTokenizer().getTokens("App1 0->App2 1->:Bar");
 		assertTokens(tokens, TokenKind.IDENTIFIER, TokenKind.IDENTIFIER,
 				TokenKind.ARROW, TokenKind.IDENTIFIER, TokenKind.IDENTIFIER,
@@ -430,37 +422,37 @@ public void tokenStreams() {
 	}
 
 	@Test
-	public void singleApp() {
+	void singleApp() {
 		ctn = parse("FooApp");
-		assertEquals("FooApp", ctn.getTaskText());
-		assertEquals(0, ctn.getStartPos());
-		assertEquals(6, ctn.getEndPos());
-		assertEquals("FooApp", ctn.stringify());
+		assertThat(ctn.getTaskText()).isEqualTo("FooApp");
+		assertThat(ctn.getStartPos()).isEqualTo(0);
+		assertThat(ctn.getEndPos()).isEqualTo(6);
+		assertThat(ctn.stringify()).isEqualTo("FooApp");
 		LabelledTaskNode node = ctn.getStart();
-		assertFalse(node.isSplit());
-		assertTrue(node.isFlow());
+		assertThat(node.isSplit()).isFalse();
+		assertThat(node.isFlow()).isTrue();
 		assertFlow(node, "FooApp");
-		assertTrue(((FlowNode) node).getSeriesElement(0).isTaskApp());
+		assertThat(((FlowNode) node).getSeriesElement(0).isTaskApp()).isTrue();
 	}
 
 	@Test
-	public void twoAppFlow() {
+	void twoAppFlow() {
 		ctn = parse("FooApp  &&  BarApp");
 
-		assertEquals("FooApp  &&  BarApp", ctn.getTaskText());
-		assertEquals(0, ctn.getStartPos());
-		assertEquals(18, ctn.getEndPos());
-		assertEquals("FooApp && BarApp", ctn.stringify());
+		assertThat(ctn.getTaskText()).isEqualTo("FooApp  &&  BarApp");
+		assertThat(ctn.getStartPos()).isEqualTo(0);
+		assertThat(ctn.getEndPos()).isEqualTo(18);
+		assertThat(ctn.stringify()).isEqualTo("FooApp && BarApp");
 
 		LabelledTaskNode node = ctn.getStart();
-		assertFalse(node.isSplit());
-		assertTrue(node.isFlow());
-		assertFalse(node.isTaskApp());
+		assertThat(node.isSplit()).isFalse();
+		assertThat(node.isFlow()).isTrue();
+		assertThat(node.isTaskApp()).isFalse();
 
 		FlowNode flow = (FlowNode) node;
 		List series = flow.getSeries();
-		assertEquals(2, series.size());
-		assertEquals(2, flow.getSeriesLength());
+		assertThat(series).hasSize(2);
+		assertThat(flow.getSeriesLength()).isEqualTo(2);
 		assertTaskApp(series.get(0), "FooApp");
 		assertTaskApp(flow.getSeriesElement(0), "FooApp");
 		assertTaskApp(series.get(1), "BarApp");
@@ -468,7 +460,7 @@ public void twoAppFlow() {
 	}
 
 	@Test
-	public void appsInTaskDef() {
+	void appsInTaskDef() {
 		ctn = parse("FooApp --p1=v1 --p2=v2");
 		ctn = parse("FooApp --p1=v1 --p2=v2 && BarApp --p3=v3");
 		ctn = parse("");
@@ -488,47 +480,47 @@ public void appsInTaskDef() {
 	}
 
 	@Test
-	public void oneAppSplit() {
+	void oneAppSplit() {
 		ctn = parse("< FooApp>");
 
-		assertEquals("< FooApp>", ctn.getTaskText());
-		assertEquals(0, ctn.getStartPos());
-		assertEquals(9, ctn.getEndPos());
-		assertEquals("", ctn.stringify());
+		assertThat(ctn.getTaskText()).isEqualTo("< FooApp>");
+		assertThat(ctn.getStartPos()).isEqualTo(0);
+		assertThat(ctn.getEndPos()).isEqualTo(9);
+		assertThat(ctn.stringify()).isEqualTo("");
 
 		LabelledTaskNode node = ctn.getStart();
-		assertTrue(node.isFlow());
+		assertThat(node.isFlow()).isTrue();
 		node = ((FlowNode) node).getSeriesElement(0);
-		assertTrue(node.isSplit());
-		assertFalse(node.isTaskApp());
+		assertThat(node.isSplit()).isTrue();
+		assertThat(node.isTaskApp()).isFalse();
 
 		SplitNode split = (SplitNode) node;
 		List series = split.getSeries();
-		assertEquals(1, series.size());
-		assertEquals(1, split.getSeriesLength());
+		assertThat(series).hasSize(1);
+		assertThat(split.getSeriesLength()).isEqualTo(1);
 		assertFlow(series.get(0), "FooApp");
 		assertFlow(split.getSeriesElement(0), "FooApp");
 	}
 
 	@Test
-	public void twoAppSplit() {
+	void twoAppSplit() {
 		ctn = parse("< FooApp  ||    BarApp>");
 
-		assertEquals("< FooApp  ||    BarApp>", ctn.getTaskText());
-		assertEquals(0, ctn.getStartPos());
-		assertEquals(23, ctn.getEndPos());
-		assertEquals("", ctn.stringify());
+		assertThat(ctn.getTaskText()).isEqualTo("< FooApp  ||    BarApp>");
+		assertThat(ctn.getStartPos()).isEqualTo(0);
+		assertThat(ctn.getEndPos()).isEqualTo(23);
+		assertThat(ctn.stringify()).isEqualTo("");
 
 		LabelledTaskNode node = ctn.getStart();
-		assertTrue(node.isFlow());
+		assertThat(node.isFlow()).isTrue();
 		node = ((FlowNode) node).getSeriesElement(0);
-		assertTrue(node.isSplit());
-		assertFalse(node.isTaskApp());
+		assertThat(node.isSplit()).isTrue();
+		assertThat(node.isTaskApp()).isFalse();
 
 		SplitNode split = (SplitNode) node;
 		List series = split.getSeries();
-		assertEquals(2, series.size());
-		assertEquals(2, split.getSeriesLength());
+		assertThat(series).hasSize(2);
+		assertThat(split.getSeriesLength()).isEqualTo(2);
 		assertFlow(series.get(0), "FooApp");
 		assertFlow(split.getSeriesElement(0), "FooApp");
 		assertFlow(series.get(1), "BarApp");
@@ -536,99 +528,99 @@ public void twoAppSplit() {
 	}
 
 	@Test
-	public void appWithOneTransition() {
+	void appWithOneTransition() {
 		ctn = parse("App1 0->App2");
-		assertEquals("test", ctn.getName());
-		assertEquals("App1 0->App2", ctn.getTaskText());
-		assertEquals(0, ctn.getStartPos());
-		assertEquals(12, ctn.getEndPos());
-		assertEquals("App1 0->App2", ctn.stringify());
+		assertThat(ctn.getName()).isEqualTo("test");
+		assertThat(ctn.getTaskText()).isEqualTo("App1 0->App2");
+		assertThat(ctn.getStartPos()).isEqualTo(0);
+		assertThat(ctn.getEndPos()).isEqualTo(12);
+		assertThat(ctn.stringify()).isEqualTo("App1 0->App2");
 		LabelledTaskNode firstNode = ctn.getStart();
-		assertTrue(firstNode.isFlow());
+		assertThat(firstNode.isFlow()).isTrue();
 		List transitions = ((TaskAppNode) ((FlowNode) firstNode).getSeriesElement(0)).getTransitions();
-		assertEquals(1, transitions.size());
+		assertThat(transitions).hasSize(1);
 		TransitionNode transition = transitions.get(0);
-		assertEquals("0", transition.getStatusToCheck());
-		assertEquals("App2", transition.getTargetDslText());
-		assertEquals(5, transition.getStartPos());
-		assertEquals(12, transition.getEndPos());
+		assertThat(transition.getStatusToCheck()).isEqualTo("0");
+		assertThat(transition.getTargetDslText()).isEqualTo("App2");
+		assertThat(transition.getStartPos()).isEqualTo(5);
+		assertThat(transition.getEndPos()).isEqualTo(12);
 	}
 
 	@Test
-	public void appWithTwoTransitions() {
+	void appWithTwoTransitions() {
 		ctn = parse("App1 0->App2 'abc' ->   App3");
-		assertEquals("App1 0->App2 'abc' ->   App3", ctn.getTaskText());
-		assertEquals(0, ctn.getStartPos());
-		assertEquals(28, ctn.getEndPos());
-		assertEquals("App1 0->App2 'abc'->App3", ctn.stringify());
+		assertThat(ctn.getTaskText()).isEqualTo("App1 0->App2 'abc' ->   App3");
+		assertThat(ctn.getStartPos()).isEqualTo(0);
+		assertThat(ctn.getEndPos()).isEqualTo(28);
+		assertThat(ctn.stringify()).isEqualTo("App1 0->App2 'abc'->App3");
 		LabelledTaskNode node = ctn.getStart();
-		assertTrue(node.isFlow());
+		assertThat(node.isFlow()).isTrue();
 		node = ((FlowNode) node).getSeriesElement(0);
 		List transitions = ((TaskAppNode) node).getTransitions();
-		assertEquals(2, transitions.size());
+		assertThat(transitions).hasSize(2);
 		TransitionNode transition = transitions.get(0);
-		assertEquals("0", transition.getStatusToCheck());
-		assertTrue(transition.isExitCodeCheck());
-		assertEquals("App2", transition.getTargetDslText());
-		assertEquals(5, transition.getStartPos());
-		assertEquals(12, transition.getEndPos());
+		assertThat(transition.getStatusToCheck()).isEqualTo("0");
+		assertThat(transition.isExitCodeCheck()).isTrue();
+		assertThat(transition.getTargetDslText()).isEqualTo("App2");
+		assertThat(transition.getStartPos()).isEqualTo(5);
+		assertThat(transition.getEndPos()).isEqualTo(12);
 		transition = transitions.get(1);
-		assertEquals("abc", transition.getStatusToCheck());
-		assertFalse(transition.isExitCodeCheck());
-		assertEquals("App3", transition.getTargetDslText());
-		assertEquals(13, transition.getStartPos());
-		assertEquals(28, transition.getEndPos());
+		assertThat(transition.getStatusToCheck()).isEqualTo("abc");
+		assertThat(transition.isExitCodeCheck()).isFalse();
+		assertThat(transition.getTargetDslText()).isEqualTo("App3");
+		assertThat(transition.getStartPos()).isEqualTo(13);
+		assertThat(transition.getEndPos()).isEqualTo(28);
 	}
 
 	@Test
-	public void appWithWildcardTransitions() {
+	void appWithWildcardTransitions() {
 		ctn = parse("App1 *->App2 '*'->App3");
-		assertEquals("App1 *->App2 '*'->App3", ctn.getTaskText());
-		assertEquals(0, ctn.getStartPos());
-		assertEquals(22, ctn.getEndPos());
-		assertEquals("App1 *->App2 '*'->App3", ctn.stringify());
+		assertThat(ctn.getTaskText()).isEqualTo("App1 *->App2 '*'->App3");
+		assertThat(ctn.getStartPos()).isEqualTo(0);
+		assertThat(ctn.getEndPos()).isEqualTo(22);
+		assertThat(ctn.stringify()).isEqualTo("App1 *->App2 '*'->App3");
 		LabelledTaskNode node = ctn.getStart();
 		node = ((FlowNode) node).getSeriesElement(0);
-		assertTrue(node.isTaskApp());
+		assertThat(node.isTaskApp()).isTrue();
 		List transitions = ((TaskAppNode) node).getTransitions();
-		assertEquals(2, transitions.size());
+		assertThat(transitions).hasSize(2);
 
 		TransitionNode transition = transitions.get(0);
-		assertEquals("*", transition.getStatusToCheck());
-		assertTrue(transition.isExitCodeCheck());
-		assertEquals("App2", transition.getTargetDslText());
-		assertEquals(5, transition.getStartPos());
-		assertEquals(12, transition.getEndPos());
+		assertThat(transition.getStatusToCheck()).isEqualTo("*");
+		assertThat(transition.isExitCodeCheck()).isTrue();
+		assertThat(transition.getTargetDslText()).isEqualTo("App2");
+		assertThat(transition.getStartPos()).isEqualTo(5);
+		assertThat(transition.getEndPos()).isEqualTo(12);
 		transition = transitions.get(1);
-		assertEquals("*", transition.getStatusToCheck());
-		assertFalse(transition.isExitCodeCheck());
-		assertEquals("App3", transition.getTargetDslText());
-		assertEquals(13, transition.getStartPos());
-		assertEquals(22, transition.getEndPos());
+		assertThat(transition.getStatusToCheck()).isEqualTo("*");
+		assertThat(transition.isExitCodeCheck()).isFalse();
+		assertThat(transition.getTargetDslText()).isEqualTo("App3");
+		assertThat(transition.getStartPos()).isEqualTo(13);
+		assertThat(transition.getEndPos()).isEqualTo(22);
 	}
 
 	@Test
-	public void appWithLabelReferenceTransition() {
+	void appWithLabelReferenceTransition() {
 		ctn = parse("App1 'foo'->:something", false);
-		assertEquals("App1 'foo'->:something", ctn.getTaskText());
-		assertEquals(0, ctn.getStartPos());
-		assertEquals(22, ctn.getEndPos());
-		assertEquals("App1 'foo'->:something", ctn.stringify());
+		assertThat(ctn.getTaskText()).isEqualTo("App1 'foo'->:something");
+		assertThat(ctn.getStartPos()).isEqualTo(0);
+		assertThat(ctn.getEndPos()).isEqualTo(22);
+		assertThat(ctn.stringify()).isEqualTo("App1 'foo'->:something");
 		LabelledTaskNode firstNode = ctn.getStart();
 		assertFlow(firstNode, "App1");
 		List transitions = ((TaskAppNode) ((FlowNode) firstNode).getSeriesElement(0)).getTransitions();
-		assertEquals(1, transitions.size());
+		assertThat(transitions).hasSize(1);
 		TransitionNode transition = transitions.get(0);
-		assertEquals("foo", transition.getStatusToCheck());
-		assertFalse(transition.isExitCodeCheck());
-		assertEquals(":something", transition.getTargetDslText());
-		assertEquals("something", transition.getTargetLabel());
-		assertEquals(5, transition.getStartPos());
-		assertEquals(22, transition.getEndPos());
+		assertThat(transition.getStatusToCheck()).isEqualTo("foo");
+		assertThat(transition.isExitCodeCheck()).isFalse();
+		assertThat(transition.getTargetDslText()).isEqualTo(":something");
+		assertThat(transition.getTargetLabel()).isEqualTo("something");
+		assertThat(transition.getStartPos()).isEqualTo(5);
+		assertThat(transition.getEndPos()).isEqualTo(22);
 	}
 
 	@Test
-	public void splitMainComposedTaskOverMultipleLines() {
+	void splitMainComposedTaskOverMultipleLines() {
 		ctn = parse("FooApp &&\nBarApp");
 		assertFlow(ctn.getStart(), "FooApp", "BarApp");
 		ctn = parse("FooApp\n&& BarApp");
@@ -640,34 +632,34 @@ public void splitMainComposedTaskOverMultipleLines() {
 		ctn = parse("FooApp\n 0\n->:a\n 1->:b\n &&\nBarApp 2->:c 3->:d", false);
 		assertFlow(ctn.getStart(), "FooApp", "BarApp");
 		ctn = parse("");
-		assertSplit(((FlowNode) ctn.getStart()).getSeriesElement(0), "FooApp", "BarApp");
+		assertSplit(ctn.getStart().getSeriesElement(0), "FooApp", "BarApp");
 		ctn = parse("<\nFooApp ||\nBarApp\n>");
-		assertSplit(((FlowNode) ctn.getStart()).getSeriesElement(0), "FooApp", "BarApp");
+		assertSplit(ctn.getStart().getSeriesElement(0), "FooApp", "BarApp");
 	}
 
 	@Test
-	public void labelledElement() {
+	void labelledElement() {
 		ctn = parse("foo: appA");
 		LabelledTaskNode start = ctn.getStart();
-		assertEquals("foo", start.getLabelString());
+		assertThat(start.getLabelString()).isEqualTo("foo");
 		FlowNode f = (FlowNode) start;
-		assertEquals("foo", f.getLabelString());
-		assertEquals("appA", ((TaskAppNode) f.getSeriesElement(0)).getName());
+		assertThat(f.getLabelString()).isEqualTo("foo");
+		assertThat(((TaskAppNode) f.getSeriesElement(0)).getName()).isEqualTo("appA");
 
 		ctn = parse("foo: ");
 		start = ctn.getStart();
-		assertEquals("foo", start.getLabelString());
+		assertThat(start.getLabelString()).isEqualTo("foo");
 		SplitNode s = (SplitNode) ((FlowNode) start).getSeriesElement(0);
 		assertSplit(s, "appA", "appB");
 
 		ctn = parse("foo: appA && appB");
 		start = ctn.getStart();
-		assertEquals("foo", start.getLabelString());
+		assertThat(start.getLabelString()).isEqualTo("foo");
 		assertFlow(start, "appA", "appB");
 	}
 
 	@Test
-	public void taskCollectorVisitor() {
+	void taskCollectorVisitor() {
 		assertApps(parse("appA").getTaskApps(), "appA");
 		assertApps(parse("appA && appB && appC").getTaskApps(), "appA", "appB", "appC");
 		assertApps(parse(" && appC").getTaskApps(), "appA", "appB", "appC");
@@ -677,36 +669,36 @@ public void taskCollectorVisitor() {
 	}
 
 	@Test
-	public void transitionToOtherSequence() {
+	void transitionToOtherSequence() {
 		String spec = " appA 'fail'->:two && appB && appC;two: appD && appE";
 		assertGraph("[0:START][1:appA][2:appB][3:appC][4:END][9:appD][10:appE]"
 				+ "[0-1][1-2][2-3][3-4][fail:1-9][9-10][10-4]", spec);
 	}
 
 	@Test
-	public void singleSplitToGraph() {
+	void singleSplitToGraph() {
 		String spec = " appB>";
 		assertGraph("[0:START][1:appA][2:appB][3:END]"
 				+ "[0-1][fail:1-2][1-3][2-3]", spec);
 	}
 
 	@Test
-	public void secondarySequencesHaveFurtherTransitions() {
+	void secondarySequencesHaveFurtherTransitions() {
 		String spec = " appA 'fail'->:two && appB;two: appD 'fail2'->:three && appE;three: appF && appG";
 		assertGraph("[0:START][1:appA][2:appB][3:END][12:appD][13:appE][14:appF][15:appG]"
 				+ "[0-1][1-2][2-3][fail:1-12][12-13][13-3][fail2:12-14][14-15][15-3]", spec);
 	}
 
 	@Test
-	public void twoReferencesToSecondarySequence() {
+	void twoReferencesToSecondarySequence() {
 		String spec = "appA 'fail'->:two && appB 'fail2'->:two && appC;two: appD && appE";
 		assertGraph("[0:START][1:appA][2:appB][3:appC][4:END][9:appD][10:appE]"
 				+ "[0-1][1-2][2-3][3-4][fail:1-9][fail2:2-9][9-10][10-4]", spec);
 	}
 
-	@Ignore
+	@Disabled ("Transition out of flow is incorrect. Verify test or parser.")
 	@Test
-	public void transitionToSplit() {
+	void transitionToSplit() {
 		String spec = "aa 'foo'->:split && bb && split:  && ee";
 		// let's consider this a limitation for now.
 		assertGraph("[0:START][1:aa][2:bb][3:cc][4:dd][5:ee][6:END]" + "[0-1][1-2]['foo':1-3][2-3][2-4][3-5][4-5][5-6]",
@@ -714,200 +706,192 @@ public void transitionToSplit() {
 	}
 
 	@Test
-	public void transitionToNonResolvedLabel() {
+	void transitionToNonResolvedLabel() {
 		String spec = "aa 'foo'->:split && bb && cc";
 		TaskNode ctn = parse(spec, false);
 		List validationProblems = ctn.validate();
-		assertEquals(1, validationProblems.size());
-		assertEquals(DSLMessage.TASK_VALIDATION_TRANSITION_TARGET_LABEL_UNDEFINED,
-				validationProblems.get(0).getMessage());
-		assertEquals(3, validationProblems.get(0).getOffset());
+		assertThat(validationProblems).hasSize(1);
+		assertThat(validationProblems.get(0).getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_TRANSITION_TARGET_LABEL_UNDEFINED);
+		assertThat(validationProblems.get(0).getOffset()).isEqualTo(3);
 
 		spec = ":split && bb && cc || dd>";
 		ctn = parse(spec, false);
 		validationProblems = ctn.validate();
-		assertEquals(1, validationProblems.size());
-		assertEquals(DSLMessage.TASK_VALIDATION_TRANSITION_TARGET_LABEL_UNDEFINED,
-				validationProblems.get(0).getMessage());
-		assertEquals(4, validationProblems.get(0).getOffset());
+		assertThat(validationProblems).hasSize(1);
+		assertThat(validationProblems.get(0).getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_TRANSITION_TARGET_LABEL_UNDEFINED);
+		assertThat(validationProblems.get(0).getOffset()).isEqualTo(4);
 	}
 
 	@Test
-	public void visitors() {
+	void visitors() {
 		ctn = parse("appA");
 		TestVisitor tv = new TestVisitor();
 		ctn.accept(tv);
-		assertEquals(">SN[0] >F =F >TA =TA[appA] SN[0] >F =F >TA =TA[appA] SN[foo: 0] >F =F[foo:] >TA =TA[foo: appA] SN[foo: 0] >F =F[foo:] >TA =TA[foo: appA] SN[0] >F =F >TA =TA[appA] TA =TA[appB] SN[0] >F =F >TA =TA[appA] TA =TA[appB] ");
 		tv.reset();
 		ctn.accept(tv);
-		assertEquals(">SN[0] >F =F >S =S >F =F >TA =TA[appA] F =F >TA =TA[appB] SN[0] >F =F >S =S >F =F >TA =TA[appA] F =F >TA =TA[appB] ");
 		tv.reset();
 		ctn.accept(tv);
-		assertEquals(">SN[0] >F =F >S =S >F =F >TA =TA[appA] TA =TA[appB] F =F >TA =TA[appC] SN[0] >F =F >S =S >F =F >TA =TA[appA] TA =TA[appB] F =F >TA =TA[appC] :foo", false);
 		tv.reset();
 		ctn.accept(tv);
-		assertEquals(">SN[0] >F =F >TA =TA[appA] >T =T[0->:foo] SN[0] >F =F >TA =TA[appA] >T =T[0->:foo] appB");
 		tv.reset();
 		ctn.accept(tv);
-		assertEquals(">SN[0] >F =F >TA =TA[appA] >T =T[0->appB] SN[0] >F =F >TA =TA[appA] >T =T[0->appB] SN[0] >F =F >TA =TA[appA] SN[1] >F =F >TA =TA[appB] SN[0] >F =F >TA =TA[appA] SN[1] >F =F >TA =TA[appB] :foo *->appC;foo: appD && appE", false);
 		assertApps(ctn.getTaskApps(), "appA", "appB", "appC", "foo:appD", "appE");
 		tv.reset();
 		ctn.accept(tv);
-		assertEquals(">SN[0] >F =F >TA =TA[appA] TA =TA[appB] >T =T[0->:foo] T =T[*->appC] SN[foo: 1] >F =F[foo:] >TA =TA[foo: appD] TA =TA[appE] SN[0] >F =F >TA =TA[appA] TA =TA[appB] >T =T[0->:foo] T =T[*->appC] SN[foo: 1] >F =F[foo:] >TA =TA[foo: appD] TA =TA[appE] :label1 && appB\nlabel1: appC");
 	}
 
 	@Test
-	public void multiSequence() {
+	void multiSequence() {
 		TaskNode ctn = parse("appA\n  0->:foo\n  *->appB\n  && appE;foo: appC && appD");
 		LabelledTaskNode start = ctn.getStart(); // get the root of the AST starting appA
-		assertNotNull(start);
+		assertThat(start).isNotNull();
 		List sequences = ctn.getSequences();
 		LabelledTaskNode labelledTaskNode = sequences.get(1);
-		assertEquals("foo", labelledTaskNode.getLabelString());
+		assertThat(labelledTaskNode.getLabelString()).isEqualTo("foo");
 		LabelledTaskNode fooSequence = ctn.getSequenceWithLabel("foo"); // get the AST for foo: ...
-		assertNotNull(fooSequence);
+		assertThat(fooSequence).isNotNull();
 		TestVisitor tv = new TestVisitor();
 		ctn.accept(tv);
-		assertEquals(">SN[0] >F =F >TA =TA[appA] >T =T[0->:foo] T =T[*->appB] TA =TA[appE] SN[foo: 1] >F =F[foo:] >TA =TA[foo: appC] TA =TA[appD] SN[0] >F =F >TA =TA[appA] >T =T[0->:foo] T =T[*->appB] TA =TA[appE] SN[foo: 1] >F =F[foo:] >TA =TA[foo: appC] TA =TA[appD]  problems = validator.getProblems();
-		assertEquals(1, problems.size());
-		assertEquals(DSLMessage.TASK_VALIDATION_SECONDARY_SEQUENCES_MUST_BE_NAMED, problems.get(0).getMessage());
-		assertEquals(5, problems.get(0).getOffset());
-		assertEquals("158E:(pos 5): secondary sequences must have labels or are unreachable",
-				problems.get(0).toString());
-		assertEquals("158E:(pos 5): secondary sequences must have labels or are unreachable\nappA;appB\n     ^\n",
-				problems.get(0).toStringWithContext());
+		assertThat(problems).hasSize(1);
+		assertThat(problems.get(0).getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_SECONDARY_SEQUENCES_MUST_BE_NAMED);
+		assertThat(problems.get(0).getOffset()).isEqualTo(5);
+		assertThat(problems.get(0).toString()).isEqualTo("158E:(pos 5): secondary sequences must have labels or are unreachable");
+		assertThat(problems.get(0).toStringWithContext()).isEqualTo("158E:(pos 5): secondary sequences must have labels or are unreachable\nappA;appB\n     ^\n");
 
 		validator.reset();
 		ctn = parse("appA;foo: appB");
 		ctn.accept(validator);
-		assertFalse(validator.hasProblems());
+		assertThat(validator.hasProblems()).isFalse();
 
 		validator.reset();
 		ctn = parse("appA;foo: appB\nappC", false);
 		ctn.accept(validator);
 		problems = validator.getProblems();
-		assertEquals(1, problems.size());
-		assertEquals(DSLMessage.TASK_VALIDATION_SECONDARY_SEQUENCES_MUST_BE_NAMED, problems.get(0).getMessage());
-		assertEquals(15, problems.get(0).getOffset());
-		assertEquals("158E:(pos 15): secondary sequences must have labels or are unreachable",
-				problems.get(0).toString());
-		assertEquals("158E:(pos 15): secondary sequences must have labels or are unreachable\nappC\n^\n",
-				problems.get(0).toStringWithContext());
+		assertThat(problems).hasSize(1);
+		assertThat(problems.get(0).getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_SECONDARY_SEQUENCES_MUST_BE_NAMED);
+		assertThat(problems.get(0).getOffset()).isEqualTo(15);
+		assertThat(problems.get(0).toString()).isEqualTo("158E:(pos 15): secondary sequences must have labels or are unreachable");
+		assertThat(problems.get(0).toStringWithContext()).isEqualTo("158E:(pos 15): secondary sequences must have labels or are unreachable\nappC\n^\n");
 
 		validator.reset();
 		ctn = parse("appA && appA", false);
 		ctn.accept(validator);
 		problems = validator.getProblems();
-		assertEquals(1, problems.size());
-		assertEquals(DSLMessage.TASK_VALIDATION_APP_NAME_ALREADY_IN_USE, problems.get(0).getMessage());
-		assertEquals(8, problems.get(0).getOffset());
+		assertThat(problems).hasSize(1);
+		assertThat(problems.get(0).getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_APP_NAME_ALREADY_IN_USE);
+		assertThat(problems.get(0).getOffset()).isEqualTo(8);
 		validator.reset();
 		ctn = parse("appA 'foo' -> appA", false);
 		ctn.accept(validator);
 		problems = validator.getProblems();
-		assertEquals(1, problems.size());
-		assertEquals(DSLMessage.TASK_VALIDATION_APP_NAME_ALREADY_IN_USE, problems.get(0).getMessage());
-		assertEquals(14, problems.get(0).getOffset());
+		assertThat(problems).hasSize(1);
+		assertThat(problems.get(0).getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_APP_NAME_ALREADY_IN_USE);
+		assertThat(problems.get(0).getOffset()).isEqualTo(14);
 		validator.reset();
 		ctn = parse("appA 'foo' -> appA: appB", false);
 		ctn.accept(validator);
 		problems = validator.getProblems();
-		assertEquals(1, problems.size());
-		assertEquals(DSLMessage.TASK_VALIDATION_LABEL_CLASHES_WITH_TASKAPP_NAME, problems.get(0).getMessage());
-		assertEquals(14, problems.get(0).getOffset());
+		assertThat(problems).hasSize(1);
+		assertThat(problems.get(0).getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_LABEL_CLASHES_WITH_TASKAPP_NAME);
+		assertThat(problems.get(0).getOffset()).isEqualTo(14);
 		validator.reset();
 		ctn = parse("label1: appA 'foo' -> label1: appB", false);
 		ctn.accept(validator);
 		problems = validator.getProblems();
-		assertEquals(1, problems.size());
-		assertEquals(DSLMessage.TASK_VALIDATION_DUPLICATE_LABEL, problems.get(0).getMessage());
-		assertEquals(22, problems.get(0).getOffset());
+		assertThat(problems).hasSize(1);
+		assertThat(problems.get(0).getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_DUPLICATE_LABEL);
+		assertThat(problems.get(0).getOffset()).isEqualTo(22);
 		validator.reset();
 		ctn = parse("label1: appA 'foo' -> label1", false);
 		ctn.accept(validator);
 		problems = validator.getProblems();
-		assertEquals(1, problems.size());
-		assertEquals(DSLMessage.TASK_VALIDATION_APP_NAME_CLASHES_WITH_LABEL, problems.get(0).getMessage());
-		assertEquals(22, problems.get(0).getOffset());
+		assertThat(problems).hasSize(1);
+		assertThat(problems.get(0).getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_APP_NAME_CLASHES_WITH_LABEL);
+		assertThat(problems.get(0).getOffset()).isEqualTo(22);
 	}
 
 	@Test
-	public void labels() {
+	void labels() {
 		// basic task
 		ctn = parse("aaa: appA");
 		LabelledTaskNode flow = ctn.getStart();
-		assertEquals("aaa", flow.getLabelString());
+		assertThat(flow.getLabelString()).isEqualTo("aaa");
 		TaskAppNode taskApp = (TaskAppNode) ((FlowNode) flow).getSeriesElement(0);
-		assertEquals("aaa", taskApp.getLabelString());
+		assertThat(taskApp.getLabelString()).isEqualTo("aaa");
 
 		// flows
 		ctn = parse("aaa: appA && bbb: appB");
 		taskApp = (TaskAppNode) ((FlowNode) ctn.getStart()).getSeriesElement(1);
-		assertEquals("bbb", taskApp.getLabelString());
+		assertThat(taskApp.getLabelString()).isEqualTo("bbb");
 
 		// splits
 		ctn = parse("outer:");
 		flow = (FlowNode) ctn.getStart();
-		assertEquals("outer", flow.getLabelString());
+		assertThat(flow.getLabelString()).isEqualTo("outer");
 		SplitNode s = (SplitNode) flow.getSeriesElement(0);
-		assertEquals("outer", s.getLabelString());
+		assertThat(s.getLabelString()).isEqualTo("outer");
 		taskApp = (TaskAppNode) (((FlowNode) s.getSeriesElement(0)).getSeriesElement(0));
-		assertEquals("aaa", taskApp.getLabelString());
+		assertThat(taskApp.getLabelString()).isEqualTo("aaa");
 		taskApp = (TaskAppNode) (((FlowNode) s.getSeriesElement(1)).getSeriesElement(0));
-		assertEquals("bbb", taskApp.getLabelString());
+		assertThat(taskApp.getLabelString()).isEqualTo("bbb");
 
 		// parentheses
 		ctn = parse("(aaa: appA && appB)");
 		taskApp = (TaskAppNode) ((FlowNode) ctn.getStart()).getSeriesElement(0);
-		assertEquals("aaa", taskApp.getLabelString());
+		assertThat(taskApp.getLabelString()).isEqualTo("aaa");
 
 		checkForParseError("aaa: (appA)", DSLMessage.TASK_NO_LABELS_ON_PARENS, 5);
 		checkForParseError("aaa: bbb: appA", DSLMessage.NO_DOUBLE_LABELS, 5);
@@ -917,7 +901,7 @@ public void labels() {
 	}
 
 	@Test
-	public void badTransitions() {
+	void badTransitions() {
 		checkForParseError("App1 ->", DSLMessage.TASK_ARROW_SHOULD_BE_PRECEDED_BY_CODE, 5);
 		checkForParseError("App1 0->x ->", DSLMessage.TASK_ARROW_SHOULD_BE_PRECEDED_BY_CODE, 10);
 		checkForParseError("App1 ->xx", DSLMessage.TASK_ARROW_SHOULD_BE_PRECEDED_BY_CODE, 5);
@@ -925,7 +909,7 @@ public void badTransitions() {
 	}
 
 	@Test
-	public void graphToText_1712() {
+	void graphToText_1712() {
 		assertGraph("[0:START][1:timestamp][2:END][0-1][1-2]", "timestamp");
 		// In issue 1712 the addition of an empty properties map to the link damages the
 		// generation of the DSL. It was expecting null if there are no properties.
@@ -937,11 +921,11 @@ public void graphToText_1712() {
 		graph.nodes.get(2).metadata = new HashMap<>();
 		graph.links.get(0).properties = new HashMap<>();
 		graph.links.get(1).properties = new HashMap<>();
-		assertEquals("timestamp", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("timestamp");
 	}
-	
+
 	@Test
-	public void graphToText_3667() {
+	void graphToText_3667() {
 		assertGraph("[0:START][1:sql-executor-task:password=password:url=jdbc:postgresql://127.0.0.1:5432/postgres:script-location=/dataflow/scripts/test.sql:username=postgres]"+
 					"[2:END][0-1][1-2]","sql-executor-task --script-location=/dataflow/scripts/test.sql --username=postgres --password=password --url=jdbc:postgresql://127.0.0.1:5432/postgres");
 		
@@ -950,46 +934,46 @@ public void graphToText_3667() {
 
 		TaskNode ctn = parse("t1: timestamp 'FAILED'->t2: timestamp && t3: timestamp");
 		Graph graph = ctn.toGraph();
-		assertEquals("t1: timestamp 'FAILED'->t2: timestamp && t3: timestamp", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("t1: timestamp 'FAILED'->t2: timestamp && t3: timestamp");
 		
 		ctn = parse("t1: timestamp --format=aabbcc 'FAILED'->t2: timestamp && t3: timestamp --format=gghhii");
 		graph = ctn.toGraph();
-		assertEquals("t1: timestamp --format=aabbcc 'FAILED'->t2: timestamp && t3: timestamp --format=gghhii", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("t1: timestamp --format=aabbcc 'FAILED'->t2: timestamp && t3: timestamp --format=gghhii");
 
 		ctn = parse("t1: timestamp --format=aabbcc 'FAILED'->t2: timestamp --format=ddeeff && t3: timestamp --format=gghhii");
 		graph = ctn.toGraph();
 		Node node = graph.nodes.get(2);
-		assertEquals("ddeeff",node.properties.get("format"));
-		assertEquals("t1: timestamp --format=aabbcc 'FAILED'->t2: timestamp --format=ddeeff && t3: timestamp --format=gghhii", graph.toDSLText());
+		assertThat(node.properties).containsEntry("format", "ddeeff");
+		assertThat(graph.toDSLText()).isEqualTo("t1: timestamp --format=aabbcc 'FAILED'->t2: timestamp --format=ddeeff && t3: timestamp --format=gghhii");
 		
 		assertGraph("[0:START][1:eee:timestamp:format=ttt][2:QQQQQ:timestamp:format=NOT-IN-TEXT][3:ooo:timestamp:format=yyyy][4:END][0-1][FAILED:1-2][1-3][3-4][2-4]",
 				    "eee: timestamp --format=ttt 'FAILED'->QQQQQ: timestamp --format=NOT-IN-TEXT && ooo: timestamp --format=yyyy");
 	}
-	
+
 	@Test
-	public void graphToTextSingleAppInSplit() {
+	void graphToTextSingleAppInSplit() {
 		// Note the graph here does not include anything special
 		// to preserve the split because the split is unnecessary
 		// and is removed when the text is recomputed for it.
 		assertGraph("[0:START][1:AppA][2:END][0-1][1-2]","");
 		TaskNode ctn = parse("");
 		Graph graph = ctn.toGraph();
-		assertEquals("AppA", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("AppA");
 		
 		assertGraph("[0:START][1:AppA][2:AppB][3:END][0-1][1-2][2-3]"," && AppB");
 		ctn = parse(" && AppB");
 		graph = ctn.toGraph();
-		assertEquals("AppA && AppB", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("AppA && AppB");
 		
 		assertGraph("[0:START][1:AppA][2:AppC][3:AppB][4:END][0-1][99:1-2][1-3][2-3][3-4]"," AppC> && AppB");
 		ctn = parse("AppC> && AppB");
 		graph = ctn.toGraph();
-		assertEquals("AppC> && AppB", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("AppC> && AppB");
 
 		// Check it still does the right thing when the split does have multiple:
 		ctn = parse("AppC || AppD> && AppB");
 		graph = ctn.toGraph();
-		assertEquals("AppC || AppD> && AppB", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("AppC || AppD> && AppB");
 		
 		// This is the test specifically for issue 3263
 		ctn = parse("T2: timestamp 'Error'->T1: timestamp> && Backwards: timestamp");
@@ -998,7 +982,7 @@ public void graphToTextSingleAppInSplit() {
 		assertGraph("[0:START][1:Import:timestamp][2:T2:timestamp][3:T1:timestamp][4:Backwards:timestamp][5:END][0-1][Error2:1-2][Error:1-3][1-4][2-4][3-4][4-5]",
 			"T2: timestamp 'Error'->T1: timestamp> && Backwards: timestamp");
 		graph = ctn.toGraph();
-		assertEquals("T2: timestamp 'Error'->T1: timestamp> && Backwards: timestamp", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("T2: timestamp 'Error'->T1: timestamp> && Backwards: timestamp");
 		
 		// This is the variant of the above without the <...>
 		// Now notice the links from the transition nodes go direct to END
@@ -1006,11 +990,11 @@ public void graphToTextSingleAppInSplit() {
 		assertGraph("[0:START][1:Import:timestamp][2:T2:timestamp][3:T1:timestamp][4:Backwards:timestamp][5:END][0-1][Error2:1-2][Error:1-3][1-4][4-5][2-5][3-5]",
 			"Import: timestamp 'Error2'->T2: timestamp 'Error'->T1: timestamp && Backwards: timestamp");
 		graph = ctn.toGraph();
-		assertEquals("Import: timestamp 'Error2'->T2: timestamp 'Error'->T1: timestamp && Backwards: timestamp", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("Import: timestamp 'Error2'->T2: timestamp 'Error'->T1: timestamp && Backwards: timestamp");
 	}
 
 	@Test
-	public void graphToText() {
+	void graphToText() {
 		assertGraph("[0:START][1:AppA][2:END][0-1][1-2]", "AppA");
 		checkDSLToGraphAndBackToDSL("AppA");
 		assertGraph("[0:START][1:AppA][2:AppB][3:END][0-1][1-2][2-3]", "AppA && AppB");
@@ -1048,7 +1032,7 @@ public void graphToText() {
 	}
 
 	@Test
-	public void textToGraphWithTransitions() {
+	void textToGraphWithTransitions() {
 		assertGraph("[0:START][1:AppA][2:AppE][3:AppB][4:END][0-1][0:1-2][1-3][3-4][2-4]", "AppA 0->AppE && AppB");
 		checkDSLToGraphAndBackToDSL("AppA 0->AppE && AppB");
 		assertGraph("[0:START][1:AppA][2:AppE][3:AppB][4:AppC][5:END][0-1][0:1-2][1-3][3-4][4-5][2-5]",
@@ -1062,13 +1046,13 @@ public void textToGraphWithTransitions() {
 	}
 
 	@Test
-	public void graphToTextSplitWithTransition() {
+	void graphToTextSplitWithTransition() {
 		checkDSLToGraphAndBackToDSL("Kill || Bar>");
 		checkDSLToGraphAndBackToDSL("Kill || AppB> && AppC");
 	}
 
 	@Test
-	public void toDSLTextNestedSplits() {
+	void toDSLTextNestedSplits() {
 		checkDSLToGraphAndBackToDSL(" && eee");
 		checkDSLToGraphAndBackToDSL("> && eee");
 		checkDSLToGraphAndBackToDSL(" && foo || ddd && eee> && fff");
@@ -1079,63 +1063,60 @@ public void toDSLTextNestedSplits() {
 	}
 
 	@Test
-	public void errorExpectDoubleOr() {
+	void errorExpectDoubleOr() {
 		checkForParseError("", DSLMessage.TASK_DOUBLE_OR_REQUIRED, 4);
 		checkForParseError("", DSLMessage.TASK_DOUBLE_OR_REQUIRED, 6);
 	}
 
 	@Test
-	public void modeError() {
+	void modeError() {
 		try {
 			new TaskParser("foo", "appA --p1=v1", false, true).parse();
-			fail();
+			fail("");
 		}
 		catch (CheckPointedParseException cppe) {
-			assertEquals(DSLMessage.TASK_ARGUMENTS_NOT_ALLOWED_UNLESS_IN_APP_MODE, cppe.message);
+			assertThat(cppe.message).isEqualTo(DSLMessage.TASK_ARGUMENTS_NOT_ALLOWED_UNLESS_IN_APP_MODE);
 		}
-		try {
+		assertDoesNotThrow(() -> {
 			new TaskParser("foo", "appA --p1=v1", true, true).parse();
-		}
-		catch (CheckPointedParseException cppe) {
-			fail();
-		}
+		});
 	}
 
 	@Test
-	public void unexpectedDoubleAnd() {
+	void unexpectedDoubleAnd() {
 		checkForParseError("aa  &&&& bb", DSLMessage.EXPECTED_APPNAME, 6, "&&");
 	}
 
 	@Test
-	public void toDSLTextTransitions() {
+	void toDSLTextTransitions() {
 		// [SHOULD-VALIDATE] There is no real route to bbb
 		String spec = "aaa '*'->$END && bbb";
-		assertEquals(spec, parse(spec).toDSL());
+		assertThat(parse(spec).toDSL()).isEqualTo(spec);
 		assertGraph("[0:START][1:aaa][2:$END][3:bbb][4:END]" + "[0-1][*:1-2][1-3][3-4]", spec);
 		checkDSLToGraphAndBackToDSL(spec);
 	}
 
-	@Test
 	// You can't draw this on the graph, it would end up looking like "aaa | '*' = $END ||
 	// bbb || ccc
-	public void toDSLTextTransitionsSplit() {
+	@Test
+	void toDSLTextTransitionsSplit() {
 		checkDSLToGraphAndBackToDSL("aaa '*'->$END && ");
 	}
 
 	@Test
-	public void toDSLTextTransitionsFlow() {
+	void toDSLTextTransitionsFlow() {
 		checkDSLToGraphAndBackToDSL("aaa '*'->$END && bbb && ccc");
 	}
 
 	@Test
-	public void toDSLTextSplitFlowSplit() {
+	void toDSLTextSplitFlowSplit() {
 		checkDSLToGraphAndBackToDSL(" && foo && ");
 		checkDSLToGraphAndBackToDSL(" && foo 'wibble'->$END && ");
 		checkDSLToGraphAndBackToDSL(" && foo 'wibble'->$FAIL && ");
 	}
 
 	@Test
-	public void toDSLTextFlowTransitions() {
+	void toDSLTextFlowTransitions() {
 		checkDSLToGraphAndBackToDSL("aaa 'COMPLETED'->kill1 'FOO'->kill2");
 		checkDSLToGraphAndBackToDSL("aaa 'COMPLETED'->kill && bbb && ccc");
 		checkDSLToGraphAndBackToDSL("aaa 'COMPLETED'->kill1 && bbb 'COMPLETED'->kill2 && ccc");
@@ -1143,39 +1124,39 @@ public void toDSLTextFlowTransitions() {
 	}
 
 	@Test
-	public void toDSLTextSplitTransitions() {
+	void toDSLTextSplitTransitions() {
 		checkDSLToGraphAndBackToDSL("kill || bbb> && ccc");
 	}
 
 	@Test
-	public void toDSLTextLong() {
+	void toDSLTextLong() {
 		checkDSLToGraphAndBackToDSL(
 				"> && eee && hhh && iii && ");
 	}
 
 	@Test
-	public void syncBetweenSplits() {
+	void syncBetweenSplits() {
 		String spec = " && ";
 		checkDSLToGraphAndBackToDSL(spec);
 		assertGraph("[0:START][1:a][2:b][3:SYNC][4:c][5:d][6:END]" + "[0-1][0-2][1-3][2-3][3-4][3-5][4-6][5-6]", spec);
 	}
 
 	@Test
-	public void toDSLTextManualSync() {
+	void toDSLTextManualSync() {
 		// Here foo is effectively acting as a SYNC node
 		String spec = " && foo && ";
 		checkDSLToGraphAndBackToDSL(spec);
 	}
 
 	@Test
-	public void whitespace() {
-		assertEquals("A && B", parse("A&&B").stringify());
-		assertEquals("", parse("").stringify());
-		assertEquals("", parse("").stringify());
+	void whitespace() {
+		assertThat(parse("A&&B").stringify()).isEqualTo("A && B");
+		assertThat(parse("").stringify()).isEqualTo("");
+		assertThat(parse("").stringify()).isEqualTo("");
 	}
 
 	@Test
-	public void endTransition() {
+	void endTransition() {
 		String spec = "aaa 'broken'->$END";
 		assertGraph("[0:START][1:aaa][2:$END][3:END][0-1][broken:1-2][1-3]", spec);
 		checkDSLToGraphAndBackToDSL(spec);
@@ -1183,20 +1164,20 @@ public void endTransition() {
 
 	// TODO not quoted state transition names
 	@Test
-	public void missingQuotes() {
+	void missingQuotes() {
 		checkForParseError("appA BROKEN->$FAIL", DSLMessage.TASK_UNQUOTED_TRANSITION_CHECK_MUST_BE_NUMBER, 5, "BROKEN");
 		checkForParseError("appA\n BROKEN->$FAIL", DSLMessage.TASK_UNQUOTED_TRANSITION_CHECK_MUST_BE_NUMBER, 6,
 				"BROKEN");
 	}
 
 	@Test
-	public void parentheses2() {
+	void parentheses2() {
 		TaskNode ctn = parse("<(jobA && jobB && jobC) || boo: jobC>");
-		assertEquals("", ctn.stringify());
+		assertThat(ctn.stringify()).isEqualTo("");
 	}
 
 	@Test
-	public void funnyJobNames() {
+	void funnyJobNames() {
 		ctn = parse("a-b-c");
 		assertFlow(ctn.getStart(), "a-b-c");
 		ctn = parse("a-b-c && d-e-f");
@@ -1205,74 +1186,73 @@ public void funnyJobNames() {
 	}
 
 	@Test
-	public void names() {
+	void names() {
 		ctn = parse("aaaa: foo");
 		List sequences = ctn.getSequences();
-		assertEquals("aaaa", sequences.get(0).getLabelString());
+		assertThat(sequences.get(0).getLabelString()).isEqualTo("aaaa");
 		ctn = parse("aaaa: foo && bar");
 		sequences = ctn.getSequences();
-		assertEquals("aaaa", sequences.get(0).getLabelString());
+		assertThat(sequences.get(0).getLabelString()).isEqualTo("aaaa");
 	}
 
 	@Test
-	public void nestedSplit1() {
+	void nestedSplit1() {
 		TaskNode ctn = parse("< || jobC>");
-		assertEquals("< || jobC>", ctn.stringify());
+		assertThat(ctn.stringify()).isEqualTo("< || jobC>");
 		LabelledTaskNode start = ctn.getStart();
-		assertTrue(start instanceof FlowNode);
+		assertThat(start instanceof FlowNode).isTrue();
 		SplitNode split = (SplitNode) ((FlowNode) start).getSeriesElement(0);
 		LabelledTaskNode seriesElement = ((FlowNode) split.getSeriesElement(0)).getSeriesElement(0);
-		assertTrue(seriesElement instanceof SplitNode);
+		assertThat(seriesElement instanceof SplitNode).isTrue();
 		SplitNode split2 = (SplitNode) seriesElement;
-		assertEquals(2, split2.getSeriesLength());
+		assertThat(split2.getSeriesLength()).isEqualTo(2);
 	}
 
 	@Test
-	public void nestedSplit2() {
+	void nestedSplit2() {
 		TaskNode ctn = parse(" || jobD>");
-		assertEquals(" || jobD>", ctn.stringify());
+		assertThat(ctn.stringify()).isEqualTo(" || jobD>");
 		LabelledTaskNode start = ctn.getStart();
-		assertTrue(start.isFlow());
+		assertThat(start.isFlow()).isTrue();
 		SplitNode split = (SplitNode) ((FlowNode) start).getSeriesElement(0);
-		assertEquals(3, split.getSeriesLength());
+		assertThat(split.getSeriesLength()).isEqualTo(3);
 		LabelledTaskNode seriesElement = split.getSeriesElement(1);
 		SplitNode splitSeriesElement = (SplitNode) ((FlowNode) seriesElement).getSeriesElement(0);
-		assertTrue(splitSeriesElement.isSplit());
-		assertEquals(2, splitSeriesElement.getSeriesLength());
-		assertEquals("", splitSeriesElement.stringify());
-		assertEquals("jobB",
-				((TaskAppNode) ((FlowNode) splitSeriesElement.getSeriesElement(0)).getSeriesElement(0)).getName());
+		assertThat(splitSeriesElement.isSplit()).isTrue();
+		assertThat(splitSeriesElement.getSeriesLength()).isEqualTo(2);
+		assertThat(splitSeriesElement.stringify()).isEqualTo("");
+		assertThat(((TaskAppNode) ((FlowNode) splitSeriesElement.getSeriesElement(0)).getSeriesElement(0)).getName()).isEqualTo("jobB");
 	}
 
 	@Test
-	public void singleTransition() {
+	void singleTransition() {
 		TaskNode ctn = parse("foo 'completed'->bar");
 		LabelledTaskNode start = ctn.getStart();
 		start = ((FlowNode) start).getSeriesElement(0);
-		assertTrue(start instanceof TaskAppNode);
+		assertThat(start instanceof TaskAppNode).isTrue();
 		TaskAppNode ta = (TaskAppNode) start;
 		List transitions = ta.getTransitions();
-		assertEquals(1, transitions.size());
-		assertEquals("completed", transitions.get(0).getStatusToCheck());
-		assertEquals("bar", transitions.get(0).getTargetApp().getName());
+		assertThat(transitions).hasSize(1);
+		assertThat(transitions.get(0).getStatusToCheck()).isEqualTo("completed");
+		assertThat(transitions.get(0).getTargetApp().getName()).isEqualTo("bar");
 	}
 
 	@Test
-	public void doubleTransition() {
+	void doubleTransition() {
 		TaskNode ctn = parse("foo 'completed'->bar 'wibble'->wobble");
 		LabelledTaskNode start = ctn.getStart();
 		assertFlow(start, "foo");
-		TaskAppNode ta = (TaskAppNode) ((FlowNode) start).getSeriesElement(0);
+		TaskAppNode ta = (TaskAppNode) start.getSeriesElement(0);
 		List transitions = ta.getTransitions();
-		assertEquals(2, transitions.size());
-		assertEquals("completed", transitions.get(0).getStatusToCheck());
-		assertEquals("bar", transitions.get(0).getTargetApp().getName());
-		assertEquals("wibble", transitions.get(1).getStatusToCheck());
-		assertEquals("wobble", transitions.get(1).getTargetApp().getName());
+		assertThat(transitions).hasSize(2);
+		assertThat(transitions.get(0).getStatusToCheck()).isEqualTo("completed");
+		assertThat(transitions.get(0).getTargetApp().getName()).isEqualTo("bar");
+		assertThat(transitions.get(1).getStatusToCheck()).isEqualTo("wibble");
+		assertThat(transitions.get(1).getTargetApp().getName()).isEqualTo("wobble");
 	}
 
 	@Test
-	public void moreSophisticatedScenarios_gh712_1a() {
+	void moreSophisticatedScenarios_gh712_1a() {
 		TaskNode ctn = parse(
 				"< && timestamp || spark-yarn>");
 
@@ -1280,174 +1260,160 @@ public void moreSophisticatedScenarios_gh712_1a() {
 		// https://user-images.githubusercontent.com/1562654/38313990-27662f60-37da-11e8-9106-26688d631fae.png
 		LabelledTaskNode start = ctn.getStart();
 		FlowNode f1 = (FlowNode) start;
-		assertEquals(1, f1.getSeriesLength());
+		assertThat(f1.getSeriesLength()).isEqualTo(1);
 		SplitNode s1 = (SplitNode) f1.getSeriesElement(0);
-		assertEquals(2, s1.getSeriesLength());
+		assertThat(s1.getSeriesLength()).isEqualTo(2);
 		// This one is just spark-yarn
 		assertFlow(s1.getSeriesElement(1), "spark-yarn");
 
 		// This one is a flow of a split of jdbchdfs-local/spark-client and
 		// spark-cluster/spark-cluster and then timestamp
 		FlowNode f2 = (FlowNode) s1.getSeriesElement(0);
-		assertEquals(2, f2.getSeriesLength());
-		assertEquals("timestamp", ((TaskAppNode) f2.getSeriesElement(1)).getName());
+		assertThat(f2.getSeriesLength()).isEqualTo(2);
+		assertThat(((TaskAppNode) f2.getSeriesElement(1)).getName()).isEqualTo("timestamp");
 
 		SplitNode s2 = (SplitNode) f2.getSeriesElement(0);
-		assertEquals(2, s2.getSeriesLength());
+		assertThat(s2.getSeriesLength()).isEqualTo(2);
 		FlowNode s2fa = (FlowNode) s2.getSeriesElement(0);
 		FlowNode s2fb = (FlowNode) s2.getSeriesElement(1);
 		assertFlow(s2fa, "jdbchdfs-local", "spark-client");
 		assertFlow(s2fb, "spark-cluster", "spark-cluster");
 
 		Graph graph = ctn.toGraph();
-		assertEquals(
-				"[0:START][1:jdbchdfs-local][2:spark-client][3:spark-cluster][4:two:spark-cluster][5:timestamp][6:spark-yarn][7:END]"+
-				"[0-1][1-2][0-3][3-4][2-5][4-5][0-6][5-7][6-7]",
-				graph.toVerboseString());
+		assertThat(graph.toVerboseString()).isEqualTo("[0:START][1:jdbchdfs-local][2:spark-client][3:spark-cluster][4:two:spark-cluster][5:timestamp][6:spark-yarn][7:END]" +
+		"[0-1][1-2][0-3][3-4][2-5][4-5][0-6][5-7][6-7]");
 
-		assertEquals(
-				"< && timestamp || spark-yarn>",
-				graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("< && timestamp || spark-yarn>");
 	}
 
 	@Test
-	public void moreSophisticatedScenarios_gh712_1b() {
+	void moreSophisticatedScenarios_gh712_1b() {
 		TaskNode ctn = parse("< && CC || DD>");
 		Graph graph = ctn.toGraph();
-		assertEquals(
-				"[0:START][1:AA][2:BB][3:CC][4:DD][5:END]" +
-				"[0-1][0-2][1-3][2-3][0-4][3-5][4-5]",
-				graph.toVerboseString());
-		assertEquals("< && CC || DD>", graph.toDSLText());
+		assertThat(graph.toVerboseString()).isEqualTo("[0:START][1:AA][2:BB][3:CC][4:DD][5:END]" +
+		"[0-1][0-2][1-3][2-3][0-4][3-5][4-5]");
+		assertThat(graph.toDSLText()).isEqualTo("< && CC || DD>");
 	}
 
 	@Test
-	public void moreSophisticatedScenarios_gh712_1c() {
+	void moreSophisticatedScenarios_gh712_1c() {
 		TaskNode ctn = parse("< && CC && DD || EE>");
 		Graph graph = ctn.toGraph();
-		assertEquals(
-				"[0:START][1:AA][2:BB][3:CC][4:DD][5:EE][6:END]" +
-				"[0-1][0-2][1-3][2-3][3-4][0-5][4-6][5-6]",
-				graph.toVerboseString());
-		assertEquals("< && CC && DD || EE>", graph.toDSLText());
+		assertThat(graph.toVerboseString()).isEqualTo("[0:START][1:AA][2:BB][3:CC][4:DD][5:EE][6:END]" +
+		"[0-1][0-2][1-3][2-3][3-4][0-5][4-6][5-6]");
+		assertThat(graph.toDSLText()).isEqualTo("< && CC && DD || EE>");
 		ctn = parse("< && CC && DD || EE>");
-		assertEquals("< && CC && DD || EE>", ctn.toGraph().toDSLText());
+		assertThat(ctn.toGraph().toDSLText()).isEqualTo("< && CC && DD || EE>");
 	}
 
 	@Test
-	public void moreSophisticatedScenarios_gh712_1d() {
+	void moreSophisticatedScenarios_gh712_1d() {
 		TaskNode ctn = parse("< && AG || AB>");
-		assertEquals("< && AG || AB>", ctn.toGraph().toDSLText());
+		assertThat(ctn.toGraph().toDSLText()).isEqualTo("< && AG || AB>");
 		// Now include a transition
 		ctn = parse("< AH && AF> && AG || AB>");
 		Graph graph = ctn.toGraph();
-		assertEquals(
-				"[0:START][1:AC][2:AD][3:AE][4:AH][5:AF][6:AG][7:AB][8:END]" +
-				"[0-1][1-2][0-3][jumpOut:3-4][3-5][2-6][5-6][4-6][0-7][6-8][7-8]",
-				graph.toVerboseString());
+		assertThat(graph.toVerboseString()).isEqualTo("[0:START][1:AC][2:AD][3:AE][4:AH][5:AF][6:AG][7:AB][8:END]" +
+		"[0-1][1-2][0-3][jumpOut:3-4][3-5][2-6][5-6][4-6][0-7][6-8][7-8]");
 		// Key thing to observe above is the link from [4-6] which goes from
 		// the transition target AH to the end of the split AG
-		assertEquals("<AH && AF> && AG || AB>", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("<AH && AF> && AG || AB>");
 	}
 
 	@Test
-	public void moreSophisticatedScenarios_gh712_1e() {
+	void moreSophisticatedScenarios_gh712_1e() {
 		TaskNode ctn = parse("< && CC && DD ||  && GG || HH>");
 		Graph graph = ctn.toGraph();
-		assertEquals(
-				"[0:START][1:AA][2:BB][3:CC][4:DD][5:EE][6:FF][7:GG][8:HH][9:END]" +
-				"[0-1][0-2][1-3][2-3][3-4][0-5][0-6][5-7][6-7][0-8][4-9][7-9][8-9]",
-				graph.toVerboseString());
-		assertEquals("< && CC && DD ||  && GG || HH>", graph.toDSLText());
+		assertThat(graph.toVerboseString()).isEqualTo("[0:START][1:AA][2:BB][3:CC][4:DD][5:EE][6:FF][7:GG][8:HH][9:END]" +
+		"[0-1][0-2][1-3][2-3][3-4][0-5][0-6][5-7][6-7][0-8][4-9][7-9][8-9]");
+		assertThat(graph.toDSLText()).isEqualTo("< && CC && DD ||  && GG || HH>");
 	}
 
 	@Test
-	public void moreSophisticatedScenarios_gh712_1f() {
+	void moreSophisticatedScenarios_gh712_1f() {
 		// Multiple nested splits in parallel
 		TaskNode ctn = parse("< && CC ||  && FF && GG || HH>");
 		Graph graph = ctn.toGraph();
-		assertEquals(
-				"[0:START][1:AA][2:BB][3:CC][4:DD][5:EE][6:FF][7:GG][8:HH][9:END]"+
-				"[0-1][0-2][1-3][2-3][0-4][0-5][4-6][5-6][6-7][0-8][3-9][7-9][8-9]",
-				graph.toVerboseString());
-		assertEquals("< && CC ||  && FF && GG || HH>", graph.toDSLText());
+		assertThat(graph.toVerboseString()).isEqualTo("[0:START][1:AA][2:BB][3:CC][4:DD][5:EE][6:FF][7:GG][8:HH][9:END]" +
+		"[0-1][0-2][1-3][2-3][0-4][0-5][4-6][5-6][6-7][0-8][3-9][7-9][8-9]");
+		assertThat(graph.toDSLText()).isEqualTo("< && CC ||  && FF && GG || HH>");
 	}
 
 	// Case2: expecting a validation error on the parse because the second spark-cluster
 	// isn't labeled
 	@Test
-	public void moreSophisticatedScenarios_gh712_2() {
+	void moreSophisticatedScenarios_gh712_2() {
 		try {
 			parse("< && timestamp || spark-yarn>");
-			fail();
+			fail("");
 		}
 		catch (TaskValidationException tve) {
 			List validationProblems = tve.getValidationProblems();
-			assertEquals(1, validationProblems.size());
+			assertThat(validationProblems).hasSize(1);
 			TaskValidationProblem tvp = validationProblems.get(0);
-			assertEquals(53, tvp.getOffset());
-			assertEquals(DSLMessage.TASK_VALIDATION_APP_NAME_ALREADY_IN_USE, tvp.getMessage());
+			assertThat(tvp.getOffset()).isEqualTo(53);
+			assertThat(tvp.getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_APP_NAME_ALREADY_IN_USE);
 		}
 	}
 
 	// Case3: no graph when 1 label included?
 	@Test
-	public void moreSophisticatedScenarios_gh712_3() {
+	void moreSophisticatedScenarios_gh712_3() {
 		try {
 			parse("<1: jdbchdfs-local && spark-client && timestamp || spark-cluster && spark-cluster && timestamp || spark-yarn>");
-			fail();
+			fail("");
 		}
 		catch (TaskValidationException tve) {
 			System.out.println(tve);
 			List validationProblems = tve.getValidationProblems();
-			assertEquals(2, validationProblems.size());
+			assertThat(validationProblems).hasSize(2);
 			TaskValidationProblem tvp = validationProblems.get(0);
-			assertEquals(68, tvp.getOffset());
-			assertEquals(DSLMessage.TASK_VALIDATION_APP_NAME_ALREADY_IN_USE, tvp.getMessage());
+			assertThat(tvp.getOffset()).isEqualTo(68);
+			assertThat(tvp.getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_APP_NAME_ALREADY_IN_USE);
 			tvp = validationProblems.get(1);
-			assertEquals(85, tvp.getOffset());
-			assertEquals(DSLMessage.TASK_VALIDATION_APP_NAME_ALREADY_IN_USE, tvp.getMessage());
+			assertThat(tvp.getOffset()).isEqualTo(85);
+			assertThat(tvp.getMessage()).isEqualTo(DSLMessage.TASK_VALIDATION_APP_NAME_ALREADY_IN_USE);
 		}
 	}
 
 	@Test
-	public void wildcardTransition() {
+	void wildcardTransition() {
 		ctn = parse("foo '*'->wibble");
-		assertEquals("foo '*'->wibble", ctn.toDSL());
+		assertThat(ctn.toDSL()).isEqualTo("foo '*'->wibble");
 		ctn = parse("foo \"*\"->wibble");
-		assertEquals("foo \"*\"->wibble", ctn.toDSL());
+		assertThat(ctn.toDSL()).isEqualTo("foo \"*\"->wibble");
 	}
 
 	@Test
-	public void splitWithTransition() {
+	void splitWithTransition() {
 		String spec = "kill || bar>";
 		ctn = parse(spec);
-		assertEquals(spec, ctn.toDSL());
+		assertThat(ctn.toDSL()).isEqualTo(spec);
 	}
 
 	@Test
-	public void multiLine() {
+	void multiLine() {
 		TaskNode ctn = parse("kill\n" + " '*'->custard\n" + " || bar>");
-		assertEquals("kill '*'->custard || bar>", ctn.stringify());
+		assertThat(ctn.stringify()).isEqualTo("kill '*'->custard || bar>");
 	}
 
 	@Test
-	public void emptyInput() {
+	void emptyInput() {
 		checkForParseError("", DSLMessage.OOD, 0);
 	}
 
 	@Test
-	public void toGraph$END() {
+	void toGraph$END() {
 		TaskNode ctn = parse("foo 'oranges'->$END");
-		assertEquals("foo 'oranges'->$END", ctn.toDSL());
+		assertThat(ctn.toDSL()).isEqualTo("foo 'oranges'->$END");
 		assertGraph("[0:START][1:foo][2:$END][3:END][0-1][oranges:1-2][1-3]", "foo 'oranges'->$END");
 		checkDSLToGraphAndBackToDSL("foo 'oranges'->$END");
 	}
 
 	@Test
-	public void toGraph$FAIL() {
+	void toGraph$FAIL() {
 		String spec = "foo 'oranges'->$FAIL";
-		assertEquals(spec, parse(spec).toDSL());
+		assertThat(parse(spec).toDSL()).isEqualTo(spec);
 		assertGraph("[0:START][1:foo][2:$FAIL][3:END][0-1][oranges:1-2][1-3]", spec);
 		checkDSLToGraphAndBackToDSL(spec);
 	}
@@ -1456,7 +1422,7 @@ public void emptyInput() {
 	// js = parse(" || boo");
 	@Test
-	public void toGraphWithTransition2() {
+	void toGraphWithTransition2() {
 		// The target transition node hoo is not elsewhere on the list
 		String definition = "hoo || bar> && boo && goo";
 		assertGraph("[0:START][1:foo][2:hoo][3:bar][4:boo][5:goo][6:END]"
@@ -1465,7 +1431,7 @@ public void toGraphWithTransition2() {
 	}
 
 	@Test
-	public void spacesInProperties() {
+	void spacesInProperties() {
 		// If a property value in the graph has a space in, quote it when creating dsl
 		// If a transition code in the graph is not numeric or * then quote it
 		Graph graph = parse("aaa").toGraph();
@@ -1478,53 +1444,53 @@ public void spacesInProperties() {
 		properties.put("two", "b ar");
 		Node newNode = new Node(n.id, n.name, properties);
 		graph.nodes.set(1, newNode);
-		assertEquals("aaa --one=bar --two='b ar'", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("aaa --one=bar --two='b ar'");
 		graph.nodes.add(new Node("3", "bbb"));
 		graph.links.add(new Link("1", "3", "tname"));
-		assertEquals("aaa --one=bar --two='b ar' 'tname'->bbb", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("aaa --one=bar --two='b ar' 'tname'->bbb");
 		graph.nodes.add(new Node("4", "ccc"));
 		graph.links.add(new Link("1", "4", "*"));
-		assertEquals("aaa --one=bar --two='b ar' 'tname'->bbb '*'->ccc", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("aaa --one=bar --two='b ar' 'tname'->bbb '*'->ccc");
 		graph.nodes.add(new Node("5", "ddd"));
 		graph.links.add(new Link("1", "5", "3"));
-		assertEquals("aaa --one=bar --two='b ar' 'tname'->bbb '*'->ccc 3->ddd", graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo("aaa --one=bar --two='b ar' 'tname'->bbb '*'->ccc 3->ddd");
 		// When going from DSL to graph, unquote property values and exit codes
 		String dsl = "aaa --one=bar --two='b ar' 'tname'->bbb '*'->ccc 3->ddd";
 		graph = parse(dsl).toGraph();
 		n = graph.nodes.get(1);
-		assertEquals("b ar", n.properties.get("two"));
+		assertThat(n.properties).containsEntry("two", "b ar");
 		Link l = graph.links.get(1);
-		assertEquals("tname", l.getTransitionName());
+		assertThat(l.getTransitionName()).isEqualTo("tname");
 		l = graph.links.get(2);
-		assertEquals("*", l.getTransitionName());
+		assertThat(l.getTransitionName()).isEqualTo("*");
 		l = graph.links.get(3);
-		assertEquals("3", l.getTransitionName());
-		assertEquals(dsl, graph.toDSLText());
+		assertThat(l.getTransitionName()).isEqualTo("3");
+		assertThat(graph.toDSLText()).isEqualTo(dsl);
 	}
 
 	@Test
-	public void wildcardTransitions() {
+	void wildcardTransitions() {
 		// When going from DSL to graph, unquote property values and exit codes
 		String dsl = "aaa 'tname'->bbb '*'->ccc 3->ddd";
 		assertGraph("[0:START][1:aaa][2:bbb][3:ccc][4:ddd][5:END][0-1][tname:1-2][*:1-3][3:1-4][1-5][2-5][3-5][4-5]", dsl);
 		Graph graph = parse(dsl).toGraph();
 		Link l = graph.links.get(1);
-		assertEquals("tname", l.getTransitionName());
+		assertThat(l.getTransitionName()).isEqualTo("tname");
 		l = graph.links.get(2);
-		assertEquals("*", l.getTransitionName());
+		assertThat(l.getTransitionName()).isEqualTo("*");
 		l = graph.links.get(3);
-		assertEquals("3", l.getTransitionName());
-		assertEquals(dsl, graph.toDSLText());
+		assertThat(l.getTransitionName()).isEqualTo("3");
+		assertThat(graph.toDSLText()).isEqualTo(dsl);
 	}
 
 	@Test
-	public void multiTransitionToSameTarget() {
+	void multiTransitionToSameTarget() {
 		String spec = "foo 'failed'->bbb && bar 'failed'->bbc";
 		assertGraph("[0:START][1:foo][2:bbb][3:bar][4:bbc][5:END][0-1][failed:1-2][1-3][failed:3-4][3-5][2-5][4-5]",
 				spec);
@@ -1532,13 +1498,13 @@ public void multiTransitionToSameTarget() {
 	}
 
 	@Test
-	public void extraneousDataError() {
+	void extraneousDataError() {
 		String jobSpecification = " rubbish";
 		checkForParseError(jobSpecification, DSLMessage.TASK_MORE_INPUT, 9, "rubbish");
 	}
 
 	@Test
-	public void incorrectTransition() {
+	void incorrectTransition() {
 		checkForParseError("foo ||->bar", DSLMessage.TASK_MORE_INPUT, 4, "||");
 	}
@@ -1564,43 +1530,43 @@ private TaskNode parse(String composedTaskName, String dsltext, boolean validate
 	}
 
 	private void assertToken(TokenKind kind, String string, int start, int end, Token t) {
-		assertEquals(kind, t.kind);
-		assertEquals(string, t.getKind().hasPayload() ? t.stringValue() : new String(t.getKind().getTokenChars()));
-		assertEquals(start, t.startPos);
-		assertEquals(end, t.endPos);
+		assertThat(t.kind).isEqualTo(kind);
+		assertThat(t.getKind().hasPayload() ? t.stringValue() : new String(t.getKind().getTokenChars())).isEqualTo(string);
+		assertThat(t.startPos).isEqualTo(start);
+		assertThat(t.endPos).isEqualTo(end);
 	}
 
 	private void assertTokens(Tokens tokens, TokenKind... expectedKinds) {
 		for (int i = 0; i < expectedKinds.length; i++) {
-			assertEquals(expectedKinds[i], tokens.next().getKind());
+			assertThat(tokens.next().getKind()).isEqualTo(expectedKinds[i]);
 		}
 	}
 
 	private void assertTaskApp(LabelledTaskNode node, String taskAppName) {
-		assertTrue(node.isTaskApp());
-		assertEquals(((TaskAppNode) node).getName(), taskAppName);
+		assertThat(node.isTaskApp()).isTrue();
+		assertThat(taskAppName).isEqualTo(((TaskAppNode) node).getName());
 	}
 
 	private void assertFlow(LabelledTaskNode node, String... expectedApps) {
-		assertTrue(node instanceof FlowNode);
+		assertThat(node instanceof FlowNode).isTrue();
 		FlowNode flow = (FlowNode) node;
 		List series = flow.getSeries();
-		assertEquals(expectedApps.length, series.size());
-		assertEquals(expectedApps.length, flow.getSeriesLength());
+		assertThat(series).hasSize(expectedApps.length);
+		assertThat(flow.getSeriesLength()).isEqualTo(expectedApps.length);
 		for (int a = 0; a < expectedApps.length; a++) {
 			assertTaskApp(series.get(a), expectedApps[a]);
 		}
 	}
 
 	private void assertSplit(LabelledTaskNode node, String... expectedApps) {
-		assertTrue(node instanceof SplitNode);
+		assertThat(node instanceof SplitNode).isTrue();
 		SplitNode split = (SplitNode) node;
 		List series = split.getSeries();
-		assertEquals(expectedApps.length, series.size());
-		assertEquals(expectedApps.length, split.getSeriesLength());
+		assertThat(series).hasSize(expectedApps.length);
+		assertThat(split.getSeriesLength()).isEqualTo(expectedApps.length);
 		for (int a = 0; a < expectedApps.length; a++) {
 			FlowNode f = (FlowNode) series.get(a);
-			assertEquals(1, f.getSeriesLength());
+			assertThat(f.getSeriesLength()).isEqualTo(1);
 			assertTaskApp(f.getSeriesElement(0), expectedApps[a]);
 		}
 	}
@@ -1612,11 +1578,11 @@ private ParseException checkForParseError(String dsl, DSLMessage msg, int pos, O
 			return null;
 		}
 		catch (ParseException e) {
-			assertEquals(msg, e.getMessageCode());
-			assertEquals(pos, e.getPosition());
+			assertThat(e.getMessageCode()).isEqualTo(msg);
+			assertThat(e.getPosition()).isEqualTo(pos);
 			if (inserts != null) {
 				for (int i = 0; i < inserts.length; i++) {
-					assertEquals(inserts[i], e.getInserts()[i]);
+					assertThat(e.getInserts()[i]).isEqualTo(inserts[i]);
 				}
 			}
 			return e;
@@ -1624,8 +1590,7 @@ private ParseException checkForParseError(String dsl, DSLMessage msg, int pos, O
 	}
 
 	private void assertApps(List taskApps, String... expectedTaskAppNames) {
-		assertEquals("Expected " + expectedTaskAppNames.length + " but was " + taskApps.size() + ": " + taskApps,
-				expectedTaskAppNames.length, taskApps.size());
+		assertThat(taskApps.size()).as("Expected " + expectedTaskAppNames.length + " but was " + taskApps.size() + ": " + taskApps).isEqualTo(expectedTaskAppNames.length);
 		Set set2 = new HashSet();
 		for (TaskApp taskApp : taskApps) {
 			StringBuilder s = new StringBuilder();
@@ -1652,13 +1617,13 @@ private void assertApps(List taskApps, String... expectedTaskAppNames)
 	private void checkDSLToGraphAndBackToDSL(String specification) {
 		TaskNode ctn = parse(specification);
 		Graph graph = ctn.toGraph();
-		assertEquals(specification, graph.toDSLText());
+		assertThat(graph.toDSLText()).isEqualTo(specification);
 	}
 
 	private void assertGraph(String expectedGraph, String dsl) {
 		TaskNode ctn = parse(dsl);
 		Graph graph = ctn.toGraph();
-		assertEquals(expectedGraph, graph.toVerboseString());
+		assertThat(graph.toVerboseString()).isEqualTo(expectedGraph);
 	}
 
 	private void assertTaskApps(String composedTaskName, String spec, String... expectedTaskApps) {
@@ -1673,7 +1638,7 @@ private void assertTaskApps(String composedTaskName, String spec, String... expe
 				s.append(":").append(arg.getKey()).append("=").append(arg.getValue());
 			}
 		}
-		assertEquals(s.toString(), expectedTaskApp);
+		assertThat(expectedTaskApp).isEqualTo(s.toString());
 	}
 }
diff --git a/spring-cloud-dataflow-core/pom.xml b/spring-cloud-dataflow-core/pom.xml
index 49badaf22c..1662737ee0 100644
--- a/spring-cloud-dataflow-core/pom.xml
+++ b/spring-cloud-dataflow-core/pom.xml
@@ -4,10 +4,18 @@
 		org.springframework.cloud
 		spring-cloud-dataflow-parent
-		2.8.0-SNAPSHOT
+		3.0.0-SNAPSHOT
+		../spring-cloud-dataflow-parent
 	spring-cloud-dataflow-core
+	spring-cloud-dataflow-core
+	Spring Cloud Data Flow Core
+	jar
+
+		true
+		3.4.1
+
@@ -20,9 +28,14 @@
+
+			org.springframework.cloud
+			spring-cloud-task-batch
+
 			org.springframework.cloud
 			spring-cloud-dataflow-core-dsl
+			${project.version}
 			org.springframework.cloud
@@ -32,6 +45,18 @@
 			com.fasterxml.jackson.core
 			jackson-annotations
+
+			com.fasterxml.jackson.core
+			jackson-databind
+
+			jakarta.persistence
+			jakarta.persistence-api
+
+			jakarta.validation
+			jakarta.validation-api
+
 			org.springframework.data
 			spring-data-keyvalue
@@ -41,8 +66,8 @@
 				spring-boot-starter-logging
-			commons-lang
-			commons-lang
+			org.apache.commons
+			commons-text
 			org.springframework.data
@@ -53,21 +78,49 @@
 			spring-data-commons
-			org.hibernate
+			org.hibernate.orm
 			hibernate-core
-			javax.validation
-			validation-api
+			org.springframework.hateoas
+			spring-hateoas
 			org.springframework.boot
 			spring-boot-starter-test
 			test
-
-			org.springframework.hateoas
-			spring-hateoas
-
+
+
+
+				org.apache.maven.plugins
+				maven-javadoc-plugin
+				${maven-javadoc-plugin.version}
+
+
+					javadoc
+
+						jar
+
+					package
+
+
+
+
+				org.apache.maven.plugins
+				maven-source-plugin
+				3.3.0
+
+
+					source
+
+						jar
+
+					package
+
+
+
+
diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AbstractEntity.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AbstractEntity.java
index 344354d3c2..508ebf2e81 100644
--- a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AbstractEntity.java
+++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AbstractEntity.java
@@ -15,13 +15,12 @@
  */
 package org.springframework.cloud.dataflow.core;
 
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.MappedSuperclass;
-import javax.persistence.Version;
-
 import com.fasterxml.jackson.annotation.JsonIgnore;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.MappedSuperclass;
+import jakarta.persistence.Version;
 
 /**
  * Base class for entity implementations. Uses a {@link Long} id.
diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AbstractPlatformProperties.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AbstractPlatformProperties.java
index dc3afba472..db16ff77ec 100644
--- a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AbstractPlatformProperties.java
+++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AbstractPlatformProperties.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 the original author or authors.
+ * Copyright 2019-2021 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -21,6 +21,7 @@
 
 /**
  * @author David Turanski
+ * @author Ilayaperumal Gopinathan
 **/
 public abstract class AbstractPlatformProperties

<P> { private Map<String, P> accounts = new LinkedHashMap<>(); @@ -41,4 +42,19 @@ public P accountProperties(String account) { return properties; } + /** + * Check if the account name exists in the platform accounts. + * + * @param account the name of the account + * @return {@code true} if an account with the given name exists + */ + public boolean accountExists(String account) { + for (String accountKey : this.getAccounts().keySet()) { + if (accountKey.equals(account)) { + return true; + } + } + return false; + } + } diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AbstractTaskPlatformFactory.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AbstractTaskPlatformFactory.java index e96b9f6a51..c0a307061c 100644 --- a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AbstractTaskPlatformFactory.java +++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AbstractTaskPlatformFactory.java @@ -31,7 +31,7 @@ public abstract class AbstractTaskPlatformFactory

arguments, String errorMessage, String externalExecutionId, Long parentExecutionId) { + super(executionId, exitCode, taskName, startTime, endTime, exitMessage, arguments, errorMessage, externalExecutionId, parentExecutionId); + } + + public ThinTaskExecution(long executionId, Integer exitCode, String taskName, LocalDateTime startTime, LocalDateTime endTime, String exitMessage, List arguments, String errorMessage, String externalExecutionId) { + super(executionId, exitCode, taskName, startTime, endTime, exitMessage, arguments, errorMessage, externalExecutionId); + } + + public String getCtrTaskStatus() { + return ctrTaskStatus; + } + + public void setCtrTaskStatus(String ctrTaskStatus) { + this.ctrTaskStatus = ctrTaskStatus; + } +} diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/UriPersistenceConverter.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/UriPersistenceConverter.java index c62ff2bc86..21e996bd92 100644 --- a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/UriPersistenceConverter.java +++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/UriPersistenceConverter.java @@ -17,8 +17,8 @@ import java.net.URI; -import javax.persistence.AttributeConverter; -import javax.persistence.Converter; +import jakarta.persistence.AttributeConverter; +import jakarta.persistence.Converter; import org.springframework.util.StringUtils; diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/DatabaseType.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/DatabaseType.java similarity index 75% rename from spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/DatabaseType.java rename to spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/DatabaseType.java index 40f3b6c05b..f207ac3df5 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/DatabaseType.java +++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/DatabaseType.java @@ -1,5 +1,5 @@ /* - * Copyright 2016 the original author or authors. + * Copyright 2016-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,8 +14,9 @@ * limitations under the License. 
*/ -package org.springframework.cloud.dataflow.server.repository.support; +package org.springframework.cloud.dataflow.core.database.support; +import java.sql.DatabaseMetaData; import java.util.HashMap; import java.util.Map; @@ -37,7 +38,9 @@ public enum DatabaseType { HSQL("HSQL Database Engine"), H2("H2"), ORACLE("Oracle"), + MARIADB("MariaDB"), MYSQL("MySQL"), + POSTGRES("PostgreSQL"), SQLSERVER("Microsoft SQL Server"), DB2("DB2"); @@ -73,12 +76,10 @@ public static DatabaseType fromMetaData(DataSource dataSource) throws MetaDataAc .toString(); if (!databaseProductVersion.startsWith("SQL")) { databaseProductName = "DB2ZOS"; - } - else { + } else { databaseProductName = JdbcUtils.commonDatabaseName(databaseProductName); } - } - else { + } else if(!databaseProductName.equals("MariaDB")) { databaseProductName = JdbcUtils.commonDatabaseName(databaseProductName); } return fromProductName(databaseProductName); @@ -100,6 +101,24 @@ public static DatabaseType fromProductName(String productName) { } } + /** + * Determines if the Database that the datasource refers to supports the {@code ROW_NUMBER()} SQL function. + * @param dataSource the datasource pointing to the DB in question + * @return whether the database supports the SQL {@code ROW_NUMBER()} function + * @throws MetaDataAccessException if error occurs + */ + public static boolean supportsRowNumberFunction(DataSource dataSource) throws MetaDataAccessException { + DatabaseType databaseType = DatabaseType.fromMetaData(dataSource); + if (databaseType == DatabaseType.H2 || databaseType == DatabaseType.HSQL) { + return false; + } + if (databaseType != DatabaseType.MYSQL) { + return true; + } + int majorVersion = JdbcUtils.extractDatabaseMetaData(dataSource, DatabaseMetaData::getDatabaseMajorVersion); + return (majorVersion >= 8); + } + private String getProductName() { return productName; } diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/IncrementerType.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/IncrementerType.java new file mode 100644 index 0000000000..87f8f26667 --- /dev/null +++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/IncrementerType.java @@ -0,0 +1,7 @@ +package org.springframework.cloud.dataflow.core.database.support; + +public enum IncrementerType { + DEFAULT, + TABLE, + SEQUENCE +} diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MariaDBSequenceMaxValueIncrementer.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MariaDBSequenceMaxValueIncrementer.java new file mode 100644 index 0000000000..ab6ee1cdc8 --- /dev/null +++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MariaDBSequenceMaxValueIncrementer.java @@ -0,0 +1,18 @@ +package org.springframework.cloud.dataflow.core.database.support; + +import javax.sql.DataSource; + +import org.springframework.jdbc.support.incrementer.AbstractSequenceMaxValueIncrementer; + +public class MariaDBSequenceMaxValueIncrementer extends AbstractSequenceMaxValueIncrementer { + public MariaDBSequenceMaxValueIncrementer() { + } + + public MariaDBSequenceMaxValueIncrementer(DataSource dataSource, String incrementerName) { + super(dataSource, incrementerName); + } + + protected String getSequenceQuery() { + return "select next value for " + 
this.getIncrementerName(); + } +} diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MultiSchemaIncrementerFactory.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MultiSchemaIncrementerFactory.java new file mode 100644 index 0000000000..039acacc59 --- /dev/null +++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MultiSchemaIncrementerFactory.java @@ -0,0 +1,89 @@ +package org.springframework.cloud.dataflow.core.database.support; + +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.Locale; + +import javax.sql.DataSource; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.batch.item.database.support.DefaultDataFieldMaxValueIncrementerFactory; +import org.springframework.jdbc.support.MetaDataAccessException; +import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; + +public class MultiSchemaIncrementerFactory extends DefaultDataFieldMaxValueIncrementerFactory { + private final static Logger logger = LoggerFactory.getLogger(MultiSchemaIncrementerFactory.class); + + private final DataSource dataSource; + + public MultiSchemaIncrementerFactory(DataSource dataSource) { + super(dataSource); + this.dataSource = dataSource; + } + + @Override + public DataFieldMaxValueIncrementer getIncrementer(String incrementerType, String incrementerName) { + DatabaseType databaseType; + try { + databaseType = DatabaseType.fromMetaData(this.dataSource); + } catch (MetaDataAccessException e) { + throw new IllegalStateException(e); + } + if (databaseType != null) { + IncrementerType type = getIncrementerType(databaseType, incrementerName); + if (type == IncrementerType.SEQUENCE) { + switch (databaseType) { + case SQLSERVER: + return new SqlServerSequenceMaxValueIncrementer(this.dataSource, incrementerName); + case MARIADB: + return new MariaDBSequenceMaxValueIncrementer(this.dataSource, incrementerName); + } + } + } + return super.getIncrementer(incrementerType, incrementerName); + } + + private IncrementerType getIncrementerType(DatabaseType databaseType, String incrementerName) { + + try (Connection connection = this.dataSource.getConnection()) { + if(databaseType == DatabaseType.SQLSERVER) { + try(Statement statement = connection.createStatement()) { + try(ResultSet sequences = statement.executeQuery("SELECT name FROM sys.sequences")) { + while (sequences.next()) { + String sequenceName = sequences.getString(1); + logger.debug("Sequence:{}", sequenceName); + if(sequenceName.equalsIgnoreCase(incrementerName)) { + return IncrementerType.SEQUENCE; + } + } + } + } catch (Throwable x) { + logger.warn("Ignoring error:" + x); + } + } + DatabaseMetaData metaData = connection.getMetaData(); + String[] types = {"TABLE", "SEQUENCE"}; + try (ResultSet tables = metaData.getTables(null, null, "%", types)) { + while (tables.next()) { + String tableName = tables.getString("TABLE_NAME"); + if (tableName.equalsIgnoreCase(incrementerName)) { + String tableType = tables.getString("TABLE_TYPE"); + logger.debug("Found Table:{}:{}", incrementerName, tableType); + if (tableType != null && tableType.toUpperCase(Locale.ROOT).contains("SEQUENCE")) { + return IncrementerType.SEQUENCE; + } + return IncrementerType.TABLE; + } + } + } + } catch (SQLException sqe) { + logger.warn(sqe.getMessage(), 
sqe); + } + return IncrementerType.DEFAULT; + } +} diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/SqlServerSequenceMaxValueIncrementer.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/SqlServerSequenceMaxValueIncrementer.java new file mode 100644 index 0000000000..e301274a9c --- /dev/null +++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/SqlServerSequenceMaxValueIncrementer.java @@ -0,0 +1,18 @@ +package org.springframework.cloud.dataflow.core.database.support; + +import javax.sql.DataSource; + +import org.springframework.jdbc.support.incrementer.AbstractSequenceMaxValueIncrementer; + +public class SqlServerSequenceMaxValueIncrementer extends AbstractSequenceMaxValueIncrementer { + public SqlServerSequenceMaxValueIncrementer() { + } + + public SqlServerSequenceMaxValueIncrementer(DataSource dataSource, String incrementerName) { + super(dataSource, incrementerName); + } + + protected String getSequenceQuery() { + return "select next value for " + this.getIncrementerName(); + } +} diff --git a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/AppRegistrationTests.java b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/AppRegistrationTests.java index 908e8ebab2..e5908c2e9c 100644 --- a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/AppRegistrationTests.java +++ b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/AppRegistrationTests.java @@ -18,7 +18,7 @@ import java.net.URI; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static org.assertj.core.api.Assertions.assertThat; @@ -26,8 +26,9 @@ * Unit tests for {@link AppRegistration}. 
* * @author Eric Bottard + * @author Corneil du Plessis */ -public class AppRegistrationTests { +class AppRegistrationTests { // @Test // public void testResource() { @@ -54,7 +55,7 @@ public class AppRegistrationTests { // } @Test - public void testCompareTo() { + void compareTo() { AppRegistration registration1 = new AppRegistration("foo", ApplicationType.task, URI.create("file:///foobar")); AppRegistration registration2 = new AppRegistration("foo2", ApplicationType.task, URI.create("file:///foobar2")); assertThat(registration1).isNotEqualByComparingTo(registration2); @@ -65,7 +66,7 @@ public void testCompareTo() { } @Test - public void testToString() { + void testToString() { AppRegistration registration1 = new AppRegistration("foo", ApplicationType.task, URI.create("file:///foobar"), URI.create("file:///foobar-metadata")); assertThat(registration1.toString()).contains("foo").contains("task").contains("file:///foobar") diff --git a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/ArgumentSanitizerTest.java b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/ArgumentSanitizerTest.java index 9021c98ae5..32674785c1 100644 --- a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/ArgumentSanitizerTest.java +++ b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/ArgumentSanitizerTest.java @@ -19,36 +19,38 @@ import java.util.ArrayList; import java.util.List; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import static org.assertj.core.api.Assertions.assertThat; /** * @author Christian Tzolov * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ -public class ArgumentSanitizerTest { +class ArgumentSanitizerTest { private ArgumentSanitizer sanitizer; private static final String[] keys = { "password", "secret", "key", "token", ".*credentials.*", "vcap_services", "url" }; - @Before - public void before() { + @BeforeEach + void before() { sanitizer = new ArgumentSanitizer(); } @Test - public void testSanitizeProperties() { + void sanitizeProperties() { for (String key : keys) { - Assert.assertEquals("--" + key + "=******", sanitizer.sanitize("--" + key + "=foo")); - Assert.assertEquals("******", sanitizer.sanitize(key, "bar")); + assertThat(sanitizer.sanitize("--" + key + "=foo")).isEqualTo("--" + key + "=******"); + assertThat(sanitizer.sanitize(key, "bar")).isEqualTo("******"); } } @Test - public void testSanitizeArguments() { + void sanitizeArguments() { final List arguments = new ArrayList<>(); for (String key : keys) { @@ -57,38 +59,38 @@ public void testSanitizeArguments() { final List sanitizedArguments = sanitizer.sanitizeArguments(arguments); - Assert.assertEquals(keys.length, sanitizedArguments.size()); + assertThat(sanitizedArguments).hasSize(keys.length); int order = 0; for(String sanitizedString : sanitizedArguments) { - Assert.assertEquals("--" + keys[order] + "=******", sanitizedString); + assertThat(sanitizedString).isEqualTo("--" + keys[order] + "=******"); order++; } } @Test - public void testMultipartProperty() { - Assert.assertEquals("--password=******", sanitizer.sanitize("--password=boza")); - Assert.assertEquals("--one.two.password=******", sanitizer.sanitize("--one.two.password=boza")); - Assert.assertEquals("--one_two_password=******", sanitizer.sanitize("--one_two_password=boza")); + void multipartProperty() { + 
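// As the cases below demonstrate, masking applies whenever the configured key ('password') is the trailing segment of the property name, whether the segments are separated by '.' or '_'. +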
assertThat(sanitizer.sanitize("--password=boza")).isEqualTo("--password=******"); + assertThat(sanitizer.sanitize("--one.two.password=boza")).isEqualTo("--one.two.password=******"); + assertThat(sanitizer.sanitize("--one_two_password=boza")).isEqualTo("--one_two_password=******"); } // @Test // public void testHierarchicalPropertyNames() { -// Assert.assertEquals("time --password='******' | log", +// assertEquals("time --password='******' | log", // sanitizer.(new StreamDefinition("stream", "time --password=bar | log"))); // } // // @Test // public void testStreamPropertyOrder() { -// Assert.assertEquals("time --some.password='******' --another-secret='******' | log", +// assertEquals("time --some.password='******' --another-secret='******' | log", // sanitizer.sanitizeStream(new StreamDefinition("stream", "time --some.password=foobar --another-secret=kenny | log"))); // } // // @Test // public void testStreamMatcherWithHyphenDotChar() { -// Assert.assertEquals("twitterstream --twitter.credentials.access-token-secret='******' " +// assertEquals("twitterstream --twitter.credentials.access-token-secret='******' " // + "--twitter.credentials.access-token='******' --twitter.credentials.consumer-secret='******' " // + "--twitter.credentials.consumer-key='******' | " // + "filter --expression=#jsonPath(payload,'$.lang')=='en' | " @@ -105,6 +107,6 @@ public void testMultipartProperty() { // @Test // public void testStreamSanitizeOriginalDsl() { // StreamDefinition streamDefinition = new StreamDefinition("test", "time --password='******' | log --password='******'", "time --password='******' | log"); -// Assert.assertEquals("time --password='******' | log", sanitizer.sanitizeOriginalStreamDsl(streamDefinition)); +// assertEquals("time --password='******' | log", sanitizer.sanitizeOriginalStreamDsl(streamDefinition)); // } } diff --git a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/Base64UtilsTests.java b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/Base64UtilsTests.java new file mode 100644 index 0000000000..ada514da07 --- /dev/null +++ b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/Base64UtilsTests.java @@ -0,0 +1,39 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.core; + +import org.junit.jupiter.api.Test; + +import static org.assertj.core.api.Assertions.assertThat; + +/** + * Tests for {@code Base64Utils}. 
+ * + * @author Janne Valkealahti + * @author Corneil du Plessis + */ +class Base64UtilsTests { + + @Test + void base64() { + assertThat(Base64Utils.decode(null)).isNull(); + assertThat(Base64Utils.encode(null)).isNull(); + assertThat(Base64Utils.decode(Base64Utils.encode("foo"))).isEqualTo("foo"); + assertThat(Base64Utils.decode(Base64Utils.encode("foo.*.1"))).isEqualTo("foo.*.1"); + assertThat(Base64Utils.decode("juststring")).isEqualTo("juststring"); + } +} diff --git a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamApplicationDefinitionTests.java b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamApplicationDefinitionTests.java index 850f610d30..1d8898caea 100644 --- a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamApplicationDefinitionTests.java +++ b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamApplicationDefinitionTests.java @@ -16,29 +16,30 @@ package org.springframework.cloud.dataflow.core; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; /** * @author Patrick Peralta * @author Mark Fisher + * @author Corneil du Plessis */ -public class StreamApplicationDefinitionTests { +class StreamApplicationDefinitionTests { private static final String OUTPUT_BINDING_KEY = "spring.cloud.stream.bindings.output"; @Test - public void testBuilder() { + void builder() { StreamAppDefinition definition = new StreamAppDefinition.Builder().setRegisteredAppName("time") .setLabel("label").setApplicationType(ApplicationType.source).setProperty(OUTPUT_BINDING_KEY, "channel").build("ticktock"); - assertEquals("ticktock", definition.getStreamName()); - assertEquals("time", definition.getRegisteredAppName()); - assertEquals("label", definition.getName()); - assertEquals(ApplicationType.source, definition.getApplicationType()); - assertEquals(1, definition.getProperties().size()); - assertEquals("channel", definition.getProperties().get(OUTPUT_BINDING_KEY)); + assertThat(definition.getStreamName()).isEqualTo("ticktock"); + assertThat(definition.getRegisteredAppName()).isEqualTo("time"); + assertThat(definition.getName()).isEqualTo("label"); + assertThat(definition.getApplicationType()).isEqualTo(ApplicationType.source); + assertThat(definition.getProperties()).hasSize(1); + assertThat(definition.getProperties()).containsEntry(OUTPUT_BINDING_KEY, "channel"); } } diff --git a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamDefinitionServiceUtilsTests.java b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamDefinitionServiceUtilsTests.java index 5f58e8d26b..46e07dd042 100644 --- a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamDefinitionServiceUtilsTests.java +++ b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamDefinitionServiceUtilsTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2020 the original author or authors. + * Copyright 2015-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -20,120 +20,127 @@ import java.util.LinkedList; import java.util.List; -import org.junit.Ignore; -import org.junit.Test; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; /** * @author Christian Tzolov * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ -public class StreamDefinitionServiceUtilsTests { +class StreamDefinitionServiceUtilsTests { StreamDefinitionService streamDefinitionService = new DefaultStreamDefinitionService(); @Test - public void testStreamCreation() { + void streamCreation() { reverseDslTest("time | log", 2); } - @Ignore @Test - public void quotesInParams() { - reverseDslTest("foo --bar='payload.matches(''hello'')' | file", 2); + void quotesInParams() { + reverseDslTest("foo --bar='payload.matches(\'hello\')' | file", 2); } @Test - public void quotesInParams2() { + void quotesInParams2() { reverseDslTest("http --port=9700 | filter --expression=\"payload.matches('hello world')\" | file", 3); } @Test - public void parameterizedApps() { + void parameterizedApps() { reverseDslTest("foo --x=1 --y=two | bar --z=3", 2); } @Test - public void testBindings3Apps() { + void bindings3Apps() { reverseDslTest("time | filter | log", 3); } - @Ignore @Test - public void testXD2416_1() { + void xd24161() { reverseDslTest("http | transform --expression='payload.replace(\"abc\", \"\")' | log", 3); } - @Ignore + @Disabled("The result from the parser is stating that the escaped single ticks is 2, but the assert is requesting 4 ticks. Verify the assert is correct.") @Test - public void testXD2416_2() { + void xd24162() { reverseDslTest("http | transform --expression='payload.replace(\"abc\", '''')' | log", 3); } @Test - public void testSourceDestinationArgs() { - reverseDslTest(":test --group=test > file", 1); + void sourceDestinationArgs() { + reverseDslTest(":test > file --group=test", 1); } @Test - public void testLabelsInStreams() { + void labelsInStreams() { reverseDslTest("http | step1: transform --expression=payload.toUpperCase()" + " | step2: transform --expression=payload+'!' 
| log", 4); } @Test - public void testLabelsInStreams2() { + void labelsInStreams2() { reverseDslTest("file | out: file", 2); } @Test - public void testTabsInStreams() { + void tabsInStreams() { reverseDslTest(":mainstream.http > counter", 1); reverseDslTest(":mainstream.step1 > jdbc", 1); } @Test - public void sourceDestinationNameIsAppliedToSourceApp() { + void sourceDestinationNameIsAppliedToSourceApp() { reverseDslTest(":foo > goo | blah | file", 3); } @Test - public void sinkDestinationNameIsAppliedToSinkApp() { + void sinkDestinationNameIsAppliedToSinkApp() { reverseDslTest("boo | blah | aaak > :foo", 3); } @Test - public void testSinkNamedDestination() { + void sinkNamedDestination() { reverseDslTest("bart > :foo", 1); } @Test - public void testSourceNamedDestination() { + void sourceNamedDestination() { reverseDslTest(":foo > boot", 1); } @Test - public void testBridge() { + void bridge() { reverseDslTest(":foo > :bar", 1); } private void reverseDslTest(String dslText, int expectedAppSize) { StreamDefinition streamDefinition = new StreamDefinition("streamName", dslText); - assertEquals(expectedAppSize, this.streamDefinitionService.getAppDefinitions(streamDefinition).size()); + assertThat(this.streamDefinitionService.getAppDefinitions(streamDefinition)).hasSize(expectedAppSize); - assertEquals(streamDefinition.getDslText(), - this.streamDefinitionService.constructDsl(streamDefinition.getDslText(), this.streamDefinitionService.getAppDefinitions(streamDefinition))); + assertThat(this.streamDefinitionService.constructDsl(streamDefinition.getDslText(), this.streamDefinitionService.getAppDefinitions(streamDefinition))).isEqualTo(streamDefinition.getDslText()); } @Test - public void testExclusionOfDataFlowAddedProperties() { + void streamDslAppPropertyWithHyphen() { + String dslText = "foo --foo='key|value' | bar"; + + System.out.println(dslText); + StreamDefinition streamDefinition = new StreamDefinition("streamName", dslText); + + assertThat(this.streamDefinitionService.constructDsl(streamDefinition.getDslText(), this.streamDefinitionService.getAppDefinitions(streamDefinition))).isEqualTo("foo --foo='key|value' | bar"); + } + + @Test + void exclusionOfDataFlowAddedProperties() { List dataFlowAddedProperties = Arrays.asList( DataFlowPropertyKeys.STREAM_APP_TYPE, DataFlowPropertyKeys.STREAM_APP_LABEL, DataFlowPropertyKeys.STREAM_NAME, - BindingPropertyKeys.INPUT_GROUP, BindingPropertyKeys.OUTPUT_REQUIRED_GROUPS, BindingPropertyKeys.OUTPUT_DESTINATION); @@ -143,27 +150,27 @@ public void testExclusionOfDataFlowAddedProperties() { System.out.println(dslText); StreamDefinition streamDefinition = new StreamDefinition("streamName", dslText); - assertEquals("foo | bar", - this.streamDefinitionService.constructDsl(streamDefinition.getDslText(), this.streamDefinitionService.getAppDefinitions(streamDefinition))); + assertThat(this.streamDefinitionService.constructDsl(streamDefinition.getDslText(), this.streamDefinitionService.getAppDefinitions(streamDefinition))).isEqualTo("foo | bar"); } } @Test - public void testInputDestinationProperty() { + void inputDestinationProperty() { String dslText = "foo --" + BindingPropertyKeys.INPUT_DESTINATION + "=boza | bar"; System.out.println(dslText); StreamDefinition streamDefinition = new StreamDefinition("streamName", dslText); - assertEquals(":boza > foo | bar", - this.streamDefinitionService.constructDsl(streamDefinition.getDslText(), this.streamDefinitionService.getAppDefinitions(streamDefinition))); + 
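// constructDsl recognizes the explicit input-destination binding property and renders it back as named-destination syntax (':boza > foo | bar') rather than as an app-level --property. +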
assertThat(this.streamDefinitionService.constructDsl(streamDefinition.getDslText(), this.streamDefinitionService.getAppDefinitions(streamDefinition))).isEqualTo(":boza > foo | bar"); } @Test - public void testPropertyAutoQuotes() { + void propertyAutoQuotes() { - StreamDefinition streamDefinition = new StreamDefinition("streamName", "foo | bar"); + String streamName = "stream2"; + + StreamDefinition streamDefinition = new StreamDefinition(streamName, "foo | bar"); StreamAppDefinition foo = this.streamDefinitionService.getAppDefinitions(streamDefinition).get(0); StreamAppDefinition bar = this.streamDefinitionService.getAppDefinitions(streamDefinition).get(1); @@ -174,60 +181,56 @@ public void testPropertyAutoQuotes() { .setProperty("p3", "ef") .setProperty("p4", "'i' 'j'") .setProperty("p5", "\"k l\"") - .build("stream2"); + .build(streamName); StreamAppDefinition bar2 = StreamAppDefinition.Builder.from(bar) .setProperty("p1", "a b") .setProperty("p2", "'c d'") .setProperty("p3", "ef") - .build("stream2"); + .build(streamName); - assertEquals("foo --p1='a b' --p2=\"'c d'\" --p3=ef --p4=\"'i' 'j'\" --p5=\"k l\" | bar --p1='a b' --p2=\"'c d'\" --p3=ef", - this.streamDefinitionService.constructDsl(streamDefinition.getDslText(), new LinkedList(Arrays.asList(foo2, bar2)))); + assertThat(this.streamDefinitionService.constructDsl(streamDefinition.getDslText(), new LinkedList(Arrays.asList(foo2, bar2)))).isEqualTo("foo --p1='a b' --p2=\"'c d'\" --p3=ef --p4=\"'i' 'j'\" --p5=\"k l\" | bar --p1='a b' --p2=\"'c d'\" --p3=ef"); } @Test - public void autoQuotesOnSemicolonProperties() { + void autoQuotesOnSemicolonProperties() { StreamDefinition streamDefinition = new StreamDefinition("streamName", "http-source-kafka --server.port=9900 | couchbase-sink-kafka " + "--inputType=\"application/x-java-object;type=com.example.dto.InputDto\""); - assertEquals("http-source-kafka --server.port=9900 | couchbase-sink-kafka " + - "--spring.cloud.stream.bindings.input.contentType='application/x-java-object;type=com.example.dto.InputDto'", - this.streamDefinitionService.constructDsl(streamDefinition.getDslText(), this.streamDefinitionService.getAppDefinitions(streamDefinition))); + assertThat(this.streamDefinitionService.constructDsl(streamDefinition.getDslText(), this.streamDefinitionService.getAppDefinitions(streamDefinition))).isEqualTo("http-source-kafka --server.port=9900 | couchbase-sink-kafka " + + "--spring.cloud.stream.bindings.input.contentType='application/x-java-object;type=com.example.dto.InputDto'"); streamDefinition = new StreamDefinition("stream2", "jdbc-mssql --cron='/10 * * * * *' " + "--max-messages=-1 --password='******' --query='UPDATE top (100) ASSURANCE SET assurance_flag = 1 " + "OUTPUT Inserted.* WHERE assurance_flag IS NULL' " + - "--url='jdbc:sqlserver://db:1433;databaseName=Spring' --username='*****' | " + + "--url='jdbc:sqlserver://db:1433;encrypt=false&databaseName=Spring' --username='*****' | " + "cust-processor | router --default-output-channel=out"); - assertEquals("jdbc-mssql --cron='/10 * * * * *' " + - "--max-messages=-1 --password='******' --query='UPDATE top (100) ASSURANCE SET assurance_flag = 1 " + - "OUTPUT Inserted.* WHERE assurance_flag IS NULL' " + - "--url='jdbc:sqlserver://db:1433;databaseName=Spring' --username='*****' | " + - "cust-processor | router --default-output-channel=out", - this.streamDefinitionService.constructDsl(streamDefinition.getDslText(), this.streamDefinitionService.getAppDefinitions(streamDefinition))); + 
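// Round-tripping keeps values that contain DSL-significant characters (';', '*', '|') intact because constructDsl re-quotes them when rebuilding the definition text. +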
assertThat(this.streamDefinitionService.constructDsl(streamDefinition.getDslText(), this.streamDefinitionService.getAppDefinitions(streamDefinition))).isEqualTo("jdbc-mssql --cron='/10 * * * * *' " + + "--max-messages=-1 --password='******' --query='UPDATE top (100) ASSURANCE SET assurance_flag = 1 " + + "OUTPUT Inserted.* WHERE assurance_flag IS NULL' " + + "--url='jdbc:sqlserver://db:1433;encrypt=false&databaseName=Spring' --username='*****' | " + + "cust-processor | router --default-output-channel=out"); } @Test - public void autoQuotesOnStarProperties() { + void autoQuotesOnStarProperties() { StreamDefinition streamDefinition = new StreamDefinition("stream2", "jdbc-mssql --cron='/10 * * * * *' " + "--max-messages=-1 --password='******' --query='UPDATE top (100) ASSURANCE SET assurance_flag = 1 " + "OUTPUT Inserted.* WHERE assurance_flag IS NULL' " + - "--url='jdbc:sqlserver://db:1433;databaseName=Spring' --username='*****' | " + + "--url='jdbc:sqlserver://db:1433;encrypt=false&databaseName=Spring' --username='*****' | " + "cust-processor | router --default-output-channel=out"); - assertEquals("jdbc-mssql --cron='/10 * * * * *' " + - "--max-messages=-1 --password='******' --query='UPDATE top (100) ASSURANCE SET assurance_flag = 1 " + - "OUTPUT Inserted.* WHERE assurance_flag IS NULL' " + - "--url='jdbc:sqlserver://db:1433;databaseName=Spring' --username='*****' | " + - "cust-processor | router --default-output-channel=out", - this.streamDefinitionService.constructDsl(streamDefinition.getDslText(), this.streamDefinitionService.getAppDefinitions(streamDefinition))); + assertThat(this.streamDefinitionService.constructDsl(streamDefinition.getDslText(), this.streamDefinitionService.getAppDefinitions(streamDefinition))).isEqualTo("jdbc-mssql --cron='/10 * * * * *' " + + "--max-messages=-1 --password='******' --query='UPDATE top (100) ASSURANCE SET assurance_flag = 1 " + + "OUTPUT Inserted.* WHERE assurance_flag IS NULL' " + + "--url='jdbc:sqlserver://db:1433;encrypt=false&databaseName=Spring' --username='*****' | " + + "cust-processor | router --default-output-channel=out"); } } diff --git a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamDefinitionTests.java b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamDefinitionTests.java index 5c7e7cb71b..2458e16a5a 100644 --- a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamDefinitionTests.java +++ b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/StreamDefinitionTests.java @@ -16,163 +16,162 @@ package org.springframework.cloud.dataflow.core; +import static org.assertj.core.api.Assertions.assertThat; + import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.cloud.dataflow.core.dsl.ParseException; import org.springframework.cloud.dataflow.core.dsl.StreamParser; - -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; - /** * @author Mark Fisher * @author David Turanski * @author Patrick Peralta * @author Marius Bogoevici * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ -public class StreamDefinitionTests { +class StreamDefinitionTests { StreamDefinitionService streamDefinitionService = new 
DefaultStreamDefinitionService(); @Test - public void testStreamCreation() { + void streamCreation() { StreamDefinition stream = new StreamDefinition("ticktock", "time | log"); - assertEquals(2, this.streamDefinitionService.getAppDefinitions(stream).size()); + assertThat(this.streamDefinitionService.getAppDefinitions(stream)).hasSize(2); StreamAppDefinition time = this.streamDefinitionService.getAppDefinitions(stream).get(0); - assertEquals("time", time.getName()); - assertEquals("time", time.getRegisteredAppName()); - assertEquals("ticktock.time", time.getProperties().get(BindingPropertyKeys.OUTPUT_DESTINATION)); - assertEquals("ticktock", time.getProperties().get(BindingPropertyKeys.OUTPUT_REQUIRED_GROUPS)); - assertFalse(time.getProperties().containsKey(BindingPropertyKeys.INPUT_DESTINATION)); + assertThat(time.getName()).isEqualTo("time"); + assertThat(time.getRegisteredAppName()).isEqualTo("time"); + assertThat(time.getProperties()).containsEntry(BindingPropertyKeys.OUTPUT_DESTINATION, "ticktock.time"); + assertThat(time.getProperties()).containsEntry(BindingPropertyKeys.OUTPUT_REQUIRED_GROUPS, "ticktock"); + assertThat(time.getProperties()).doesNotContainKey(BindingPropertyKeys.INPUT_DESTINATION); StreamAppDefinition log = this.streamDefinitionService.getAppDefinitions(stream).get(1); - assertEquals("log", log.getName()); - assertEquals("log", log.getRegisteredAppName()); - assertEquals("ticktock.time", log.getProperties().get(BindingPropertyKeys.INPUT_DESTINATION)); - assertEquals("ticktock", log.getProperties().get(BindingPropertyKeys.INPUT_GROUP)); - assertFalse(log.getProperties().containsKey(BindingPropertyKeys.OUTPUT_DESTINATION)); + assertThat(log.getName()).isEqualTo("log"); + assertThat(log.getRegisteredAppName()).isEqualTo("log"); + assertThat(log.getProperties()).containsEntry(BindingPropertyKeys.INPUT_DESTINATION, "ticktock.time"); + assertThat(log.getProperties()).containsEntry(BindingPropertyKeys.INPUT_GROUP, "ticktock"); + assertThat(log.getProperties()).doesNotContainKey(BindingPropertyKeys.OUTPUT_DESTINATION); } - + @Test - public void testLongRunningNonStreamApps() { + void longRunningNonStreamApps() { StreamDefinition sd = new StreamDefinition("something","aaa"); - assertEquals(ApplicationType.app, this.streamDefinitionService.getAppDefinitions(sd).get(0).getApplicationType()); + assertThat(this.streamDefinitionService.getAppDefinitions(sd).get(0).getApplicationType()).isEqualTo(ApplicationType.app); sd = new StreamDefinition("something","aaa|| bbb"); - assertEquals(ApplicationType.app, this.streamDefinitionService.getAppDefinitions(sd).get(0).getApplicationType()); - assertEquals(ApplicationType.app, this.streamDefinitionService.getAppDefinitions(sd).get(1).getApplicationType()); + assertThat(this.streamDefinitionService.getAppDefinitions(sd).get(0).getApplicationType()).isEqualTo(ApplicationType.app); + assertThat(this.streamDefinitionService.getAppDefinitions(sd).get(1).getApplicationType()).isEqualTo(ApplicationType.app); sd = new StreamDefinition("something","aaa --aaa=bbb || bbb"); - assertEquals(ApplicationType.app, this.streamDefinitionService.getAppDefinitions(sd).get(0).getApplicationType()); - assertEquals(ApplicationType.app, this.streamDefinitionService.getAppDefinitions(sd).get(1).getApplicationType()); + assertThat(this.streamDefinitionService.getAppDefinitions(sd).get(0).getApplicationType()).isEqualTo(ApplicationType.app); + assertThat(this.streamDefinitionService.getAppDefinitions(sd).get(1).getApplicationType()).isEqualTo(ApplicationType.app); } 
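+ // In the cases above, the '||' separator (unlike '|') denotes unbound, long-running apps: each node parses as ApplicationType.app rather than source/processor/sink.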
@Test - public void simpleStream() { + void simpleStream() { StreamDefinition streamDefinition = new StreamDefinition("test", "foo | bar"); List requests = this.streamDefinitionService.getAppDefinitions(streamDefinition); - assertEquals(2, requests.size()); + assertThat(requests).hasSize(2); StreamAppDefinition source = requests.get(0); StreamAppDefinition sink = requests.get(1); - assertEquals("foo", source.getName()); - assertEquals("test", source.getStreamName()); - - assertEquals(2, source.getProperties().size()); - assertEquals("test.foo", source.getProperties().get(BindingPropertyKeys.OUTPUT_DESTINATION)); - assertEquals("test", source.getProperties().get(BindingPropertyKeys.OUTPUT_REQUIRED_GROUPS)); - assertEquals("bar", sink.getName()); - assertEquals("test", sink.getStreamName()); - assertEquals(2, sink.getProperties().size()); - assertEquals("test.foo", sink.getProperties().get(BindingPropertyKeys.INPUT_DESTINATION)); - assertEquals("test", sink.getProperties().get(BindingPropertyKeys.INPUT_GROUP)); + assertThat(source.getName()).isEqualTo("foo"); + assertThat(source.getStreamName()).isEqualTo("test"); + + assertThat(source.getProperties()).hasSize(2); + assertThat(source.getProperties()).containsEntry(BindingPropertyKeys.OUTPUT_DESTINATION, "test.foo"); + assertThat(source.getProperties()).containsEntry(BindingPropertyKeys.OUTPUT_REQUIRED_GROUPS, "test"); + assertThat(sink.getName()).isEqualTo("bar"); + assertThat(sink.getStreamName()).isEqualTo("test"); + assertThat(sink.getProperties()).hasSize(2); + assertThat(sink.getProperties()).containsEntry(BindingPropertyKeys.INPUT_DESTINATION, "test.foo"); + assertThat(sink.getProperties()).containsEntry(BindingPropertyKeys.INPUT_GROUP, "test"); } @Test - public void quotesInParams() { + void quotesInParams() { StreamDefinition streamDefinition = new StreamDefinition("test", "foo --bar='payload.matches(''hello'')' | " + "file"); List requests = this.streamDefinitionService.getAppDefinitions(streamDefinition); - assertEquals(2, requests.size()); + assertThat(requests).hasSize(2); StreamAppDefinition source = requests.get(0); - assertEquals("foo", source.getName()); - assertEquals("test", source.getStreamName()); + assertThat(source.getName()).isEqualTo("foo"); + assertThat(source.getStreamName()).isEqualTo("test"); Map sourceParameters = source.getProperties(); - assertEquals(3, sourceParameters.size()); - assertEquals("payload.matches('hello')", sourceParameters.get("bar")); + assertThat(sourceParameters) + .hasSize(3) + .containsEntry("bar", "payload.matches('hello')"); } @Test - public void quotesInParams2() { + void quotesInParams2() { StreamDefinition streamDefinition = new StreamDefinition("test", "http --port=9700 | filter --expression=payload.matches('hello world') | file"); List requests = this.streamDefinitionService.getAppDefinitions(streamDefinition); - assertEquals(3, requests.size()); + assertThat(requests).hasSize(3); StreamAppDefinition filter = requests.get(1); - assertEquals("filter", filter.getName()); - assertEquals("test", filter.getStreamName()); + assertThat(filter.getName()).isEqualTo("filter"); + assertThat(filter.getStreamName()).isEqualTo("test"); Map filterParameters = filter.getProperties(); - assertEquals(5, filterParameters.size()); - assertEquals("payload.matches('hello world')", filterParameters.get("expression")); + assertThat(filterParameters) + .hasSize(5) + .containsEntry("expression", "payload.matches('hello world')"); } @Test - public void parameterizedApps() { + void parameterizedApps() { 
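+ // Each '--key=value' in the DSL lands in that app's properties map alongside the two binding properties Data Flow adds, hence the sizes of 4 and 3 asserted below.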
StreamDefinition streamDefinition = new StreamDefinition("test", "foo --x=1 --y=two | bar --z=3"); List requests = this.streamDefinitionService.getAppDefinitions(streamDefinition); - assertEquals(2, requests.size()); + assertThat(requests).hasSize(2); StreamAppDefinition source = requests.get(0); StreamAppDefinition sink = requests.get(1); - assertEquals("foo", source.getName()); - assertEquals("test", source.getStreamName()); - assertEquals(ApplicationType.source, source.getApplicationType()); + assertThat(source.getName()).isEqualTo("foo"); + assertThat(source.getStreamName()).isEqualTo("test"); + assertThat(source.getApplicationType()).isEqualTo(ApplicationType.source); Map sourceParameters = source.getProperties(); - assertEquals(4, sourceParameters.size()); - assertEquals("1", sourceParameters.get("x")); - assertEquals("two", sourceParameters.get("y")); - assertEquals("bar", sink.getName()); - assertEquals("test", sink.getStreamName()); + assertThat(sourceParameters) + .hasSize(4) + .containsEntry("x", "1") + .containsEntry("y", "two"); + assertThat(sink.getName()).isEqualTo("bar"); + assertThat(sink.getStreamName()).isEqualTo("test"); Map sinkParameters = sink.getProperties(); - assertEquals(3, sinkParameters.size()); - assertEquals("3", sinkParameters.get("z")); - assertEquals(ApplicationType.sink, sink.getApplicationType()); + assertThat(sinkParameters) + .hasSize(3) + .containsEntry("z", "3"); + assertThat(sink.getApplicationType()).isEqualTo(ApplicationType.sink); } @Test - public void sourceDestinationNameIsAppliedToSourceApp() throws Exception { + void sourceDestinationNameIsAppliedToSourceApp() throws Exception { StreamDefinition streamDefinition = new StreamDefinition("test", ":foo > goo | blah | file"); List requests = this.streamDefinitionService.getAppDefinitions(streamDefinition); - assertEquals(3, requests.size()); - assertEquals("foo", requests.get(0).getProperties().get(BindingPropertyKeys.INPUT_DESTINATION)); - assertEquals("test", requests.get(0).getProperties().get(BindingPropertyKeys.INPUT_GROUP)); - assertEquals(ApplicationType.processor, requests.get(0).getApplicationType()); - assertEquals(ApplicationType.processor, requests.get(1).getApplicationType()); - assertEquals(ApplicationType.sink, requests.get(2).getApplicationType()); + assertThat(requests).hasSize(3); + assertThat(requests.get(0).getProperties()).containsEntry(BindingPropertyKeys.INPUT_DESTINATION, "foo"); + assertThat(requests.get(0).getProperties()).containsEntry(BindingPropertyKeys.INPUT_GROUP, "test"); + assertThat(requests.get(0).getApplicationType()).isEqualTo(ApplicationType.processor); + assertThat(requests.get(1).getApplicationType()).isEqualTo(ApplicationType.processor); + assertThat(requests.get(2).getApplicationType()).isEqualTo(ApplicationType.sink); } @Test - public void sinkDestinationNameIsAppliedToSinkApp() throws Exception { + void sinkDestinationNameIsAppliedToSinkApp() throws Exception { StreamDefinition streamDefinition = new StreamDefinition("test", "boo | blah | aaak > :foo"); List requests = this.streamDefinitionService.getAppDefinitions(streamDefinition); - assertEquals(3, requests.size()); - assertEquals("foo", requests.get(2).getProperties().get(BindingPropertyKeys.OUTPUT_DESTINATION)); + assertThat(requests).hasSize(3); + assertThat(requests.get(2).getProperties()).containsEntry(BindingPropertyKeys.OUTPUT_DESTINATION, "foo"); } @Test - public void simpleSinkDestination() throws Exception { + void simpleSinkDestination() throws Exception { StreamDefinition streamDefinition 
= new StreamDefinition("test", "bart > :foo"); List requests = this.streamDefinitionService.getAppDefinitions(streamDefinition); - assertEquals(1, requests.size()); - assertEquals("foo", requests.get(0).getProperties().get(BindingPropertyKeys.OUTPUT_DESTINATION)); + assertThat(requests).hasSize(1); + assertThat(requests.get(0).getProperties()).containsEntry(BindingPropertyKeys.OUTPUT_DESTINATION, "foo"); } @Test - public void appWithBadDestination() throws Exception { + void appWithBadDestination() throws Exception { boolean isException = false; try { new StreamParser("test", "app > foo").parse(); @@ -180,56 +179,54 @@ public void appWithBadDestination() throws Exception { catch (Exception e) { isException = true; } - assertTrue(isException); + assertThat(isException).isTrue(); } @Test - public void simpleSourceDestination() throws Exception { + void simpleSourceDestination() throws Exception { StreamDefinition streamDefinition = new StreamDefinition("test", ":foo > boot"); List requests = this.streamDefinitionService.getAppDefinitions(streamDefinition); - assertEquals(1, requests.size()); - assertEquals("foo", requests.get(0).getProperties().get(BindingPropertyKeys.INPUT_DESTINATION)); - assertEquals("test", requests.get(0).getProperties().get(BindingPropertyKeys.INPUT_GROUP)); + assertThat(requests).hasSize(1); + assertThat(requests.get(0).getProperties()).containsEntry(BindingPropertyKeys.INPUT_DESTINATION, "foo"); + assertThat(requests.get(0).getProperties()).containsEntry(BindingPropertyKeys.INPUT_GROUP, "test"); } @Test - public void destinationsForbiddenInComposedApps() { + void destinationsForbiddenInComposedApps() { try { new StreamDefinition("test", ":foo > boot"); } catch (ParseException expected) { - assertThat(expected.getMessage(), - containsString("A destination is not supported in this kind of definition")); - assertThat(expected.getPosition(), is(0)); + assertThat(expected.getMessage()).contains("A destination is not supported in this kind of definition"); + assertThat(expected.getPosition()).isEqualTo(0); } try { new StreamDefinition("test", "bart | goo > :foo"); } catch (ParseException expected) { - assertThat(expected.getMessage(), - containsString("A destination is not supported in this kind of definition")); - assertThat(expected.getPosition(), is(13)); + assertThat(expected.getMessage()).contains("A destination is not supported in this kind of definition"); + assertThat(expected.getPosition()).isEqualTo(13); } } @Test - public void testBindings2Apps() { + void bindings2Apps() { StreamDefinition streamDefinition = new StreamDefinition("ticktock", "time | log"); List apps = this.streamDefinitionService.getAppDefinitions(streamDefinition); StreamAppDefinition source = apps.get(0); StreamAppDefinition sink = apps.get(1); - assertEquals("time", source.getRegisteredAppName()); - assertEquals("ticktock.time", source.getProperties().get(BindingPropertyKeys.OUTPUT_DESTINATION)); - assertEquals("ticktock", source.getProperties().get(BindingPropertyKeys.OUTPUT_REQUIRED_GROUPS)); - assertFalse(source.getProperties().containsKey(BindingPropertyKeys.INPUT_DESTINATION)); - assertEquals("log", sink.getRegisteredAppName()); - assertEquals("ticktock.time", sink.getProperties().get(BindingPropertyKeys.INPUT_DESTINATION)); - assertEquals("ticktock", sink.getProperties().get(BindingPropertyKeys.INPUT_GROUP)); - assertFalse(sink.getProperties().containsKey(BindingPropertyKeys.OUTPUT_DESTINATION)); + assertThat(source.getRegisteredAppName()).isEqualTo("time"); + 
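// The generated destination is named '<stream>.<producing app>' ('ticktock.time') and the stream name doubles as the consumer group, as the entries below confirm. +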
assertThat(source.getProperties()).containsEntry(BindingPropertyKeys.OUTPUT_DESTINATION, "ticktock.time"); + assertThat(source.getProperties()).containsEntry(BindingPropertyKeys.OUTPUT_REQUIRED_GROUPS, "ticktock"); + assertThat(source.getProperties().containsKey(BindingPropertyKeys.INPUT_DESTINATION)).isFalse(); + assertThat(sink.getRegisteredAppName()).isEqualTo("log"); + assertThat(sink.getProperties()).containsEntry(BindingPropertyKeys.INPUT_DESTINATION, "ticktock.time"); + assertThat(sink.getProperties()).containsEntry(BindingPropertyKeys.INPUT_GROUP, "ticktock"); + assertThat(sink.getProperties()).doesNotContainKey(BindingPropertyKeys.OUTPUT_DESTINATION); } @Test - public void testBindings3Apps() { + void bindings3Apps() { StreamDefinition streamDefinition = new StreamDefinition("ticktock", "time | filter |log"); List apps = this.streamDefinitionService.getAppDefinitions(streamDefinition); @@ -237,20 +234,20 @@ public void testBindings3Apps() { StreamAppDefinition processor = apps.get(1); StreamAppDefinition sink = apps.get(2); - assertEquals("time", source.getRegisteredAppName()); - assertEquals("ticktock.time", source.getProperties().get(BindingPropertyKeys.OUTPUT_DESTINATION)); - assertEquals("ticktock", source.getProperties().get(BindingPropertyKeys.OUTPUT_REQUIRED_GROUPS)); - assertFalse(source.getProperties().containsKey(BindingPropertyKeys.INPUT_DESTINATION)); - - assertEquals("filter", processor.getRegisteredAppName()); - assertEquals("ticktock.time", processor.getProperties().get(BindingPropertyKeys.INPUT_DESTINATION)); - assertEquals("ticktock", processor.getProperties().get(BindingPropertyKeys.INPUT_GROUP)); - assertEquals("ticktock.filter", processor.getProperties().get(BindingPropertyKeys.OUTPUT_DESTINATION)); - assertEquals("ticktock", processor.getProperties().get(BindingPropertyKeys.OUTPUT_REQUIRED_GROUPS)); - - assertEquals("log", sink.getRegisteredAppName()); - assertEquals("ticktock.filter", sink.getProperties().get(BindingPropertyKeys.INPUT_DESTINATION)); - assertEquals("ticktock", sink.getProperties().get(BindingPropertyKeys.INPUT_GROUP)); - assertFalse(sink.getProperties().containsKey(BindingPropertyKeys.OUTPUT_DESTINATION)); + assertThat(source.getRegisteredAppName()).isEqualTo("time"); + assertThat(source.getProperties()).containsEntry(BindingPropertyKeys.OUTPUT_DESTINATION, "ticktock.time"); + assertThat(source.getProperties()).containsEntry(BindingPropertyKeys.OUTPUT_REQUIRED_GROUPS, "ticktock"); + assertThat(source.getProperties()).doesNotContainKey(BindingPropertyKeys.INPUT_DESTINATION); + + assertThat(processor.getRegisteredAppName()).isEqualTo("filter"); + assertThat(processor.getProperties()).containsEntry(BindingPropertyKeys.INPUT_DESTINATION, "ticktock.time"); + assertThat(processor.getProperties()).containsEntry(BindingPropertyKeys.INPUT_GROUP, "ticktock"); + assertThat(processor.getProperties()).containsEntry(BindingPropertyKeys.OUTPUT_DESTINATION, "ticktock.filter"); + assertThat(processor.getProperties()).containsEntry(BindingPropertyKeys.OUTPUT_REQUIRED_GROUPS, "ticktock"); + + assertThat(sink.getRegisteredAppName()).isEqualTo("log"); + assertThat(sink.getProperties()).containsEntry(BindingPropertyKeys.INPUT_DESTINATION, "ticktock.filter"); + assertThat(sink.getProperties()).containsEntry(BindingPropertyKeys.INPUT_GROUP, "ticktock"); + assertThat(sink.getProperties()).doesNotContainKey(BindingPropertyKeys.OUTPUT_DESTINATION); } } diff --git a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/TaskDefinitionTests.java 
b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/TaskDefinitionTests.java index e468f0f0f1..f6b48c5395 100644 --- a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/TaskDefinitionTests.java +++ b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/TaskDefinitionTests.java @@ -20,108 +20,109 @@ import java.util.HashMap; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThat; /** * @author Thomas Risberg * @author Glenn Renfro + * @author Corneil du Plessis */ -public class TaskDefinitionTests { +class TaskDefinitionTests { @Test - public void testDefinition() { + void definition() { TaskDefinition definition = new TaskDefinition("test", "timestamp"); - assertEquals("test", definition.getName()); - assertEquals("timestamp", definition.getDslText()); - assertEquals("timestamp", definition.getRegisteredAppName()); - assertEquals(1, definition.getProperties().size()); - assertEquals("test", definition.getProperties().get("spring.cloud.task.name")); + assertThat(definition.getName()).isEqualTo("test"); + assertThat(definition.getDslText()).isEqualTo("timestamp"); + assertThat(definition.getRegisteredAppName()).isEqualTo("timestamp"); + assertThat(definition.getProperties()).hasSize(1); + assertThat(definition.getProperties()).containsEntry("spring.cloud.task.name", "test"); TaskDefinition composedDef = new TaskDefinition("composed", "foo && bar"); - assertEquals("composed", composedDef.getName()); - assertEquals("foo && bar", composedDef.getDslText()); - assertEquals("composed", composedDef.getRegisteredAppName()); - assertEquals(1, composedDef.getProperties().size()); - assertEquals("composed", composedDef.getProperties().get("spring.cloud.task.name")); + assertThat(composedDef.getName()).isEqualTo("composed"); + assertThat(composedDef.getDslText()).isEqualTo("foo && bar"); + assertThat(composedDef.getRegisteredAppName()).isEqualTo("composed"); + assertThat(composedDef.getProperties()).hasSize(1); + assertThat(composedDef.getProperties()).containsEntry("spring.cloud.task.name", "composed"); } @Test - public void testPackageProtectedConstructor() { + void packageProtectedConstructor() { TaskDefinition definition = new TaskDefinition("timestamp", "label", Collections.singletonMap("spring.cloud.task.name", "label")); - assertEquals("label", definition.getName()); - assertEquals("timestamp", definition.getRegisteredAppName()); - assertEquals(1, definition.getProperties().size()); - assertEquals("label", definition.getProperties().get("spring.cloud.task.name")); + assertThat(definition.getName()).isEqualTo("label"); + assertThat(definition.getRegisteredAppName()).isEqualTo("timestamp"); + assertThat(definition.getProperties()).hasSize(1); + assertThat(definition.getProperties()).containsEntry("spring.cloud.task.name", "label"); } @Test - public void testBuilder() { + void builder() { TaskDefinition definition = new TaskDefinition.TaskDefinitionBuilder() .from(new TaskDefinition("test", "timestamp")) .build(); - assertEquals("test", definition.getName()); - assertEquals("timestamp", definition.getRegisteredAppName()); - assertEquals(1, definition.getProperties().size()); - assertEquals("test", definition.getProperties().get("spring.cloud.task.name")); + 
assertThat(definition.getName()).isEqualTo("test"); + assertThat(definition.getRegisteredAppName()).isEqualTo("timestamp"); + assertThat(definition.getProperties()).hasSize(1); + assertThat(definition.getProperties()).containsEntry("spring.cloud.task.name", "test"); } @Test - public void testEquality() { + void equality() { TaskDefinition definitionOne = new TaskDefinition("test", "timestamp"); TaskDefinition definitionTwo = new TaskDefinition("test", "timestamp"); - assertTrue("TaskDefinitions were expected to be equal.", definitionOne.equals(definitionTwo)); - assertTrue("TaskDefinitions were expected to be equal.", definitionOne.equals(definitionOne)); + assertThat(definitionTwo).as("TaskDefinitions were expected to be equal.").isEqualTo(definitionOne); + assertThat(definitionOne).as("TaskDefinitions were expected to be equal.").isEqualTo(definitionOne); } @Test - public void testInequality() { + void inequality() { TaskDefinition definitionOne = new TaskDefinition("test", "timestamp"); TaskDefinition definitionFoo = new TaskDefinition("test", "foo"); - assertFalse("TaskDefinitions were not expected to be equal.", definitionOne.equals(definitionFoo)); - assertFalse("TaskDefinitions were not expected to be equal.", definitionOne.equals(null)); - assertFalse("TaskDefinitions were not expected to be equal.", definitionOne.equals("HI")); + assertThat(definitionFoo).as("TaskDefinitions were not expected to be equal.").isNotEqualTo(definitionOne); + assertThat(definitionOne).as("TaskDefinitions were not expected to be equal.").isNotEqualTo(null); + assertThat(definitionOne).as("TaskDefinitions were not expected to be equal.").isNotEqualTo("HI"); } + @Test - public void testHashCode() { + void testHashCode() { TaskDefinition definitionOne = new TaskDefinition("test", "timestamp"); TaskDefinition definitionTwo = new TaskDefinition("test", "timestamp"); TaskDefinition definitionFoo = new TaskDefinition("test", "foo"); - assertTrue("TaskDefinitions' hashcodes were expected to be equal.", definitionOne.hashCode() == definitionTwo.hashCode()); - assertFalse("TaskDefinitions' hashcodes were not expected to be equal.", definitionOne.hashCode() == definitionFoo.hashCode()); + assertThat(definitionTwo.hashCode()).as("TaskDefinitions' hashcodes were expected to be equal.").isEqualTo(definitionOne.hashCode()); + assertThat(definitionOne.hashCode() == definitionFoo.hashCode()).as("TaskDefinitions' hashcodes were not expected to be equal.").isFalse(); } @Test - public void testDefinitionWithArguments() { + void definitionWithArguments() { TaskDefinition definition = new TaskDefinition("test", "timestamp --timestamp.format=yyyy"); - assertEquals("test", definition.getName()); - assertEquals("timestamp --timestamp.format=yyyy", definition.getDslText()); - assertEquals("timestamp", definition.getRegisteredAppName()); - assertEquals(2, definition.getProperties().size()); - assertEquals("test", definition.getProperties().get("spring.cloud.task.name")); - assertEquals("yyyy", definition.getProperties().get("timestamp.format")); + assertThat(definition.getName()).isEqualTo("test"); + assertThat(definition.getDslText()).isEqualTo("timestamp --timestamp.format=yyyy"); + assertThat(definition.getRegisteredAppName()).isEqualTo("timestamp"); + assertThat(definition.getProperties()).hasSize(2); + assertThat(definition.getProperties()).containsEntry("spring.cloud.task.name", "test"); + assertThat(definition.getProperties()).containsEntry("timestamp.format", "yyyy"); } @Test - public void testBuilderSetProperties() { + 
void builderSetProperties() { Map<String, String> properties = new HashMap<>(); properties.put("foo", "bar"); - TaskDefinition definition = new TaskDefinition.TaskDefinitionBuilder() + TaskDefinition definition = TaskDefinition.TaskDefinitionBuilder .from(new TaskDefinition("test", "timestamp")) .setProperties(properties) .build(); - assertEquals("test", definition.getName()); - assertEquals("timestamp", definition.getRegisteredAppName()); - assertEquals(1, definition.getProperties().size()); - assertEquals("bar", definition.getProperties().get("foo")); + assertThat(definition.getName()).isEqualTo("test"); + assertThat(definition.getRegisteredAppName()).isEqualTo("timestamp"); + assertThat(definition.getProperties()).hasSize(1); + assertThat(definition.getProperties()).containsEntry("foo", "bar"); } } diff --git a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/TaskDefinitionToDslConverterTests.java b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/TaskDefinitionToDslConverterTests.java index 849c36accb..b1bb0314a8 100644 --- a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/TaskDefinitionToDslConverterTests.java +++ b/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/TaskDefinitionToDslConverterTests.java @@ -16,30 +16,32 @@ package org.springframework.cloud.dataflow.core; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType; + import java.util.Arrays; +import java.util.Collections; import java.util.List; -import org.junit.Test; - -import static org.junit.Assert.assertEquals; +import org.junit.jupiter.api.Test; /** * @author Christian Tzolov * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ -public class TaskDefinitionToDslConverterTests { +class TaskDefinitionToDslConverterTests { @Test - public void testTaskDsl() { - assertEquals("foo --prop2=value2 --prop1=value1", - new TaskDefinitionToDslConverter().toDsl(new TaskDefinition("myTask", "foo --prop1=value1 --prop2=value2"))); + void taskDsl() { + assertThat(new TaskDefinitionToDslConverter().toDsl(new TaskDefinition("myTask", "foo --prop1=value1 --prop2=value2"))).isEqualTo("foo --prop2=value2 --prop1=value1"); } @Test - public void testExclusionOfDataFlowAddedProperties() { + void exclusionOfDataFlowAddedProperties() { - List<String> dataFlowAddedProperties = Arrays.asList( - TaskDefinition.SPRING_CLOUD_TASK_NAME); + List<String> dataFlowAddedProperties = Collections.singletonList( + TaskDefinition.SPRING_CLOUD_TASK_NAME); for (String key : dataFlowAddedProperties) { String dslText = "foo --" + key + "=boza"; @@ -47,13 +49,12 @@ public void testExclusionOfDataFlowAddedProperties() { System.out.println(dslText); TaskDefinition taskDefinition = new TaskDefinition("streamName", dslText); - assertEquals("foo", - new TaskDefinitionToDslConverter().toDsl(taskDefinition)); + assertThat(new TaskDefinitionToDslConverter().toDsl(taskDefinition)).isEqualTo("foo"); } } @Test - public void testPropertyAutoQuotes() { + void propertyAutoQuotes() { TaskDefinition taskDefinition = new TaskDefinition("fooTask", "foo"); @@ -66,30 +67,30 @@ public void testPropertyAutoQuotes() { .setProperty("p5", "\"k l\"") .build(); - assertEquals("foo --p1='a b' --p2=\"'c d'\" --p3=ef --p4=\"'i' 'j'\" --p5=\"k l\"", - new TaskDefinitionToDslConverter().toDsl(fooTask)); + assertThat(new
TaskDefinitionToDslConverter().toDsl(fooTask)).isEqualTo("foo --p1='a b' --p2=\"'c d'\" --p3=ef --p4=\"'i' 'j'\" --p5=\"k l\""); } @Test - public void autoQuotesOnStarProperties() { + void autoQuotesOnStarProperties() { TaskDefinition taskDefinition = new TaskDefinition("fooTask", "jdbc-mssql --cron='/10 * * * * *' " + "--max-messages=-1 --password='******' --query='UPDATE top (100) ASSURANCE SET assurance_flag = 1 " + "OUTPUT Inserted.* WHERE assurance_flag IS NULL' " + - "--url='jdbc:sqlserver://db:1433;databaseName=Spring' --username='*****'"); + "--url='jdbc:sqlserver://db:1433;encrypt=false&databaseName=Spring' --username='*****'"); - assertEquals("jdbc-mssql --cron='/10 * * * * *' " + - "--max-messages=-1 --password='******' --query='UPDATE top (100) ASSURANCE SET assurance_flag = 1 " + - "OUTPUT Inserted.* WHERE assurance_flag IS NULL' " + - "--url='jdbc:sqlserver://db:1433;databaseName=Spring' --username='*****'", - new TaskDefinitionToDslConverter().toDsl(taskDefinition)); + assertThat(new TaskDefinitionToDslConverter().toDsl(taskDefinition)).isEqualTo("jdbc-mssql --cron='/10 * * * * *' " + + "--max-messages=-1 --password='******' --query='UPDATE top (100) ASSURANCE SET assurance_flag = 1 " + + "OUTPUT Inserted.* WHERE assurance_flag IS NULL' " + + "--url='jdbc:sqlserver://db:1433;encrypt=false&databaseName=Spring' --username='*****'"); } - @Test(expected = IllegalArgumentException.class) - public void compositeTaskDsl() { - TaskDefinition taskDefinition = new TaskDefinition("composedTaskName", "foo && bar"); - new TaskDefinitionToDslConverter().toDsl(taskDefinition); + @Test + void compositeTaskDsl() { + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> { + TaskDefinition taskDefinition = new TaskDefinition("composedTaskName", "foo && bar"); + new TaskDefinitionToDslConverter().toDsl(taskDefinition); + }); } } diff --git a/spring-cloud-dataflow-dependencies/pom.xml b/spring-cloud-dataflow-dependencies/pom.xml index f98f5eb5f9..2d764d478e 100644 --- a/spring-cloud-dataflow-dependencies/pom.xml +++ b/spring-cloud-dataflow-dependencies/pom.xml @@ -4,11 +4,11 @@ spring-cloud-dataflow-dependencies-parent org.springframework.cloud - 2.8.0-SNAPSHOT - + 3.0.0-SNAPSHOT + ../spring-cloud-dataflow-build/spring-cloud-dataflow-dependencies-parent spring-cloud-dataflow-dependencies - 2.8.0-SNAPSHOT + 3.0.0-SNAPSHOT pom spring-cloud-dataflow-dependencies Spring Cloud Data Flow Dependencies BOM designed to support consumption of Spring Cloud Data Flow from @@ -19,108 +19,118 @@ org.springframework.cloud spring-cloud-dataflow-shell - 2.8.0-SNAPSHOT + ${dataflow.version} org.springframework.cloud spring-cloud-dataflow-shell-core - 2.8.0-SNAPSHOT + ${dataflow.version} org.springframework.cloud spring-cloud-dataflow-completion - 2.8.0-SNAPSHOT + ${dataflow.version} org.springframework.cloud spring-cloud-dataflow-core-dsl - 2.8.0-SNAPSHOT + ${dataflow.version} org.springframework.cloud spring-cloud-dataflow-core - 2.8.0-SNAPSHOT + ${dataflow.version} org.springframework.cloud spring-cloud-dataflow-rest-client - 2.8.0-SNAPSHOT + ${dataflow.version} org.springframework.cloud spring-cloud-dataflow-configuration-metadata - 2.8.0-SNAPSHOT + ${dataflow.version} org.springframework.cloud spring-cloud-dataflow-rest-resource - 2.8.0-SNAPSHOT + ${dataflow.version} org.springframework.cloud spring-cloud-dataflow-registry - 2.8.0-SNAPSHOT + ${dataflow.version} org.springframework.cloud spring-cloud-dataflow-container-registry - 2.8.0-SNAPSHOT + ${dataflow.version} 
org.springframework.cloud spring-cloud-dataflow-composed-task-runner - 2.8.0-SNAPSHOT + ${dataflow.version} org.springframework.cloud spring-cloud-dataflow-server-core - 2.8.0-SNAPSHOT + ${dataflow.version} org.springframework.cloud spring-cloud-dataflow-server - 2.8.0-SNAPSHOT + ${dataflow.version} org.springframework.cloud spring-cloud-dataflow-autoconfigure - 2.8.0-SNAPSHOT + ${dataflow.version} org.springframework.cloud spring-cloud-starter-dataflow-server - 2.8.0-SNAPSHOT - - - org.springframework.cloud - spring-cloud-starter-dataflow-ui - 2.8.0-SNAPSHOT + ${dataflow.version} org.springframework.cloud spring-cloud-starter-dataflow-server - 2.8.0-SNAPSHOT + ${dataflow.version} test-jar org.springframework.cloud spring-cloud-dataflow-audit - 2.8.0-SNAPSHOT + ${dataflow.version} org.springframework.cloud spring-cloud-dataflow-platform-kubernetes - 2.8.0-SNAPSHOT + ${dataflow.version} org.springframework.cloud spring-cloud-dataflow-platform-cloudfoundry - 2.8.0-SNAPSHOT + ${dataflow.version} org.springframework.cloud spring-cloud-dataflow-test - 2.8.0-SNAPSHOT + ${dataflow.version} + + + org.springframework.cloud + spring-cloud-common-security-config-core + ${dataflow.version} + + + org.springframework.cloud + spring-cloud-common-security-config-web + ${dataflow.version} + + + org.springframework.cloud + spring-cloud-starter-common-security-config-web + ${dataflow.version} @@ -129,28 +139,36 @@ spring-docs - scp://static.springframework.org/var/www/domains/springframework.org/static/htdocs/spring-cloud/docs/${project.artifactId}/${project.version} + scp://static.springframework.org/var/www/domains/springframework.org/static/htdocs/spring-cloud/docs/${project.artifactId}/${dataflow.version} repo.spring.io Spring Release Repository - https://repo.spring.io/libs-release-local + https://repo.spring.io/libs-staging-local repo.spring.io Spring Snapshot Repository - https://repo.spring.io/libs-snapshot-local + https://repo.spring.io/snapshot spring + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + spring-snapshots Spring Snapshots - https://repo.spring.io/libs-snapshot-local + https://repo.spring.io/snapshot true @@ -158,25 +176,25 @@ spring-milestones Spring Milestones - https://repo.spring.io/libs-milestone-local + https://repo.spring.io/milestone false - - spring-releases - Spring Releases - https://repo.spring.io/release + + + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 false - - - + spring-snapshots Spring Snapshots - https://repo.spring.io/libs-snapshot-local + https://repo.spring.io/snapshot true @@ -184,7 +202,7 @@ spring-milestones Spring Milestones - https://repo.spring.io/libs-milestone-local + https://repo.spring.io/milestone false diff --git a/spring-cloud-dataflow-docs/pom.xml b/spring-cloud-dataflow-docs/pom.xml index b1d18d44ee..6458f60c5c 100644 --- a/spring-cloud-dataflow-docs/pom.xml +++ b/spring-cloud-dataflow-docs/pom.xml @@ -4,69 +4,80 @@ org.springframework.cloud spring-cloud-dataflow-parent - 2.8.0-SNAPSHOT + 3.0.0-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-docs Spring Cloud Data Flow Docs Spring Cloud Data Flow Docs + jar ${basedir}/.. - 0.2.1.RELEASE + 0.2.5 + 3.4.1 + ${basedir}/.. 
org.springframework.cloud spring-cloud-dataflow-configuration-metadata + ${project.version} org.springframework.cloud spring-cloud-dataflow-core + ${project.version} org.springframework.cloud spring-cloud-dataflow-registry + ${project.version} org.springframework.cloud spring-cloud-dataflow-rest-resource + ${project.version} org.springframework.cloud spring-cloud-dataflow-server-core + ${project.version} org.springframework.cloud spring-cloud-dataflow-server + ${project.version} org.springframework.cloud spring-cloud-dataflow-rest-client + ${project.version} org.springframework.cloud spring-cloud-dataflow-shell-core + ${project.version} org.springframework.cloud spring-cloud-dataflow-completion - - - io.spring.docresources - spring-doc-resources - ${docs.resources.version} - zip - true + ${project.version} - full + docs + + org.apache.maven.plugins + maven-jar-plugin + 3.3.0 + org.apache.maven.plugins maven-javadoc-plugin - 2.10.1 + ${maven-javadoc-plugin.version} attach-javadocs @@ -84,7 +95,7 @@ ${basedir}/src/main/javadoc/spring-javadoc.css - https://docs.spring.io/spring-framework/docs/${spring.version}/javadoc-api/ + https://docs.spring.io/spring-framework/docs/${javadoc-spring.version}/javadoc-api/ https://docs.spring.io/spring-shell/docs/current/api/ @@ -92,29 +103,10 @@ - - org.apache.maven.plugins - maven-dependency-plugin - - - unpack-doc-resources - - unpack-dependencies - - generate-resources - - io.spring.docresources - spring-doc-resources - zip - true - ${project.build.directory}/refdocs/ - - - - org.apache.maven.plugins maven-resources-plugin + ${maven-resources-plugin.version} copy-asciidoc-resources @@ -137,7 +129,14 @@ org.asciidoctor asciidoctor-maven-plugin - 1.5.6 + 2.2.4 + + + io.spring.asciidoctor.backends + spring-asciidoctor-backends + 0.0.5 + + ${project.build.directory}/refdocs/ ${project.build.directory}/generated-docs @@ -170,11 +169,11 @@ process-asciidoc - html5 + spring-html highlight.js book - // these attributes are required to use the doc resources + shared css/ spring.css diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/api-guide.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/api-guide.adoc index 3482e77193..61e8df8b5a 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/api-guide.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/api-guide.adoc @@ -15,7 +15,44 @@ In fact, the Spring Cloud Data Flow shell is a first-class consumer of that API. TIP: If you plan to use the REST API with Java, you should consider using the provided Java client (`DataflowTemplate`) that uses the REST API internally. +[[api-guide-version]] +=== HTTP Version +Spring Cloud Data Flow establishes a RESTful API version that is updated when there is a breaking change to the API. +The API version can be seen at the end of the home page of Spring Cloud Data Flow as shown in the example below: + +==== +[source,json] +---- +{ + "_links": { + "dashboard": { "href" : "http://localhost:9393/dashboard" }, + ... + }, + "api.revision":15 +} +---- +==== +The table below shows the SCDF Release version and its current RESTful API version. 
+ +|=== +| SCDF Version | API Version + +| 2.11.x +| 14 + +| 2.10.x +| 14 + +| 2.9.x +| 14 + +| 2.8.x +| 14 + +| 2.7.x +| 14 +|=== [[api-guide-overview-http-verbs]] === HTTP verbs @@ -271,7 +308,7 @@ include::{snippets}/app-registry-documentation/get-applications-filtered/http-re [[api-guide-resources-app-registry-request-parameters]] ===== Request Parameters -include::{snippets}/app-registry-documentation/get-applications-filtered/request-parameters.adoc[] +include::{snippets}/app-registry-documentation/get-applications-filtered/query-parameters.adoc[] @@ -312,7 +349,7 @@ include::{snippets}/app-registry-documentation/get-single-application/http-reque [[api-guide-resources-app-registry-get-request-parameters]] ===== Request Parameters -include::{snippets}/app-registry-documentation/get-single-application/request-parameters.adoc[] +include::{snippets}/app-registry-documentation/get-single-application/query-parameters.adoc[] @@ -361,7 +398,7 @@ include::{snippets}/app-registry-documentation/registering-an-application/http-r [[api-guide-resources-app-registry-post-request-parameters]] ===== Request Parameters -include::{snippets}/app-registry-documentation/registering-an-application/request-parameters.adoc[] +include::{snippets}/app-registry-documentation/registering-an-application/query-parameters.adoc[] @@ -407,7 +444,7 @@ include::{snippets}/app-registry-documentation/registering-an-application-versio [[api-guide-resources-app-registry-post-versioned-request-parameters]] ===== Request Parameters -include::{snippets}/app-registry-documentation/registering-an-application-version/request-parameters.adoc[] +include::{snippets}/app-registry-documentation/registering-an-application-version/query-parameters.adoc[] @@ -455,7 +492,7 @@ include::{snippets}/app-registry-documentation/bulk-registering-apps/http-reques [[api-guide-resources-app-registry-bulk-request-parameters]] ===== Request Parameters -include::{snippets}/app-registry-documentation/bulk-registering-apps/request-parameters.adoc[] +include::{snippets}/app-registry-documentation/bulk-registering-apps/query-parameters.adoc[] @@ -618,7 +655,7 @@ include::{snippets}/audit-records-documentation/list-all-audit-records/http-requ [[api-guide-resources-audit-records-list-request-parameters]] ===== Request Parameters -include::{snippets}/audit-records-documentation/list-all-audit-records/request-parameters.adoc[] +include::{snippets}/audit-records-documentation/list-all-audit-records/query-parameters.adoc[] [[api-guide-resources-audit-records-list-example-request]] ===== Example Request @@ -765,7 +802,7 @@ include::{snippets}/stream-definitions-documentation/create-definition/http-requ [[api-guide-resources-stream-definitions-create-request-parameters]] ===== Request Parameters -include::{snippets}/stream-definitions-documentation/create-definition/request-parameters.adoc[] +include::{snippets}/stream-definitions-documentation/create-definition/query-parameters.adoc[] @@ -805,7 +842,7 @@ include::{snippets}/stream-definitions-documentation/list-all-stream-definitions [[api-guide-resources-stream-definitions-list-request-parameters]] ===== Request Parameters -include::{snippets}/stream-definitions-documentation/list-all-stream-definitions/request-parameters.adoc[] +include::{snippets}/stream-definitions-documentation/list-all-stream-definitions/query-parameters.adoc[] [[api-guide-resources-stream-definitions-list-example-request]] @@ -845,7 +882,7 @@ include::{snippets}/stream-definitions-documentation/list-related-stream-definit 
[[api-guide-resources-stream-definitions-list-related-request-parameters]] ===== Request Parameters -include::{snippets}/stream-definitions-documentation/list-related-stream-definitions/request-parameters.adoc[] +include::{snippets}/stream-definitions-documentation/list-related-stream-definitions/query-parameters.adoc[] @@ -1402,7 +1439,7 @@ include::{snippets}/task-definitions-documentation/create-definition/http-reques [[api-guide-resources-stream-task-definitions-creating-request-parameters]] ===== Request Parameters -include::{snippets}/task-definitions-documentation/create-definition/request-parameters.adoc[] +include::{snippets}/task-definitions-documentation/create-definition/query-parameters.adoc[] @@ -1443,7 +1480,7 @@ include::{snippets}/task-definitions-documentation/list-all-task-definitions/htt [[api-guide-resources-stream-task-definitions-list-request-parameters]] ===== Request Parameters -include::{snippets}/task-definitions-documentation/list-all-task-definitions/request-parameters.adoc[] +include::{snippets}/task-definitions-documentation/list-all-task-definitions/query-parameters.adoc[] @@ -1580,7 +1617,7 @@ include::{snippets}/task-scheduler-documentation/create-schedule/http-request.ad [[api-guide-resources-stream-task-schedule-creating-request-parameters]] ===== Request Parameters -include::{snippets}/task-scheduler-documentation/create-schedule/request-parameters.adoc[] +include::{snippets}/task-scheduler-documentation/create-schedule/query-parameters.adoc[] @@ -1621,7 +1658,7 @@ include::{snippets}/task-scheduler-documentation/list-all-schedules/http-request [[api-guide-resources-stream-task-schedule-list-request-parameters]] ===== Request Parameters -include::{snippets}/task-scheduler-documentation/list-all-schedules/request-parameters.adoc[] +include::{snippets}/task-scheduler-documentation/list-all-schedules/query-parameters.adoc[] @@ -1661,7 +1698,7 @@ include::{snippets}/task-scheduler-documentation/list-filtered-schedules/path-pa [[api-guide-resources-stream-task-schedule-list-filtered-request-parameters]] ===== Request Parameters -include::{snippets}/task-scheduler-documentation/list-filtered-schedules/request-parameters.adoc[] +include::{snippets}/task-scheduler-documentation/list-filtered-schedules/query-parameters.adoc[] @@ -1770,9 +1807,10 @@ The following topics provide more details: [[api-guide-resources-task-executions-launching]] -==== Launching a Task +==== Launching a Task (Legacy) + +Launching a task is done by requesting the creation of a new task execution. This endpoint will fail if the task is registered as a Spring Boot 3 application. -Launching a task is done by requesting the creation of a new task execution. 
The following topics provide more details: * <> @@ -1780,8 +1818,6 @@ * <> * <> - - [[api-guide-resources-task-executions-launching-request-structure]] ===== Request Structure @@ -1792,7 +1828,7 @@ include::{snippets}/task-executions-documentation/launch-task/http-request.adoc[ [[api-guide-resources-task-executions-launching-request-parameters]] ===== Request Parameters -include::{snippets}/task-executions-documentation/launch-task/request-parameters.adoc[] +include::{snippets}/task-executions-documentation/launch-task/query-parameters.adoc[] @@ -1807,6 +1843,45 @@ include::{snippets}/task-executions-documentation/launch-task/curl-request.adoc[ include::{snippets}/task-executions-documentation/launch-task/http-response.adoc[] +[[api-guide-resources-task-executions-launching-boot3]] +==== Launching a Task + +Launching a task is done by requesting the creation of a new task execution. The response will contain an execution id and a schema target. + +The following topics provide more details: + +* <<api-guide-resources-task-executions-launching-boot3-request-structure>> +* <<api-guide-resources-task-executions-launching-boot3-request-parameters>> +* <<api-guide-resources-task-executions-launching-boot3-example-request>> +* <<api-guide-resources-task-executions-launching-boot3-response-structure>> + + +[[api-guide-resources-task-executions-launching-boot3-request-structure]] +===== Request Structure + +include::{snippets}/task-executions-documentation/launch-task-boot3/http-request.adoc[] + + + +[[api-guide-resources-task-executions-launching-boot3-request-parameters]] +===== Request Parameters + +include::{snippets}/task-executions-documentation/launch-task-boot3/query-parameters.adoc[] + + + +[[api-guide-resources-task-executions-launching-boot3-example-request]] +===== Example Request + +include::{snippets}/task-executions-documentation/launch-task-boot3/curl-request.adoc[] + + +[[api-guide-resources-task-executions-launching-boot3-response-structure]] +===== Response Structure + +include::{snippets}/task-executions-documentation/launch-task-boot3/http-response.adoc[] + + [[api-guide-resources-task-executions-stopping]] ==== Stopping a Task @@ -1836,7 +1911,7 @@ include::{snippets}/task-executions-documentation/stop-task/path-parameters.adoc [[api-guide-resources-task-executions-stopping-request-parameters]] ===== Request Parameters -include::{snippets}/task-executions-documentation/stop-task/request-parameters.adoc[] +include::{snippets}/task-executions-documentation/stop-task/query-parameters.adoc[] [[api-guide-resources-task-executions-stopping-example-request]] ===== Example Request @@ -1872,7 +1947,7 @@ include::{snippets}/task-executions-documentation/list-task-executions/http-requ [[api-guide-resources-task-executions-list-request-parameters]] ===== Request Parameters -include::{snippets}/task-executions-documentation/list-task-executions/request-parameters.adoc[] +include::{snippets}/task-executions-documentation/list-task-executions/query-parameters.adoc[] @@ -1913,7 +1988,7 @@ include::{snippets}/task-executions-documentation/list-task-executions-by-name/h [[api-guide-resources-task-executions-list-by-name-request-parameters]] ===== Request Parameters -include::{snippets}/task-executions-documentation/list-task-executions-by-name/request-parameters.adoc[] +include::{snippets}/task-executions-documentation/list-task-executions-by-name/query-parameters.adoc[] @@ -1930,6 +2005,164 @@ include::{snippets}/task-executions-documentation/list-task-executions-by-name/c include::{snippets}/task-executions-documentation/list-task-executions-by-name/http-response.adoc[] +[[api-guide-resources-task-thin-executions-list]] +==== List All Task Thin Executions + +The task executions endpoint lets you list all task executions with
only top-level data. +The following topics provide more details: + +* <<api-guide-resources-task-thin-executions-list-request-structure>> +* <<api-guide-resources-task-thin-executions-list-request-parameters>> +* <<api-guide-resources-task-thin-executions-list-example-request>> +* <<api-guide-resources-task-thin-executions-list-response-structure>> + + + +[[api-guide-resources-task-thin-executions-list-request-structure]] +===== Request Structure + +include::{snippets}/task-executions-documentation/list-task-thin-executions/http-request.adoc[] + + + +[[api-guide-resources-task-thin-executions-list-request-parameters]] +===== Request Parameters + +include::{snippets}/task-executions-documentation/list-task-thin-executions/query-parameters.adoc[] + + + +[[api-guide-resources-task-thin-executions-list-example-request]] +===== Example Request + +include::{snippets}/task-executions-documentation/list-task-thin-executions/curl-request.adoc[] + + + +[[api-guide-resources-task-thin-executions-list-response-structure]] +===== Response Structure + +include::{snippets}/task-executions-documentation/list-task-thin-executions/http-response.adoc[] +
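A quick way to exercise the new thin-executions endpoint while reviewing: the sketch below is a minimal example, assuming a locally running server on the default port 9393 and the `/tasks/thinexecutions` path suggested by the snippet names above (verify it against the generated `http-request.adoc`), with `jq` used for readable output.

```shell
# Hedged sketch: list thin task executions from a local Data Flow server.
# The path is an assumption inferred from the snippet names; confirm it before relying on it.
curl -s "http://localhost:9393/tasks/thinexecutions?page=0&size=10" | jq '._embedded'
```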
+[[api-guide-resources-task-thin-executions-list-by-name]] +==== List All Task Thin Executions With a Specified Task Name + +The task thin executions endpoint lets you list task executions with a specified task name. +The following topics provide more details: + +* <<api-guide-resources-task-thin-executions-list-by-name-request-structure>> +* <<api-guide-resources-task-thin-executions-list-by-name-request-parameters>> +* <<api-guide-resources-task-thin-executions-list-by-name-example-request>> +* <<api-guide-resources-task-thin-executions-list-by-name-response-structure>> + + + +[[api-guide-resources-task-thin-executions-list-by-name-request-structure]] +===== Request Structure + +include::{snippets}/task-executions-documentation/list-task-thin-executions-by-name/http-request.adoc[] + + + +[[api-guide-resources-task-thin-executions-list-by-name-request-parameters]] +===== Request Parameters + +include::{snippets}/task-executions-documentation/list-task-thin-executions-by-name/query-parameters.adoc[] + + + +[[api-guide-resources-task-thin-executions-list-by-name-example-request]] +===== Example Request + +include::{snippets}/task-executions-documentation/list-task-thin-executions-by-name/curl-request.adoc[] + + + +[[api-guide-resources-task-thin-executions-list-by-name-response-structure]] +===== Response Structure + +include::{snippets}/task-executions-documentation/list-task-thin-executions-by-name/http-response.adoc[] [[api-guide-resources-task-executions-detail]] ==== Task Execution Detail @@ -1947,9 +2180,9 @@ The following topics provide more details: [[api-guide-resources-task-executions-detail-request-structure]] ===== Request Structure -include::{snippets}/task-executions-documentation/launch-task-display-detail/http-request.adoc[] +include::{snippets}/task-executions-documentation/get-task-display-detail/http-request.adoc[] -include::{snippets}/task-executions-documentation/launch-task-display-detail/path-parameters.adoc[] +include::{snippets}/task-executions-documentation/get-task-display-detail/path-parameters.adoc[] @@ -1963,16 +2196,56 @@ There are no request parameters for this endpoint. [[api-guide-resources-task-executions-detail-example-request]] ===== Example Request -include::{snippets}/task-executions-documentation/launch-task-display-detail/curl-request.adoc[] +include::{snippets}/task-executions-documentation/get-task-display-detail/curl-request.adoc[] [[api-guide-resources-task-executions-detail-response-structure]] ===== Response Structure -include::{snippets}/task-executions-documentation/launch-task-display-detail/http-response.adoc[] +include::{snippets}/task-executions-documentation/get-task-display-detail/http-response.adoc[] + + +[[api-guide-resources-task-executions-detail-by-external-id]] +==== Task Execution Detail by External Id + +The task executions endpoint lets you get the details about a task execution. +The following topics provide more details: + +* <<api-guide-resources-task-executions-detail-by-external-id-request-structure>> +* <<api-guide-resources-task-executions-detail-by-external-id-request-parameters>> +* <<api-guide-resources-task-executions-detail-by-external-id-example-request>> +* <<api-guide-resources-task-executions-detail-by-external-id-response-structure>> + + + +[[api-guide-resources-task-executions-detail-by-external-id-request-structure]] +===== Request Structure + +include::{snippets}/task-executions-documentation/get-task-display-detail-by-external-id/http-request.adoc[] + +include::{snippets}/task-executions-documentation/get-task-display-detail-by-external-id/path-parameters.adoc[] + + +[[api-guide-resources-task-executions-detail-by-external-id-request-parameters]] +===== Request Parameters + +There are no request parameters for this endpoint.
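For the by-external-id lookup documented above, a hedged sketch follows; both the path and the sample id are illustrative assumptions (the authoritative form is in the generated `http-request.adoc` and `path-parameters.adoc` snippets).

```shell
# Hedged sketch: fetch one task execution by its external execution id.
# The path and the id value are illustrative assumptions; verify against the snippets.
curl -s "http://localhost:9393/tasks/executions/external/taskA-95300cc" | jq .
```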
+ + + +[[api-guide-resources-task-executions-detail-by-external-id-example-request]] +===== Example Request + +include::{snippets}/task-executions-documentation/get-task-display-detail-by-external-id/curl-request.adoc[] + + + +[[api-guide-resources-task-executions-detail-by-external-id-response-structure]] +===== Response Structure +include::{snippets}/task-executions-documentation/get-task-display-detail-by-external-id/http-response.adoc[] [[api-guide-resources-task-executions-delete]] ==== Delete Task Execution @@ -2017,7 +2290,7 @@ values: - CLEANUP - REMOVE_DATA -include::{snippets}/task-executions-documentation/task-execution-remove-and-task-data-remove/request-parameters.adoc[] +include::{snippets}/task-executions-documentation/task-execution-remove-and-task-data-remove/query-parameters.adoc[] [[api-guide-resources-task-executions-delete-example-request]] ===== Example Request @@ -2042,12 +2315,17 @@ include::{snippets}/task-executions-documentation/task-execution-remove-and-task include::{snippets}/task-executions-documentation/task-execution-remove-and-task-data-remove/path-parameters.adoc[] -include::{snippets}/task-executions-documentation/task-execution-remove-and-task-data-remove/request-parameters.adoc[] +include::{snippets}/task-executions-documentation/task-execution-remove-and-task-data-remove/query-parameters.adoc[] IMPORTANT: When deleting data from the persistence store by using the `REMOVE_DATA` action parameter, you must provide task execution IDs that represent parent task executions. When you provide child task executions (executed as part of a composed task), a `400` (Bad Request) HTTP status is returned. +NOTE: When deleting a large number of task executions, some database types limit the number of entries in the `IN` clause (the method Spring Cloud Data Flow uses to delete relationships for task executions). +Spring Cloud Data Flow supports chunking these deletes for SQL Server (maximum of 2100 entries) and Oracle (maximum of 1000 entries) databases. +However, Spring Cloud Data Flow also lets users set their own chunking factor. To do this, set the `spring.cloud.dataflow.task.executionDeleteChunkSize` property to the appropriate chunk size. +The default is `0`, which means Spring Cloud Data Flow does not chunk the task execution deletes (except for Oracle and SQL Server databases). +
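Because the chunking factor is an ordinary server property, it can be tried directly; a minimal sketch follows, assuming the standard server jar name (adjust for your build) and an illustrative chunk size.

```shell
# Hedged sketch: start the server with a custom task-execution delete chunk size.
# The jar name is an assumption; 500 is an illustrative value.
java -jar spring-cloud-dataflow-server.jar \
  --spring.cloud.dataflow.task.executionDeleteChunkSize=500
```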
[[api-guide-resources-task-executions-current-count]] ==== Task Execution Current Count @@ -2063,7 +2341,7 @@ The following topics provide more details: [[api-guide-resources-task-executions-current-count-request-structure]] ===== Request Structure -include::{snippets}/task-executions-documentation/launch-task-current-count/http-request.adoc[] +include::{snippets}/task-executions-documentation/get-task-current-count/http-request.adoc[] [[api-guide-resources-task-executions-current-count-request-parameters]] ===== Request Parameters @@ -2073,12 +2351,12 @@ [[api-guide-resources-task-executions-current-count-example-request]] ===== Example Request -include::{snippets}/task-executions-documentation/launch-task-current-count/curl-request.adoc[] +include::{snippets}/task-executions-documentation/get-task-current-count/curl-request.adoc[] [[api-guide-resources-task-executions-current-count-response-structure]] ===== Response Structure -include::{snippets}/task-executions-documentation/launch-task-current-count/http-response.adoc[] +include::{snippets}/task-executions-documentation/get-task-current-count/http-response.adoc[] [[api-guide-resources-job-executions]] @@ -2123,7 +2401,7 @@ include::{snippets}/job-executions-documentation/list-job-executions/http-reques [[api-guide-resources-job-executions-list-request-parameters]] ===== Request Parameters -include::{snippets}/job-executions-documentation/list-job-executions/request-parameters.adoc[] +include::{snippets}/job-executions-documentation/list-job-executions/query-parameters.adoc[] @@ -2162,7 +2440,7 @@ include::{snippets}/job-executions-documentation/list-thin-job-executions/http-r [[api-guide-resources-job-executions-thin-job-execution-list-request-parameters]] ===== Request Parameters -include::{snippets}/job-executions-documentation/list-thin-job-executions/request-parameters.adoc[] +include::{snippets}/job-executions-documentation/list-thin-job-executions/query-parameters.adoc[] @@ -2201,7 +2479,7 @@ include::{snippets}/job-executions-documentation/list-job-executions-by-name/htt [[api-guide-resources-job-executions-list-by-name-request-parameters]] ===== Request Parameters -include::{snippets}/job-executions-documentation/list-job-executions-by-name/request-parameters.adoc[] +include::{snippets}/job-executions-documentation/list-job-executions-by-name/query-parameters.adoc[] @@ -2240,7 +2518,7 @@ include::{snippets}/job-executions-documentation/list-thin-job-executions-by-nam [[api-guide-resources-job-executions-thin-list-by-name-request-parameters]] ===== Request Parameters -include::{snippets}/job-executions-documentation/list-thin-job-executions-by-name/request-parameters.adoc[] +include::{snippets}/job-executions-documentation/list-thin-job-executions-by-name/query-parameters.adoc[] @@ -2279,7 +2557,7 @@ include::{snippets}/job-executions-documentation/list-thin-job-executions-by-dat [[api-guide-resources-job-executions-thin-list-by-date-request-parameters]] ===== Request Parameters -include::{snippets}/job-executions-documentation/list-thin-job-executions-by-date/request-parameters.adoc[] +include::{snippets}/job-executions-documentation/list-thin-job-executions-by-date/query-parameters.adoc[] @@ -2318,7 +2596,7 @@ include::{snippets}/job-executions-documentation/list-thin-job-executions-by-job [[api-guide-resources-job-executions-thin-list-by-job-instance-id-request-parameters]] ===== Request Parameters -include::{snippets}/job-executions-documentation/list-thin-job-executions-by-job-instance-id/request-parameters.adoc[] +include::{snippets}/job-executions-documentation/list-thin-job-executions-by-job-instance-id/query-parameters.adoc[] @@ -2357,7 +2635,7 @@ include::{snippets}/job-executions-documentation/list-thin-job-executions-by-tas [[api-guide-resources-job-executions-thin-list-by-task-execution-id-request-parameters]] ===== Request Parameters -include::{snippets}/job-executions-documentation/list-thin-job-executions-by-task-execution-id/request-parameters.adoc[]
+include::{snippets}/job-executions-documentation/list-thin-job-executions-by-task-execution-id/query-parameters.adoc[] @@ -2441,7 +2719,7 @@ include::{snippets}/job-executions-documentation/job-stop/path-parameters.adoc[] [[api-guide-resources-job-executions-stop-request-parameters]] ===== Request parameters -include::{snippets}/job-executions-documentation/job-stop/request-parameters.adoc[] +include::{snippets}/job-executions-documentation/job-stop/query-parameters.adoc[] @@ -2484,7 +2762,7 @@ include::{snippets}/job-executions-documentation/job-restart/path-parameters.ado [[api-guide-resources-job-executions-restart-request-parameters]] ===== Request Parameters -include::{snippets}/job-executions-documentation/job-restart/request-parameters.adoc[] +include::{snippets}/job-executions-documentation/job-restart/query-parameters.adoc[] @@ -2536,7 +2814,7 @@ include::{snippets}/job-instances-documentation/list-job-instances/http-request. [[api-guide-resources-job-instances-list-request-parameters]] ===== Request Parameters -include::{snippets}/job-instances-documentation/list-job-instances/request-parameters.adoc[] +include::{snippets}/job-instances-documentation/list-job-instances/query-parameters.adoc[] @@ -2632,7 +2910,7 @@ include::{snippets}/job-step-executions-documentation/list-step-executions-for-j [[api-guide-resources-job-step-executions-list-request-parameters]] ===== Request Parameters -include::{snippets}/job-step-executions-documentation/list-step-executions-for-job/request-parameters.adoc[] +include::{snippets}/job-step-executions-documentation/list-step-executions-for-job/query-parameters.adoc[] @@ -2732,8 +3010,6 @@ include::{snippets}/job-step-executions-documentation/step-progress/curl-request include::{snippets}/job-step-executions-documentation/step-progress/http-response.adoc[] - - [[api-guide-resources-runtime-information-applications]] === Runtime Information about Applications @@ -2945,7 +3221,7 @@ include::{snippets}/task-logs-documentation/get-logs-by-task-id/http-request.ado [[api-guide-resources-task-logs-by-task-id-request-parameters]] ===== Request Parameters -include::{snippets}/task-logs-documentation/get-logs-by-task-id/request-parameters.adoc[] +include::{snippets}/task-logs-documentation/get-logs-by-task-id/query-parameters.adoc[] [[api-guide-resources-task-logs-by-task-id-example-request]] ===== Example Request @@ -2957,3 +3233,34 @@ include::{snippets}/task-logs-documentation/get-logs-by-task-id/curl-request.ado ===== Response Structure include::{snippets}/task-logs-documentation/get-logs-by-task-id/http-response.adoc[] + +[[api-guide-openapi]] +== OpenAPI + +The https://springdoc.org/#Introduction[Springdoc] library is integrated with the server in an opt-in fashion. Once enabled, it provides OpenAPI3 documentation and a Swagger UI. + +To enable it, set the following properties in your `application.yml` before launching the server: +[source,yaml] +---- +springdoc: + api-docs: + enabled: true + swagger-ui: + enabled: true +---- +The properties can also be set on the command line: +[source,shell] +---- +-Dspringdoc.api-docs.enabled=true -Dspringdoc.swagger-ui.enabled=true +---- +or as environment variables: +[source,shell] +---- +SPRINGDOC_APIDOCS_ENABLED=true +SPRINGDOC_SWAGGERUI_ENABLED=true +---- + +Once enabled, the OpenAPI3 docs and Swagger UI are available at the `/v3/api-docs` and `/swagger-ui/index.html` URIs, respectively (e.g. http://localhost:9393/v3/api-docs). + +TIP: The Swagger UI will initially be blank. Type "/v3/api-docs/" in the "Explore" bar and click "Explore". + +TIP: If you try out the APIs in the Swagger UI and get errors related to `"No property string found for type"`, try replacing the **pageable** parameter with `{ }` or removing its `"sort"` attribute. + +A plethora of https://springdoc.org/#properties[OpenAPI] and https://springdoc.org/#swagger-ui-properties[Swagger UI] properties are available to configure the feature.
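As a quick check that the opt-in worked, the generated document can be fetched from the command line; this minimal sketch assumes a local server on the default port 9393 and uses `jq` (any JSON viewer works) to list the documented paths.

```shell
# Hedged sketch: confirm the OpenAPI endpoint is live and list the documented paths.
curl -s http://localhost:9393/v3/api-docs | jq '.paths | keys'
```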
\ No newline at end of file diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-building.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-building.adoc deleted file mode 100644 index 25628378e6..0000000000 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-building.adoc +++ /dev/null @@ -1,81 +0,0 @@ -[appendix] -[[building]] -== Building - -This appendix describes how to build Spring Cloud Data Flow. - -To build the source, you need to install JDK 1.8. - -The build uses the Maven wrapper so that you do not have to install a specific version of Maven. - -The main build command is as follows: - -==== -[source,bash] ----- -$ ./mvnw clean install ----- -==== - -To speed up the build, you can add `-DskipTests` to avoid running the tests. - -NOTE: You can also install Maven (>=3.3.3) yourself and run the `mvn` command in place of `./mvnw` in the examples below. -If you do that, you also might need to add `-P spring` if your local Maven settings do not contain repository declarations for Spring pre-release artifacts. - -NOTE: You might need to increase the amount of memory available to Maven by setting a `MAVEN_OPTS` environment variable with a value similar to `-Xmx512m -XX:MaxPermSize=128m`. -We try to cover this in the `.mvn` configuration, so, if you find you have to do it to make a build succeed, please raise a ticket to get the settings added to source control. - -=== Documentation - -There is a `full` profile that generates documentation. You can build only the documentation by using the following command: - -==== -[source,bash] ----- -$ ./mvnw clean package -DskipTests -P full -pl {project-artifactId} -am ----- -==== - -=== Working with the Code - -If you do not have a favorite IDE, we recommend that you use https://spring.io/tools[Spring Tools Suite] or https://www.eclipse.org[Eclipse] when working with the code. -We use the https://www.eclipse.org/m2e/[m2eclipse] Eclipse plugin for Maven support. -Other IDEs and tools generally also work without issue. - - - -==== Importing into Eclipse with m2eclipse - -We recommend the https://www.eclipse.org/m2e/[m2eclipe] eclipse plugin when working with Eclipse. -If you do not already have m2eclipse installed, it is available from the Eclipse marketplace. - -Unfortunately, m2e does not yet support Maven 3.3. -Consequently, once the projects are imported into Eclipse, you also need to tell m2eclipse to use the `.settings.xml` file for the projects. -If you do not do this, you may see many different errors related to the POMs in the projects. -To do so: - -. Open your Eclipse preferences. -. Expand the *Maven preferences*. -. Select *User Settings*. -. In the *User Settings* field, click *Browse* and navigate to the Spring Cloud project you imported. -. Select the `.settings.xml` file in that project. -. Click *Apply*. -. Click *OK*. - -NOTE: Alternatively, you can copy the repository settings from Spring Cloud's https://github.com/spring-cloud/spring-cloud-build/blob/master/.settings.xml[`.settings.xml`] file into your own `~/.m2/settings.xml`.
- - - -==== Importing into Eclipse without m2eclipse - -If you prefer not to use m2eclipse, you can generate Eclipse project metadata by using the following command: - -==== -[source,bash] ----- -$ ./mvnw eclipse:eclipse ----- -==== - -You can import the generated Eclipse projects by selecting *Import existing projects* -from the *File* menu. diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-contributing.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-contributing.adoc deleted file mode 100644 index a5fda2cdc7..0000000000 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-contributing.adoc +++ /dev/null @@ -1,32 +0,0 @@ -[appendix] -[[contributing]] -== Contributing - -Spring Cloud is released under the non-restrictive Apache 2.0 license and follows a very standard Github development process, using Github tracker for issues and merging pull requests into the master branch. -If you want to contribute even something trivial, please do not hesitate, but do please follow the guidelines in this appendix. - - - -=== Sign the Contributor License Agreement - -Before we accept a non-trivial (anything more than correcting a typographical error) patch or pull request, we need you to sign the https://cla.pivotal.io[contributor's agreement]. -Signing the contributor's agreement does not grant anyone commit rights to the main repository, but it does mean that we can accept your contributions, and you get an author credit if we do. -Active contributors might be asked to join the core team and be given the ability to merge pull requests. - - - -=== Code Conventions and Housekeeping - -None of the following guidelines is essential for a pull request, but they all help your fellow developers understand and work with your code. -They can also be added after the original pull request but before a merge. - -* Use the Spring Framework code format conventions. If you use Eclipse, you can import formatter settings by using the `eclipse-code-formatter.xml` file from the https://github.com/spring-cloud/spring-cloud-build/blob/master/spring-cloud-dependencies-parent/eclipse-code-formatter.xml[Spring Cloud Build] project. -If you use IntelliJ, you can use the https://plugins.jetbrains.com/plugin/6546[Eclipse Code Formatter Plugin] to import the same file. -* Make sure all new `.java` files have a simple Javadoc class comment with at least an `@author` tag identifying you, and preferably at least a paragraph describing the class's purpose. -* Add the ASF license header comment to all new `.java` files (to do so, copy it from existing files in the project). -* Add yourself as an `@author` to the .java files that you modify substantially (more than cosmetic changes). -* Add some Javadocs and, if you change the namespace, some XSD doc elements. -* A few unit tests would help a lot as well. Someone has to do it, and your fellow developers appreciate the effort. -* If no one else uses your branch, rebase it against the current master (or other target branch in the main project). -* When writing a commit message, follow https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html[these conventions]. -If you fix an existing issue, add `Fixes gh-XXXX` (where XXXX is the issue number) at the end of the commit message. 
diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-create-containers.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-create-containers.adoc new file mode 100644 index 0000000000..91714cad5b --- /dev/null +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-create-containers.adoc @@ -0,0 +1,120 @@ +[[create-containers]] + +=== Create containers for architectures that are not yet supported +On macOS with M1 (Apple silicon) hardware, the performance of amd64/x86_64 containers is unacceptable. +We provide a set of scripts that can be used to download specific versions of published artifacts. +We also provide a script that creates a container for the host platform from the downloaded artifact. +In the various projects, you will find them in the `src/local` or `local` folders. + +[cols="1,4,6"] +|=== +|Project | Scripts | Notes + +| [.small]#Data Flow# +a| +[.small]#`src/local/download-apps.sh`# + +[.small]#`src/local/create-containers.sh`# + +a|[.small]#Download or create container for: `spring-cloud-dataflow-server`,# + +[.small]#`spring-cloud-dataflow-composed-task-runner`,# + +[.small]#`spring-cloud-dataflow-single-step-batch-job`,# + +[.small]#`spring-cloud-dataflow-tasklauncher-sink-kafka`,# + +[.small]#`spring-cloud-dataflow-tasklauncher-sink-rabbit`# + + +|[.small]#Skipper# +a| +[.small]#`local/download-app.sh`# + +[.small]#`local/create-container.sh`# +|[.small]#Download or create container for: `spring-cloud-skipper-server`# + +|[.small]#Stream Applications# +a| +[.small]#`local/download-apps.sh`# + +[.small]#`local/create-containers.sh`# + +[.small]#`local/pack-containers.sh`# +| `create-containers.sh` uses `jib` + +`pack-containers.sh` uses `pack` +|=== + +==== Scripts in `spring-cloud-dataflow` +===== `src/local/download-apps.sh` +Downloads all applications needed by `create-containers.sh` from the Maven repository. + +*If the timestamp of the snapshots matches, the download is skipped.* + +Usage: `download-apps.sh [version]` + +* `version` is the dataflow-server version, like `2.11.3`. The default is `2.11.3-SNAPSHOT`. + +===== `src/local/create-containers.sh` +Creates all containers and pushes them to the local Docker registry. + +This script requires link:https://github.com/GoogleContainerTools/jib/tree/master/jib-cli[jib-cli]. + +Usage: `create-containers.sh [version] [jre-version]` + +* `version` is the dataflow-server version, like `2.11.3`. The default is `2.11.3-SNAPSHOT`. +* `jre-version` should be one of 11 or 17. The default is 11.
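For a concrete end-to-end run of the two Data Flow scripts, here is a minimal sketch, assuming a `spring-cloud-dataflow` checkout, a local Docker daemon, and the positional arguments documented above.

```shell
# Hedged sketch: fetch the 2.11.3 artifacts, then build and push JDK 17 images locally.
./src/local/download-apps.sh 2.11.3
./src/local/create-containers.sh 2.11.3 17
```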
+ +==== Scripts in `spring-cloud-skipper` + +===== `local/download-app.sh` +Downloads all applications needed by `create-container.sh` from the Maven repository. + +*If the timestamp of the snapshots matches, the download is skipped.* + +Usage: `download-app.sh [version]` + +* `version` is the skipper version, like `2.11.3`. The default is `2.11.3-SNAPSHOT`. + +===== `local/create-container.sh` +Creates all containers and pushes them to the local Docker registry. +This script requires link:https://github.com/GoogleContainerTools/jib/tree/master/jib-cli[jib-cli]. + +Usage: `create-container.sh [version] [jre-version]` + +* `version` is the skipper version, like `2.11.3`. The default is `2.11.3-SNAPSHOT`. +* `jre-version` should be one of 11 or 17. + +==== Scripts in `stream-applications` + +===== `local/download-apps.sh` + +Downloads all applications needed by `create-containers.sh` from the Maven repository. + +*If the timestamp of the snapshots matches, the download is skipped.* + +Usage: `download-apps.sh [version] [broker] [filter]` + +* `version` is the stream applications version, like `4.0.0`. The default is `4.0.1-SNAPSHOT`. +* `broker` is one of rabbitmq, rabbit, or kafka. +* `filter` is the name of an application, or a partial name that will be matched. + +===== `local/create-containers.sh` +Creates all containers and pushes them to the local Docker registry. + +This script requires link:https://github.com/GoogleContainerTools/jib/tree/master/jib-cli[jib-cli]. + +Usage: `create-containers.sh [version] [broker] [jre-version] [filter]` + +* `version` is the stream-applications version, like `4.0.0`. The default is `4.0.1-SNAPSHOT`. +* `broker` is one of rabbitmq, rabbit, or kafka. +* `jre-version` must be 17. +* `filter` is the name of an application, or a partial name that will be matched. + +If a file required to create a container is not present, the script skips that one. + +===== `local/pack-containers.sh` +Creates all containers and pushes them to the local Docker registry. + +This script requires link:https://buildpacks.io/docs/tools/pack[paketo pack]. + +Usage: `pack-containers.sh [version] [broker] [jre-version] [filter]` + +* `version` is the stream-applications version, like `4.0.0`. The default is `4.0.1-SNAPSHOT`. +* `broker` is one of rabbitmq, rabbit, or kafka. +* `jre-version` must be 17. +* `filter` is the name of an application, or a partial name that will be matched. + +If a file required to create a container is not present, the script skips that one. + +NOTE: If any parameter is provided, all parameters to its left are required.
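Chaining the stream-applications scripts looks similar; a minimal sketch, assuming a `stream-applications` checkout and using the documented positional arguments (version, broker, JRE version, filter) to build only the matching application.

```shell
# Hedged sketch: download the kafka 'log' application at 4.0.0, then build its JRE 17 image with pack.
./local/download-apps.sh 4.0.0 kafka log
./local/pack-containers.sh 4.0.0 kafka 17 log
```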
The examples below assume you have cloned the `spring-cloud-dataflow` repository and are executing the scripts from `src/add-deps`.
+
+==== JAR File
+
+_We suggest you publish the updated jar to a private Maven repository, and that the Maven coordinates from that private repository are then used to register the application with SCDF._
+
+===== Example
+
+This example:
+
+* assumes the jar is downloaded to `${appFolder}/${appName}-${appVersion}.jar`
+* adds the dependencies and then publishes the jar to Maven local.
+
+[source,shell]
+....
+./gradlew -i publishToMavenLocal \
+    -P appFolder="." \
+    -P appGroup="org.springframework.cloud" \
+    -P appName="spring-cloud-dataflow-server" \
+    -P appVersion="2.11.3" \
+    -P depFolder="./extra-libs"
+....
+
+NOTE: Use the `publishMavenPublicationToMavenRepository` task to publish to a remote repository. Update `gradle.properties` with the remote repository details. Alternatively, move `repoUser` and `repoPassword` to `~/.gradle/gradle.properties`.
+
+==== Containers
+
+To create a container, we suggest using the https://buildpacks.io/docs/for-platform-operators/how-to/integrate-ci/pack[Paketo pack CLI] to containerize the jar created in the previous step.
+
+[source, shell]
+....
+REPO=springcloud/spring-cloud-dataflow-server
+TAG=2.11.3
+JAR=build/spring-cloud-dataflow-server-${TAG}.jar
+JAVA_VERSION=8
+pack build --builder gcr.io/paketo-buildpacks/builder:base \
+  --path "$JAR" \
+  --trust-builder --verbose \
+  --env BP_JVM_VERSION=${JAVA_VERSION} "$REPO:$TAG-jdk${JAVA_VERSION}-extra"
+....
+
+NOTE: Publish the container to a private container registry and register the application's Docker URI with SCDF.
diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-howto.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-howto.adoc
index 41e131b6fd..d1e6a6b337 100644
--- a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-howto.adoc
+++ b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-howto.adoc
@@ -9,28 +9,37 @@ That is also a great place to ask new questions (use the `spring-cloud-dataflow`
 We are also more than happy to extend this section. If you want to add a "`how-to`", you can send us a {github-code}[pull request].
 
-
-
 === Configure Maven Properties
+If applications are resolved by using a Maven repository, you may want to configure the underlying resolver.
 You can set the Maven properties, such as the local Maven repository location, remote Maven repositories, authentication credentials, and proxy server properties through command-line properties when you start the Data Flow server.
 Alternatively, you can set the properties by setting the `SPRING_APPLICATION_JSON` environment property for the Data Flow server.
-The remote Maven repositories need to be configured explicitly if the applications are resolved by using the Maven repository, except for a `local` Data Flow server.
-The other Data Flow server implementations (which use Maven resources for application artifacts resolution) have no default value for remote repositories.
-The `local` server has `https://repo.spring.io/libs-snapshot` as the default remote repository.
+For all Data Flow server installations, the following remote Maven repositories are configured by default:
+
+* Maven Central (`https://repo.maven.apache.org/maven2`)
+* Spring Snapshots (`https://repo.spring.io/snapshot`)
+* Spring Milestones (`https://repo.spring.io/milestone`)
+
+If a default repository is already explicitly configured (an exact match on the repository URL), it is not added again.
+
+If the applications exist on a remote repository other than the pre-configured ones, that remote repository must be configured explicitly; it is then added to the pre-configured default list.
+
+TIP: To skip the automatic default repositories behavior altogether, set the `maven.include-default-remote-repos` property to `false`.
 
 To pass the properties as command-line options, run the server with a command similar to the following:
 
 ====
-[source,bash]
+[source,shell]
 ----
-$ java -jar .jar --maven.localRepository=mylocal
---maven.remote-repositories.repo1.url=https://repo1
---maven.remote-repositories.repo1.auth.username=repo1user
---maven.remote-repositories.repo1.auth.password=repo1pass
---maven.remote-repositories.repo2.url=https://repo2
--maven.proxy.host=proxyhost
---maven.proxy.port=9018
--maven.proxy.auth.username=proxyuser
+java -jar .jar --maven.localRepository=mylocal \
+--maven.remote-repositories.repo1.url=https://repo1 \
+--maven.remote-repositories.repo1.auth.username=repo1user \
+--maven.remote-repositories.repo1.auth.password=repo1pass \
+--maven.remote-repositories.repo2.url=https://repo2 \
+--maven.proxy.host=proxyhost \
+--maven.proxy.port=9018 \
+--maven.proxy.auth.username=proxyuser \
 --maven.proxy.auth.password=proxypass
 ----
 ====
@@ -78,16 +87,34 @@ SPRING_APPLICATION_JSON='{
 ----
 ====
 
-NOTE: Depending on the Spring Cloud Data Flow server implementation, you may have to pass the environment properties by using the platform specific environment-setting capabilities. For instance, in Cloud Foundry, you would pass them as `cf set-env SPRING_APPLICATION_JSON`.
+You can also set the properties as individual environment variables:
+====
+[source,bash]
+----
+export MAVEN_REMOTEREPOSITORIES_REPO1_URL=https://repo1
+export MAVEN_REMOTEREPOSITORIES_REPO1_AUTH_USERNAME=repo1user
+export MAVEN_REMOTEREPOSITORIES_REPO1_AUTH_PASSWORD=repo1pass
+export MAVEN_REMOTEREPOSITORIES_REPO2_URL=https://repo2
+export MAVEN_PROXY_HOST=proxyhost
+export MAVEN_PROXY_PORT=9018
+export MAVEN_PROXY_AUTH_USERNAME=proxyuser
+export MAVEN_PROXY_AUTH_PASSWORD=proxypass
+----
+====
 
 === Troubleshooting
 
 This section covers how to troubleshoot Spring Cloud Data Flow on your platform of choice. See the Troubleshooting sections of the microsite for link:https://dataflow.spring.io/docs/stream-developer-guides/troubleshooting/[Stream] and link:https://dataflow.spring.io/docs/batch-developer-guides/troubleshooting/[Batch] processing.
 
+include::appendix-extend-classpath.adoc[]
+include::appendix-create-containers.adoc[]
+include::appendix-local-k8s-development.adoc[]
 
 [[faqs]]
 === Frequently Asked Questions
 
 In this section, we review the frequently asked questions for Spring Cloud Data Flow.
 See the https://dataflow.spring.io/docs/resources/faq/[Frequently Asked Questions] section of the microsite for more information.
+
+// TODO move the FAQ to reference guide.
diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-identity-provider-azure.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-identity-provider-azure.adoc
index 551f74fee2..8ea1d8c26b 100644
--- a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-identity-provider-azure.adoc
+++ b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-identity-provider-azure.adoc
@@ -247,6 +247,8 @@ $ java -jar spring-cloud-dataflow-shell.jar \
 ----
 ====
 
+NOTE: A Public Client requires *App Roles* whose values match the internal permissions `[dataflow.create, dataflow.deploy, dataflow.destroy, dataflow.manage, dataflow.modify, dataflow.schedule, dataflow.view]` to ensure they are added to the access token.
+
 Starting a public shell and (optionally) pass credentials as options:
 
 ====
diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-local-k8s-development.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-local-k8s-development.adoc
new file mode 100644
index 0000000000..6f83aa4f84
--- /dev/null
+++ b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-local-k8s-development.adoc
@@ -0,0 +1,246 @@
+[[local-k8s-development]]
+
+=== Configure Kubernetes for local development or testing
+
+==== Prerequisites
+
+You will need to install kubectl, and then kind or minikube, for a local cluster.
+
+All the examples assume you have cloned the `spring-cloud-dataflow` repository and are executing the scripts from `deploy/k8s`.
+
+On macOS, you may need to install `realpath` from link:https://ports.macports.org/port/realpath/[MacPorts] or with `brew install realpath`.
+
+NOTE: The scripts require a shell like `bash` or `zsh` and should work on Linux, WSL 2 or macOS.
+
+==== Steps
+* Choose a Kubernetes provider: kind, minikube, or a remote GKE or TMC cluster.
+* Decide on the namespace to use for deployment, if not `default`.
+* Configure Kubernetes and the load balancer.
+* Choose a broker with `export BROKER=kafka|rabbitmq`.
+* Build or pull container images for Skipper and the Data Flow Server.
+* Deploy and launch Spring Cloud Data Flow.
+* Export the Data Flow Server address to the environment.
+
+===== Kubernetes Provider
+
+_How do I choose between minikube and kind? kind generally provides quicker setup and teardown than minikube. There is little difference in performance between the two, apart from minikube's ability to configure limits on CPUs and memory when deploying. So if you have memory constraints or need to enforce memory limits, minikube is the better option._
+
+===== Kubectl
+
+You will need to link:https://kubernetes.io/docs/tasks/tools/[install] kubectl in order to configure the Kubernetes cluster.
+
+===== Kind
+
+kind is Kubernetes in Docker and is ideal for local development.
+
+* link:https://kind.sigs.k8s.io/docs/user/quick-start/[Installation]
+* link:https://kind.sigs.k8s.io/docs/user/loadbalancer/[LoadBalancer]
+
+The LoadBalancer will be installed by the `configure-k8s.sh` script but requires an update to a YAML file to provide the address range available to the LoadBalancer.
+
+===== Minikube
+
+Minikube uses one of a selection of drivers to provide a virtualization environment.
+
+* link:https://minikube.sigs.k8s.io/docs/start/[Installation]
+* link:https://minikube.sigs.k8s.io/docs/start/#loadbalancer-deployments[LoadBalancer]
+
+NOTE: Delete any existing Minikube installation first: `minikube delete`
+
+===== Remote TMC Cluster
+
+link:https://tanzu.vmware.com/mission-control[Tanzu Mission Control]
+
+==== Building and loading containers
+
+For local development, you need control of the containers used in the local environment.
+
+To manage the specific versions of the Data Flow and Skipper containers, set the `SKIPPER_VERSION` and `DATAFLOW_VERSION` environment variables and then invoke `./images/pull-dataflow.sh` and `./images/pull-skipper.sh`. If you want to use locally built applications, invoke `./images/build-skipper-image.sh` and `./images/build-dataflow.sh` instead.
+
+==== Configure k8s environment
+
+You can invoke one of the following scripts to choose the type of installation you are targeting:
+
+[source,shell]
+----
+./k8s/use-kind.sh [] [] []
+./k8s/use-mk-docker.sh [] [] []
+./k8s/use-mk-kvm2.sh [] [] []
+./k8s/use-mk.sh [] [] [] # <1>
+./k8s/use-tmc.sh [] [] []
+./k8s/use-gke.sh [] [] []
+----
+<1> The driver must be one of `kvm2`, `docker`, `vmware`, `virtualbox`, `vmwarefusion` or `hyperkit`. `docker` is the recommended option for local development.
+
+NOTE: The namespace will be `default` if not provided. The default database is `postgresql` and the default broker is `kafka`.
+
+Since these scripts export environment variables, they need to be executed with `source`, as in the following example:
+
+[source,shell]
+....
+source ./k8s/use-mk-docker.sh postgresql rabbitmq --namespace test-ns
+....
+
+===== TMC or GKE Cluster in Cloud
+
+The cluster must exist before use, and you should use the relevant CLI to log in before executing `source ./k8s/use-gke.sh`.
+
+===== Create Local Cluster
+
+The following script will create the local cluster:
+
+[source,shell]
+....
+# Optionally add to control cpu and memory allocation.
+export MK_ARGS="--cpus=8 --memory=12g"
+./k8s/configure-k8s.sh
+....
+
+* For *kind*, follow the instructions to update `./k8s/yaml/metallb-configmap.yaml` and then apply it using `kubectl apply -f ./k8s/yaml/metallb-configmap.yaml`.
+
+* For *minikube*, launch a new shell and execute `minikube tunnel`.
+
+===== Deploy Spring Cloud Data Flow
+
+The `use-*` scripts will configure the values of `BROKER` and `DATABASE`.
+
+====== Configure Broker
+[source,shell]
+....
+export BROKER= # <1>
+....
+<1> one of `kafka` or `rabbitmq`
+
+====== Configure Database
+
+[source,shell]
+....
+export DATABASE= # <1>
+....
+<1> one of `mariadb` or `postgresql`
+
+Docker credentials need to be configured for Kubernetes to pull the various container images.
+
+For Docker Hub, you can create a free personal account and use a personal access token as your password.
+
+Test your Docker login using `./k8s/docker-login.sh`:
+
+[source,shell]
+....
+export DOCKER_SERVER=https://docker.io
+export DOCKER_USER=
+export DOCKER_PASSWORD=
+export DOCKER_EMAIL=
+....
+
+Set the versions of Spring Cloud Data Flow and Skipper.
+
+This example shows the versions of the current development snapshot:
+
+[source,shell]
+....
+export DATAFLOW_VERSION=2.11.5-SNAPSHOT
+export SKIPPER_VERSION=2.11.5-SNAPSHOT
+....
+
+Before you can install SCDF, you will need to pull the following images to ensure they are present for uploading to the k8s cluster.
+
+You can configure the following environment variables before running `pull-app-images` and `install-scdf`:
+
+* `STREAM_APPS_RT_VERSION` Stream Apps Release Train Version. _Default is 2022.0.0_.
+* `STREAM_APPS_VERSION` Stream Apps Version. _Default is 4.0.0_.
+
+Use:
+
+[source,shell]
+....
+./images/pull-app-images.sh
+./images/pull-dataflow.sh
+./images/pull-skipper.sh
+./images/pull-composed-task-runner.sh
+....
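+
+For example, a minimal sketch of pinning the stream applications release train and version to their documented defaults (the two variables from the list above) before running `install-scdf`:
+
+[source,shell]
+....
+export STREAM_APPS_RT_VERSION=2022.0.0
+export STREAM_APPS_VERSION=4.0.0
+....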
+
+[source,shell]
+....
+./k8s/install-scdf.sh
+source ./k8s/export-dataflow-ip.sh
+....
+
+NOTE: You can now execute scripts from `./shell` to deploy some simple streams and tasks. You can also run `./shell/shell.sh` to run the Spring Cloud Data Flow Shell.
+
+If you want to start fresh, you can use the following to delete the SCDF deployment; then run `./k8s/install-scdf.sh` to install it again.
+
+===== Delete the deployment from the cluster
+
+[source,shell]
+....
+./k8s/delete-scdf.sh
+....
+
+===== Delete the cluster
+
+This script will also delete the TMC cluster if you have configured one.
+
+[source,shell]
+....
+./k8s/destroy-k8s.sh
+....
+
+==== Utilities
+The following list of utilities may prove useful.
+
+[cols="2m,8"]
+|===
+|Name | Description
+
+| link:https://k9scli.io/[k9s] | k9s is a text-based monitor to explore the Kubernetes cluster.
+| link:https://github.com/boz/kail[kail] | Extract and tail the logs of various pods based on various naming criteria.
+|===
+
+===== `kail`
+
+* Using kail to log activity related to a specific stream:
+
+[source,shell]
+----
+kail --label=spring-group-id=
+----
+* Using kail to log all pods in a specific namespace:
+
+[source,shell]
+----
+kail --ns=
+----
+
+==== Scripts
+
+Some of the scripts apply to local containers as well and can be found in `src/local`; the Kubernetes-specific scripts are in `deploy/k8s`.
+
+[cols="5m,10"]
+|===
+|Script |Description
+
+| ./images/build-app-images.sh | Build all images of the Restaurant Sample Stream Apps.
+| ./images/pull-app-images.sh | Pull all images of the Restaurant Sample Stream Apps from Docker Hub.
+| ./images/pull-dataflow.sh | Pull dataflow from DockerHub based on `DATAFLOW_VERSION`.
+| ./images/pull-scdf-pro.sh | Pull Dataflow Pro from Tanzu Network based on `SCDF_PRO_VERSION`.
+| ./images/pull-skipper.sh | Pull Skipper from DockerHub based on the `SKIPPER_VERSION`.
+| ./images/build-dataflow-image.sh | Build a docker image from the local repo of Dataflow.
+| ./images/build-scdf-pro-image.sh | Build a docker image from the local repo of Dataflow Pro. Set `USE_PRO=true` in the environment to use Dataflow Pro.
+| ./images/build-skipper-image.sh | Build a docker image from the local repo of Skipper.
+| ./k8s/configure-k8s.sh | Configure the Kubernetes environment based on your configuration of `K8S_DRIVER`.
+| ./k8s/delete-scdf.sh | Delete all Kubernetes resources created by the deployment.
+| ./k8s/destroy-k8s.sh | Delete the cluster, kind or minikube.
+| ./k8s/export-dataflow-ip.sh | Export the URL of the Data Flow server to `DATAFLOW_IP`.
+| ./k8s/export-http-url.sh | Export the URL of the HTTP source of a specific flow, by name, to `HTTP_APP_URL`.
+| ./k8s/install-scdf.sh | Configure and deploy all the containers for Spring Cloud Dataflow.
+| ./k8s/load-images.sh | Load all container images required by tests into kind or minikube, to ensure you have control over what is used.
+| ./k8s/load-image.sh | Load a specific container image into local kind or minikube.
+| src/local/local-k8s-acceptance-tests.sh | Execute acceptance tests against the cluster that `DATAFLOW_IP` points to.
+| ./k8s/register-apps.sh | Register the Task and Stream apps used by the unit tests.
+|===
+
+IMPORTANT: Please report any errors with the scripts, along with detailed information about the relevant environment.
diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix.adoc index 9104731f46..8b4a2a16c4 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/appendix.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/appendix.adoc @@ -12,6 +12,6 @@ Having trouble with Spring Cloud Data Flow, We'd like to help! include::appendix-dataflow-template.adoc[] include::appendix-howto.adoc[] -include::appendix-building.adoc[] -include::appendix-contributing.adoc[] include::appendix-identity-providers.adoc[] +include::spring-boot-3x.adoc[] +// include::appendix-development-tasks.adoc[] diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/applications.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/applications.adoc index cd56940c6d..2d7028a8d6 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/applications.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/applications.adoc @@ -3,126 +3,5 @@ [partintro] -- -A selection of pre-built link:https://cloud.spring.io/spring-cloud-stream-app-starters/[stream] and link:https://cloud.spring.io/spring-cloud-task-app-starters/[task or batch] starter applications for various data integration and processing scenarios to facilitate learning and experimentation. The table in the next section includes the pre-built applications at a glance. For more details, review how to <>. +A selection of pre-built applications for various data integration and processing scenarios to facilitate learning and experimentation can be found link:https://docs.spring.io/stream-applications/docs/current/reference/html/index.html#applications/[here]. -- - -== Available Applications -[width="100%",frame="topbot",options="header",subs=attributes] -|====================== -|Source |Processor |Sink |Task - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-sftp-source[sftp] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-tcp-client-processor[tcp-client] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-mqtt-sink[mqtt] -|link:https://docs.spring.io/spring-cloud-task-app-starters/docs/current/reference/htmlsingle/#_timestamp_task[timestamp] - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-jms-source[jms] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-scriptable-transform[scriptable-transform] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-log-sink[log] -|link:https://docs.spring.io/spring-cloud-task-app-starters/docs/current/reference/htmlsingle/#_composed_task_runner[composed-task-runner] - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-ftp-source[ftp] -|link:{scs-app-starters-docs-htmlsingle}/#spring-clound-stream-modules-transform-processor[transform] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-throughput-sink[throughput] -|link:https://docs.spring.io/spring-cloud-task-app-starters/docs/current/reference/htmlsingle/#_timestamp_batch_task[timestamp-batch] - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-time-source[time] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-header-enricher-processor[header-enricher] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-mongodb-sink[mongodb] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-load-generator-source[load-generator] 
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-python-http-processor[python-http] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-ftp-sink[ftp] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-syslog-source[syslog] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-twitter-sentiment-processor[twitter-sentiment] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-jdbc-sink[jdbc] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-aws-s3-source[s3] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-splitter[splitter] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-cassandra-sink[cassandra] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-loggregator-source[loggregator] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-image-recognition-processor[image-recognition] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-router-sink[router] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-trigger-source[triggertask (deprecated)] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-bridge-processor[bridge] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-redis-sink[redis-pubsub] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-twitterstream-source[twitterstream] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-pmml-processor[pmml] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-file-sink[file] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-mongodb-source[mongodb] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-python-jython-processor[python-jython] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-websocket-sink[websocket] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-gemfire-cq-source[gemfire-cq] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-groovy-transform-processor[groovy-transform] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-aws-s3-sink[s3] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-http-source[http] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-httpclient-processor[httpclient] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-rabbit-sink[rabbit] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-rabbit-source[rabbit] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-filter-processor[filter] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-counter-sink[counter] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-tcp-source[tcp] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-pose-estimation-processor[pose-estimation] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-pgcopy-sink[pgcopy] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-trigger-source[trigger] -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-grpc-processor[grpc] -|link:https://github.com/spring-cloud-stream-app-starters/gpfdist[gpfdist] -| - -|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-mqtt-source[mqtt] 
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-groovy-filter-processor[groovy-filter]
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-sftp-sink[sftp]
-|
-
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-tcp-client-source[tcp-client]
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-aggregator-processor[aggregator]
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-task-launcher-dataflow-sink[task-launcher-dataflow]
-|
-
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-mail-source[mail]
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-counter-processor[counter]
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-hdfs-sink[hdfs]
-|
-
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-jdbc-source[jdbc]
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-tensorflow-processor[tensorflow]
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-tcp-sink[tcp]
-|
-
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-gemfire-source[gemfire]
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-tasklaunchrequest-transform[tasklaunchrequest-transform (deprecated)]
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-gemfire-sink[gemfire]
-|
-
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-file-source[file]
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-object-detection-processor[object-detection]
-|
-|
-
-|link:{scs-app-starters-docs-htmlsingle}/#spring-cloud-stream-modules-sftp-dataflow-source[sftp-dataflow]
-|
-|
-|
-|======================
diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-carvel.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-carvel.adoc
new file mode 100644
index 0000000000..67d50ab697
--- /dev/null
+++ b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-carvel.adoc
@@ -0,0 +1,266 @@
+[[configuration-carvel]]
+== Deployment using Carvel
+
+Deployment of a Carvel package requires the installation of tools and specific Kubernetes controllers. You then add the package repository to the cluster and install the application.
+
+For a local minikube or kind cluster, you can use xref:local-k8s-development[Configure Kubernetes for local development or testing] and follow the instructions up to the section _Deploy Spring Cloud Data Flow_.
+
+=== Required Tools
+
+* `kubectl` - Kubernetes CLI (install with `brew install kubectl`)
+* `carvel` - Packaging and deployment tools
+
+The Carvel CLI can be installed using:
+
+[source,shell]
+....
+wget -O- https://carvel.dev/install.sh | bash
+# or with curl...
+curl -L https://carvel.dev/install.sh | bash
+....
+
+Alternatively, follow the instructions at the bottom of the home page at link:https://carvel.dev/[carvel.dev].
+
+The following tools are used by the scripts:
+
+* `jq` - lightweight JSON parser
+* `yq` - lightweight YAML parser
+* `wget` - invokes HTTP requests
+* `dirname` - provides the directory part of a file name
+* `readlink` - provides the absolute path of a relative link
+
+NOTE: Some of these utilities are not installed on macOS or *nix by default but are available from MacPorts or Homebrew.
+
+=== Scripts
+
+These scripts assume you are connected to a Kubernetes cluster and `kubectl` is available.
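+
+A quick way to verify that assumption before running anything (standard kubectl commands; the context name depends on your cluster):
+
+[source,shell]
+....
+# Show which cluster kubectl currently targets
+kubectl config current-context
+# Confirm the cluster is reachable
+kubectl cluster-info
+....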
+ +[cols="3m,3,6a"] +|=== +|Name | Arguments |Descriptions + +| start-deploy.sh +| [scdf-type] [namespace] [release\|snapshot] +| Configures environmental variables needs for the rest of the scripts. `BROKER`, `NS` and `SCDF_TYPE` are set. The default `NS` is `scdf`. The namespace will be created if it doesn't exist by `setup-scdf-repo.sh`. The default `SCDF_TYPE` is `oss`. _release\|snapshot_ and _scdf-type_ will determine the value of `PACKAGE_VERSION` set. + +| prepare-cluster.sh +| N/A +| Installs cert-manager, secretgen-controller and kapp-controller + +| carvel-use-template.sh +| [scdf-type] (oss, pro) +| Creates `scdf-values.yml` in current directory based on `scdf-pro-values.yml` or `scdf-oss-values.yml` + +| carvel-import-secret.sh +| [secret-namespace] [--import\|--placeholder] +| Creates an import secret, placeholder or import using secretgen-controller. +| setup-scdf-repo.sh +| [scdf-type] (oss, pro) +| Creates the namespace and installs the relevant Carvel package and credentials. If the optional _scdf-type_ is not provided the environmental variable `SCDF_TYPE` will be used. + +| configure-prometheus-proxy.sh +| [step] +| Configures Spring Boot Actuator properties for Data Flow, Skipper, Streams and Tasks. Default `step` is 10s + +| configure-database.sh +| [password/secret-username-key] [secret-password-key] +| If only _secret-name_ is provided then _secret-username-key_ defaults to `username` and _secret-password-key_ defaults to `password`. + +The following 3 combinations are allowed after the _url_: + +* +* + +* + +| deploy-scdf.sh +| [app-name] +| Deploys the application using the package and `scdf-values.yml` in the current directory. +The default _app-name_ is `scdf-${SCDF_TYPE}`. + +| update-scdf.sh +| [app-name] +| Updated the deployed application using a modified values file. +The default _app-name_ is `scdf-${SCDF_TYPE}`. + +| export-dataflow-ip.sh +| N/A +| Will print the URL to access dataflow. If you use `source ./export-dataflow-ip.sh` it will export `DATAFLOW_URL` to be used by `register-apps.sh` + +| register-apps.sh +| [stream-application-version] +| _broker_ must be one of rabbit or kafka. +_stream-application-version_ is optional and will install the latest version. The latest version is 2021.1.2 + +|=== + +NOTE: Take note that the registration of application in the _pro_ version can take a few minutes since it retrieves all version information and metadata upfront. + +=== Preparation +You will need to prepare a values file named scdf-values.yml +The following steps will provide help. + +==== Prepare Configuration parameters + +Executing the following script will configure the environmental variables needed. + +[source,shell] +.... +source ./carvel/start-deploy.sh [scdf-type] [release|snapshot] +.... + +Where: + +* `broker` is one of rabbitmq or kafka +* `namespace` A valid Kubernetes namespace other than `default` +* `scdf-type` One of oss or pro. oss is the default. +* `release|snapshot` and `scdf-type` will determine the value of `PACKAGE_VERSION`. + +_*The best option to ensure using the type and version of package intended is to modify `deploy/versions.yaml`*_ + +The environmental variables can also be configured manually to override the values. + +[cols="3m,6,2"] +|=== +|Name |Description|Default + +|PACKAGE_VERSION +|Version of Carvel package. 
+| Release version
+
+|DATAFLOW_VERSION
+|Version of Spring Cloud Data Flow
+|2.11.2
+
+|DATAFLOW_PRO_VERSION
+|Version of Spring Cloud Data Flow Pro
+|1.6.1
+
+|SKIPPER_VERSION
+|Version of Spring Cloud Skipper
+|2.11.2
+
+|REGISTRY
+|URL and repository of the package registry (for example, the default `docker.io/springcloud`). This is used to prefix the Carvel repo and package.
+| `docker.io/springcloud`
+
+| BROKER
+| One of `kafka` or `rabbitmq`
+| `rabbitmq`
+
+| DATABASE
+| One of `mariadb` or `postgresql`. This only applies when you run `deploy-local-database.sh`.
+|`postgresql`
+
+| NS
+| A Kubernetes namespace other than `default`.
+| `scdf`
+
+| SCDF_TYPE
+| One of `oss` or `pro`.
+| `oss`
+
+|===
+
+NOTE: The above environment variables should only be provided if they differ from the defaults in `deploy/versions.yaml`.
+
+==== Prepare Configuration file
+
+Create a file named `scdf-values.yml` by executing:
+
+[source,shell]
+....
+./carvel/carvel-use-template.sh
+....
+
+Edit the file as needed to configure the deployment. The `deploy-local-*` scripts also update this file.
+
+_The scdf-type previously selected is used._
+
+=== Prepare cluster and add repository
+
+Log in to Docker and, when deploying Spring Cloud Data Flow Pro, optionally to `registry.packages.broadcom.com`:
+
+[source,shell]
+....
+# When deploying SCDF Pro.
+export TANZU_DOCKER_USERNAME=""
+export TANZU_DOCKER_PASSWORD=""
+docker login --username $TANZU_DOCKER_USERNAME --password $TANZU_DOCKER_PASSWORD registry.packages.broadcom.com

+# Always required to ensure you don't experience rate limiting with Docker Hub
+export DOCKER_HUB_USERNAME=""
+export DOCKER_HUB_PASSWORD=""
+docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD index.docker.io
+....
+
+Install the Carvel kapp-controller, secretgen-controller and cert-manager:
+
+[source,shell]
+....
+./carvel/prepare-cluster.sh
+....
+
+Load the SCDF repo package for the chosen _scdf-type_:
+[source,shell]
+....
+./carvel/setup-scdf-repo.sh
+....
+
+=== Install supporting services
+
+In a production environment, you should use supported database and broker services or operators, along with shared observability tools.
+
+For local development or demonstration, the following can be used to install a database, broker and Prometheus.
+
+==== Deploy local database
+
+[source,shell]
+....
+./carvel/deploy-local-database.sh # <1>
+....
+<1> `database` must be one of `postgresql` or `mariadb`. The default is postgresql; it can also be configured in `DATABASE` using `start-deploy.sh`.
+
+NOTE: This script updates `scdf-values.yml` with the correct secret name.
+
+==== Deploy local message broker
+[source,shell]
+....
+./carvel/deploy-local-broker.sh
+....
+
+==== Deploy local Prometheus and proxy
+[source,shell]
+....
+./carvel/deploy-local-prometheus.sh
+....
+
+_This script also configures the Grafana endpoint in `scdf-values.yml`._
+
+=== Configure Prometheus proxy
+
+If an existing Prometheus and Prometheus proxy are already deployed, the proxy can be configured using:
+
+[source,shell]
+....
+./carvel/configure-prometheus-proxy.sh [step]
+....
+
+=== Deploy Spring Cloud Data Flow
+
+You can configure the following environment variables before running `register-apps.sh`:
+
+* `STREAM_APPS_RT_VERSION` Stream Apps Release Train Version. _Default is 2022.0.0_.
+* `STREAM_APPS_VERSION` Stream Apps Version. _Default is 4.0.0_.
+
+[source,shell]
+....
+./carvel/deploy-scdf.sh
+source ./carvel/export-dataflow-ip.sh
+# expected output: Dataflow URL:
+./carvel/register-apps.sh
+....
+
+=== Update deployed application
+ +You can modify the values file used during installation and then update the deployment using `./carvel/update-scdf.sh` diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-cloudfoundry.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-cloudfoundry.adoc index 69574cbc08..1d7b91662f 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-cloudfoundry.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-cloudfoundry.adoc @@ -149,13 +149,13 @@ are `http` (the default), `port`, and `none`. You can also set environment variables that specify the HTTP-based health check endpoint and timeout: `SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_HEALTH_CHECK_ENDPOINT` and `SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_HEALTH_CHECK_TIMEOUT`, respectively. These default to `/health` (the Spring Boot default location) and `120` seconds. -* You can also specify deployment properties by using the DSL. For instance, if you want to set the allocated memory for the `http` application to 512m and also bind a mysql service to the `jdbc` application, you can run the following commands: +* You can also specify deployment properties by using the DSL. For instance, if you want to set the allocated memory for the `http` application to 512m and also bind a postgres service to the `jdbc` application, you can run the following commands: [source,bash,subs=attributes] ---- -dataflow:> stream create --name mysqlstream --definition "http | jdbc --tableName=names --columns=name" -dataflow:> stream deploy --name mysqlstream --properties "deployer.http.memory=512, deployer.jdbc.cloudfoundry.services=mysql" +dataflow:> stream create --name postgresstream --definition "http | jdbc --tableName=names --columns=name" +dataflow:> stream deploy --name postgresstream --properties "deployer.http.memory=512, deployer.jdbc.cloudfoundry.services=postgres" ---- @@ -206,7 +206,7 @@ spring: memory: 512m disk: 2048m instances: 4 - services: rabbit,mysql + services: rabbit,postgres appNamePrefix: dev1 qa: connection: @@ -221,7 +221,7 @@ spring: memory: 756m disk: 724m instances: 2 - services: rabbitQA,mysqlQA + services: rabbitQA,postgresQA appNamePrefix: qa1 ---- @@ -287,15 +287,16 @@ cf set-env dataflow-server SPRING_APPLICATION_JSON '{"spring.cloud.dataflow.appl ---- -For Spring Cloud Task apps, you can use something similar to the following, if you use a database service instance named `mysql`: +For Spring Cloud Task apps, you can use something similar to the following, if you use a database service instance named `postgres`: [source,bash,subs=attributes] ---- -cf set-env SPRING_DATASOURCE_URL '${vcap.services.mysql.credentials.jdbcUrl}' -cf set-env SPRING_DATASOURCE_USERNAME '${vcap.services.mysql.credentials.username}' -cf set-env SPRING_DATASOURCE_PASSWORD '${vcap.services.mysql.credentials.password}' +cf set-env SPRING_DATASOURCE_URL '${vcap.services.postgres.credentials.jdbcUrl}' +cf set-env SPRING_DATASOURCE_USERNAME '${vcap.services.postgres.credentials.username}' +cf set-env SPRING_DATASOURCE_PASSWORD '${vcap.services.postgres.credentials.password}' cf set-env SPRING_DATASOURCE_DRIVER_CLASS_NAME 'org.mariadb.jdbc.Driver' +cf set-env SPRING_JPA_DATABASE_PLATFORM 'org.hibernate.dialect.MariaDB106Dialect' ---- @@ -322,23 +323,23 @@ SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_US When deploying streams in Cloud Foundry, you can take advantage of application-specific 
service bindings, so not all services are globally configured for all the apps orchestrated by Spring Cloud Data Flow. -For instance, if you want to provide a `mysql` service binding only for the `jdbc` application in the following stream +For instance, if you want to provide a `postgres` service binding only for the `jdbc` application in the following stream definition, you can pass the service binding as a deployment property: [source,bash,subs=attributes] ---- dataflow:>stream create --name httptojdbc --definition "http | jdbc" -dataflow:>stream deploy --name httptojdbc --properties "deployer.jdbc.cloudfoundry.services=mysqlService" +dataflow:>stream deploy --name httptojdbc --properties "deployer.jdbc.cloudfoundry.services=postgresService" ---- -where `mysqlService` is the name of the service specifically bound only to the `jdbc` application and the `http` +where `postgresService` is the name of the service specifically bound only to the `jdbc` application and the `http` application does not get the binding by this method. If you have more than one service to bind, they can be passed as comma-separated items -(for example: `deployer.jdbc.cloudfoundry.services=mysqlService,someService`). +(for example: `deployer.jdbc.cloudfoundry.services=postgresService,someService`). [[configure-service-binding-parameters]] === Configuring Service binding parameters @@ -372,7 +373,7 @@ Since a comma is also used to separate configuration parameters, and to avoid wh [source] ---- -rabbitmq,'nfs_service_instance uid:1000,gid:1000,mount:/var/volume1,readonly:true',mysql,'my-google-bigquery-example role:bigquery.user' +rabbitmq,'nfs_service_instance uid:1000,gid:1000,mount:/var/volume1,readonly:true',postgres,'my-google-bigquery-example role:bigquery.user' ---- [TIP] @@ -505,10 +506,6 @@ The following example shows how to deploy the `http` health check type to an end Though we recommend using a Maven Artifactory for application <>, there might be situations where one of the following alternative approaches would make sense. -* We have custom-built and maintain a link:https://github.com/spring-cloud-stream-app-starters/scdf-app-tool[SCDF APP Tool] -that can run as a regular Spring Boot application in Cloud Foundry, but it will in turn host and serve the application -JARs for SCDF at runtime. - * With the help of Spring Boot, we can serve link:https://docs.spring.io/spring-boot/docs/current/reference/html/boot-features-developing-web-applications.html#boot-features-spring-mvc-static-content[static content] in Cloud Foundry. A simple Spring Boot application can bundle all the required stream and task applications. By having it run on Cloud Foundry, the static application can then serve the über-jar's. From the shell, you can, for example, register the @@ -742,17 +739,17 @@ logging.level.cloudfoundry-client == DEBUG === Spring Cloud Config Server -You can use Spring Cloud Config Server to centralize configuration properties for Spring Boot applications. Likewise, -both Spring Cloud Data Flow and the applications orchestrated by Spring Cloud Data Flow can be integrated with -a configuration server to use the same capabilities. +You can use Spring Cloud Config Server to centralize configuration properties for Spring Boot applications. +Likewise, both Spring Cloud Data Flow and the applications orchestrated by Spring Cloud Data Flow can be integrated with a configuration server to use the same capabilities. 
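+
+For example, a minimal sketch of pointing the Data Flow server itself at a config server instance (the server URL is a placeholder; `spring.cloud.config.uri` is the standard Spring Cloud Config client property, set here through its environment-variable form):
+
+[source,shell]
+----
+cf set-env data-flow-server SPRING_CLOUD_CONFIG_URI https://my-config-server.example.com
+cf restage data-flow-server
+----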
==== Stream, Task, and Spring Cloud Config Server Similar to Spring Cloud Data Flow server, you can configure both the stream and task applications to resolve the centralized properties from the configuration server. Setting the `spring.cloud.config.uri` property for the deployed applications is a common way to bind to the configuration server. See the link:https://cloud.spring.io/spring-cloud-config/spring-cloud-config.html#_spring_cloud_config_client[Spring Cloud Config Client] reference guide for more information. -Since this property is likely to be used across all applications deployed by the Data Flow server, the Data Flow server's `spring.cloud.dataflow.applicationProperties.stream` property for stream applications and `spring.cloud.dataflow.applicationProperties.task` property for task applications can be used to pass the `uri` of the Config Server to each deployed stream or task application. See the section on <> for more information. -Note that, if you use applications from the link:https://cloud.spring.io/spring-cloud-stream-app-starters/[App Starters project], these applications already embed the `spring-cloud-services-starter-config-client` dependency. +Since this property is likely to be used across all deployed applications, the Data Flow server's `spring.cloud.dataflow.applicationProperties.stream` property for stream applications and `spring.cloud.dataflow.applicationProperties.task` property for task applications can be used to pass the `uri` of the Config Server to each deployed stream or task application. See the section on <> for more information. + +Note that, if you use the out-of-the-box link:https://spring.io/projects/spring-cloud-stream-applications/[Stream Applications], these applications already embed the `spring-cloud-services-starter-config-client` dependency. If you build your application from scratch and want to add the client side support for config server, you can add a dependency reference to the config server client library. 
The following snippet shows a Maven example: @@ -780,10 +777,9 @@ If you know that you are not using config server functionality, you can disable The following SCDF and Skipper `manifest.yml` templates includes the required environment variables for the Skipper and Spring Cloud Data Flow server and deployed applications and tasks to successfully run on Cloud Foundry and automatically resolve centralized properties from `my-config-server` at runtime: - -[source,yml] +.SCDF manifest.yml +[source,yaml] ---- ---- applications: - name: data-flow-server host: data-flow-server @@ -793,7 +789,7 @@ applications: path: {PATH TO SERVER UBER-JAR} env: SPRING_APPLICATION_NAME: data-flow-server - MAVEN_REMOTE_REPOSITORIES_REPO1_URL: https://repo.spring.io/libs-snapshot + MAVEN_REMOTEREPOSITORIES_REPO1_URL: https://my.custom.repo/prod-repo SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_URL: https://api.sys.huron.cf-app.com SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_ORG: sabby20 SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_SPACE: sabby20 @@ -801,13 +797,16 @@ applications: SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_USERNAME: admin SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_PASSWORD: *** SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_SKIP_SSL_VALIDATION: true - SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_SERVICES: mysql + SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_SERVICES: postgres SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI: https:///api services: -- mysql +- postgres - my-config-server +---- ---- +.Skipper manifest.yml +[source,yaml] +---- applications: - name: skipper-server host: skipper-server @@ -829,11 +828,10 @@ applications: SPRING_CLOUD_SKIPPER_SERVER_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_PASSWORD: admin SPRING_CLOUD_SKIPPER_SERVER_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_SKIP_SSL_VALIDATION: false SPRING_CLOUD_SKIPPER_SERVER_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_DELETE_ROUTES: false - SPRING_CLOUD_SKIPPER_SERVER_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_SERVICES: rabbit, my-config-server + SPRING_CLOUD_SKIPPER_SERVER_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_SERVICES: rabbit,my-config-server services: -- mysql +- postgres my-config-server - ---- where `my-config-server` is the name of the Spring Cloud Config Service instance running on Cloud Foundry. 
@@ -857,7 +855,6 @@ get their configuration from the `my-config-server` Cloud Config server (deploye [source,yml,options="wrap"] ---- ---- applications: - name: test-server host: test-server @@ -867,7 +864,7 @@ applications: path: spring-cloud-dataflow-server-VERSION.jar env: SPRING_APPLICATION_NAME: test-server - MAVEN_REMOTE_REPOSITORIES_REPO1_URL: https://repo.spring.io/libs-snapshot + MAVEN_REMOTEREPOSITORIES_REPO1_URL: https://my.custom.repo/prod-repo SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_URL: https://api.sys.huron.cf-app.com SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_ORG: sabby20 SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_SPACE: sabby20 @@ -875,22 +872,19 @@ applications: SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_USERNAME: admin SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_PASSWORD: *** SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_SKIP_SSL_VALIDATION: true - SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_SERVICES: mysql, config-server + SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_SERVICES: postgres, config-server SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI: https:///api TRUST_CERTS: #this is for the server SPRING_CLOUD_DATAFLOW_APPLICATION_PROPERTIES_TASK_TRUST_CERTS: #this propagates to all tasks services: -- mysql +- postgres - my-config-server #this is for the server ---- - Also add the `my-config-server` service to the Skipper's manifest environment - [source,yml] ---- ---- applications: - name: skipper-server host: skipper-server @@ -912,9 +906,8 @@ applications: SPRING_CLOUD_SKIPPER_SERVER_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_CONNECTION_PASSWORD: SPRING_CLOUD_SKIPPER_SERVER_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_DEPLOYMENT_SERVICES: rabbit, my-config-server #this is so all stream applications bind to my-config-server services: -- mysql +- postgres my-config-server - ---- @@ -945,7 +938,6 @@ The following sample manifest shows both environment properties configured (assu [source,yml] ---- ---- applications: - name: data-flow-server host: data-flow-server @@ -967,7 +959,7 @@ applications: SPRING_CLOUD_SKIPPER_CLIENT_SERVER_URI: https:///api SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_SCHEDULER_SCHEDULER_URL: https://scheduler.local.pcfdev.io services: -- mysql +- postgres ---- Where the `SPRING_CLOUD_DATAFLOW_TASK_PLATFORM_CLOUDFOUNDRY_ACCOUNTS[default]_SCHEDULER_SCHEDULER_URL` has the following format: `scheduler.` (for diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-database.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-database.adoc new file mode 100644 index 0000000000..f53ef92687 --- /dev/null +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-database.adoc @@ -0,0 +1,88 @@ + +[[configuration-database-overview]] +A relational database is used to store stream and task definitions as well as the state of executed tasks. +Spring Cloud Data Flow provides schemas for *MariaDB*, *MySQL*, *Oracle*, *PostgreSQL*, *Db2*, *SQL Server*, and *H2*. The schema is automatically created when the server starts. + +NOTE: The JDBC drivers for *MariaDB*, *MySQL* (via the _MariaDB_ driver), *PostgreSQL*, *SQL Server* are available without additional configuration. 
To use any other database, you need to put the corresponding JDBC driver jar on the classpath of the server, as described <<#add-custom-driver,here>>.
+
+To configure a database, the following properties must be set:
+
+* `spring.datasource.url`
+* `spring.datasource.username`
+* `spring.datasource.password`
+* `spring.datasource.driver-class-name`
+* `spring.jpa.database-platform`
+
+The `username` and `password` are set the same way regardless of the database. However, the `url` and `driver-class-name` vary per database, as follows.
+
+[frame="none"]
+[cols="a,a,a,a,a"]
+[cols="10%,20%,20%,20%,10%"]
+|===
+|[.small]#Database#|[.small]#spring.datasource.url#|[.small]#spring.datasource.driver-class-name#|[.small]#spring.jpa.database-platform#|[.small]#Driver included#
+
+|[.small]#MariaDB 10.0 - 10.1#
+|[.small]#jdbc:mariadb://${db-hostname}:${db-port}/${db-name}#
+|[.small]#org.mariadb.jdbc.Driver#
+|[.small]#org.hibernate.dialect.MariaDB10Dialect#
+|[.small]#Yes#
+
+|[.small]#MariaDB 10.2#
+|[.small]#jdbc:mariadb://${db-hostname}:${db-port}/${db-name}#
+|[.small]#org.mariadb.jdbc.Driver#
+|[.small]#org.hibernate.dialect.MariaDB102Dialect#
+|[.small]#Yes#
+
+|[.small]#MariaDB 10.3 - 10.5#
+|[.small]#jdbc:mariadb://${db-hostname}:${db-port}/${db-name}#
+|[.small]#org.mariadb.jdbc.Driver#
+|[.small]#org.hibernate.dialect.MariaDB103Dialect#
+|[.small]#Yes#
+
+|[.small]#MariaDB 10.6+#
+|[.small]#jdbc:mariadb://${db-hostname}:${db-port}/${db-name}#
+|[.small]#org.mariadb.jdbc.Driver#
+|[.small]#org.hibernate.dialect.MariaDB106Dialect#{empty}footnote:[If the database was migrated from MySQL and uses sequence tables, use `org.hibernate.dialect.MariaDB102Dialect`. Please note that Hibernate selects an incorrect dialect when using MariaDB 11.]
+|[.small]#Yes#
+
+|[.small]#MySQL 5.7#
+|[.small]#jdbc:mysql://${db-hostname}:${db-port}/${db-name}?permitMysqlScheme#
+|[.small]#org.mariadb.jdbc.Driver#
+|[.small]#org.hibernate.dialect.MySQL57Dialect#
+|[.small]#Yes#
+
+|[.small]#MySQL 8.0+#
+|[.small]#jdbc:mysql://${db-hostname}:${db-port}/${db-name}?allowPublicKeyRetrieval=true&useSSL=false&autoReconnect=true&permitMysqlScheme#{empty}footnote:[SSL is disabled in this example; adjust accordingly for your environment and requirements]
+|[.small]#org.mariadb.jdbc.Driver#
+|[.small]#org.hibernate.dialect.MySQL8Dialect#
+|[.small]#Yes#
+
+|[.small]#PostgreSQL#
+|[.small]#jdbc:postgresql://${db-hostname}:${db-port}/${db-name}#
+|[.small]#org.postgresql.Driver#
+|[.small]#Remove for Hibernate default#
+|[.small]#Yes#
+
+|[.small]#SQL Server#
+|[.small]#jdbc:sqlserver://${db-hostname}:${db-port};databaseName=${db-name};encrypt=false#
+|[.small]#com.microsoft.sqlserver.jdbc.SQLServerDriver#
+|[.small]#Remove for Hibernate default#
+|[.small]#Yes#
+
+|[.small]#DB2#
+|[.small]#jdbc:db2://${db-hostname}:${db-port}/${db-name}#
+|[.small]#com.ibm.db2.jcc.DB2Driver#
+|[.small]#Remove for Hibernate default#
+|[.small]#No#
+
+|[.small]#Oracle#
+|[.small]#jdbc:oracle:thin:@${db-hostname}:${db-port}/${db-name}#
+|[.small]#oracle.jdbc.OracleDriver#
+|[.small]#Remove for Hibernate default#
+|[.small]#No#
+|===
+
+==== H2
+When no other database is configured, Spring Cloud Data Flow uses an embedded instance of the *H2* database as the default.
+
+NOTE: *H2* is good for development purposes but is not recommended for production use, nor is running it as an external database supported.
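+
+For example, a minimal sketch of starting the server against PostgreSQL, following the table above (host, port, database name, credentials, and the jar name are placeholders; `spring.jpa.database-platform` is omitted because the table says to remove it for the Hibernate default):
+
+[source,shell]
+----
+java -jar spring-cloud-dataflow-server.jar \
+  --spring.datasource.url='jdbc:postgresql://localhost:5432/dataflow' \
+  --spring.datasource.username=scdf \
+  --spring.datasource.password=secret \
+  --spring.datasource.driver-class-name=org.postgresql.Driver
+----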
diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-kubernetes-app-properties.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-kubernetes-app-properties.adoc
new file mode 100644
index 0000000000..8aa0be731a
--- /dev/null
+++ b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-kubernetes-app-properties.adoc
@@ -0,0 +1,1173 @@
+
+=== Application and Server Properties
+
+This section covers how you can customize the deployment of your applications. You can use a number of properties to influence settings for the applications that are deployed. Properties can be applied on a per-application basis or in the appropriate server configuration for all deployed applications.
+
+NOTE: Properties set on a per-application basis always take precedence over properties set as the server configuration. This arrangement lets you override global server level properties on a per-application basis.
+
+Properties to be applied for all deployed Tasks are defined in the `src/kubernetes/server/server-config-[binder].yaml` file and for Streams in `src/kubernetes/skipper/skipper-config-[binder].yaml`. Replace `[binder]` with the messaging middleware you are using -- for example, `rabbit` or `kafka`.
+
+==== Memory and CPU Settings
+
+Applications are deployed with default memory and CPU settings. If you need to, you can adjust these values. The following example shows how to set `Limits` to `1000m` for `CPU` and `1024Mi` for memory and `Requests` to `800m` for CPU and `640Mi` for memory (replace `<app>` with the name of your application):
+
+====
+[source]
+----
+deployer.<app>.kubernetes.limits.cpu=1000m
+deployer.<app>.kubernetes.limits.memory=1024Mi
+deployer.<app>.kubernetes.requests.cpu=800m
+deployer.<app>.kubernetes.requests.memory=640Mi
+----
+====
+
+Those values result in the following container settings being used:
+
+====
+[source]
+----
+Limits:
+  cpu: 1
+  memory: 1Gi
+Requests:
+  cpu: 800m
+  memory: 640Mi
+----
+====
+
+You can also control the default values to which to set the `cpu` and `memory` globally.
+
+The following example shows how to set the CPU and memory for streams:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    limits:
+                      memory: 640Mi
+                      cpu: 500m
+----
+====
+
+The following example shows how to set the CPU and memory for tasks:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        dataflow:
+          task:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    limits:
+                      memory: 640Mi
+                      cpu: 500m
+----
+====
+
+The settings we have used so far affect only the settings for the container. They do not affect the memory setting for the JVM process in the container. If you would like to set JVM memory settings, you can set an environment variable to do so. See the next section for details.
+
+==== Environment Variables
+
+To influence the environment settings for a given application, you can use the `spring.cloud.deployer.kubernetes.environmentVariables` deployer property.
+For example, a common requirement in production settings is to influence the JVM memory arguments.
+You can do so by using the `JAVA_TOOL_OPTIONS` environment variable, as the following example shows:
+
+====
+[source]
+----
+deployer.<app>.kubernetes.environmentVariables=JAVA_TOOL_OPTIONS=-Xmx1024m
+----
+====
+
+NOTE: The `environmentVariables` property accepts a comma-delimited string.
If an environment variable contains a value
+that is also a comma-delimited string, it must be enclosed in single quotation marks -- for example,
+`spring.cloud.deployer.kubernetes.environmentVariables=spring.cloud.stream.kafka.binder.brokers='somehost:9092,
+anotherhost:9093'`
+
+This overrides the JVM memory setting for the desired `<app>` (replace `<app>` with the name of your application).
+
+[[getting-started-kubernetes-probes]]
+==== Liveness, Readiness and Startup Probes
+
+The `liveness` and `readiness` probes use paths called `/health/liveness` and `/health/readiness`, respectively. They use a `delay` of `1` for both and a `period` of `60` and `10`, respectively. You can change these defaults when you deploy the stream by using deployer properties. The liveness and readiness probes are applied only to streams.
+
+The `startup` probe uses the `/health` path, a delay of 30 and a period of 3, with a failure threshold of 20 before the container restarts the application.
+
+The following example changes the `liveness`, `readiness` and `startup` probes (replace `<app>` with the name of your application) by setting deployer properties:
+
+====
+[source]
+----
+deployer.<app>.kubernetes.livenessProbePath=/health/liveness
+deployer.<app>.kubernetes.livenessProbeDelay=1
+deployer.<app>.kubernetes.livenessProbePeriod=60
+deployer.<app>.kubernetes.livenessProbeSuccess=1
+deployer.<app>.kubernetes.livenessProbeFailure=3
+deployer.<app>.kubernetes.readinessProbePath=/health/readiness
+deployer.<app>.kubernetes.readinessProbeDelay=1
+deployer.<app>.kubernetes.readinessProbePeriod=60
+deployer.<app>.kubernetes.readinessProbeSuccess=1
+deployer.<app>.kubernetes.readinessProbeFailure=3
+deployer.<app>.kubernetes.startupHttpProbePath=/health
+deployer.<app>.kubernetes.startupProbeDelay=20
+deployer.<app>.kubernetes.startupProbeSuccess=1
+deployer.<app>.kubernetes.startupProbeFailure=30
+deployer.<app>.kubernetes.startupProbePeriod=5
+deployer.<app>.kubernetes.startupProbeTimeout=3
+----
+====
+
+You can declare the same as part of the server global configuration for streams, as the following example shows:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    livenessHttpProbePath: /health/liveness
+                    livenessProbeDelay: 1
+                    livenessProbePeriod: 60
+                    livenessProbeSuccess: 1
+                    livenessProbeFailure: 3
+                    startupHttpProbePath: /health
+                    startupProbeDelay: 20
+                    startupProbeSuccess: 1
+                    startupProbeFailure: 30
+                    startupProbePeriod: 5
+                    startupProbeTimeout: 3
+----
+====
+
+Similarly, you can swap `liveness` for `readiness` to override the default `readiness` settings.
+
+By default, port 8080 is used as the probe port. You can change the defaults for both `liveness` and `readiness` probe ports by using deployer properties, as the following example shows:
+
+====
+[source]
+----
+deployer.<app>.kubernetes.readinessProbePort=7000
+deployer.<app>.kubernetes.livenessProbePort=7000
+deployer.<app>.kubernetes.startupProbePort=7000
+----
+====
+
+You can declare the same as part of the global configuration for streams, as the following example shows:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    readinessProbePort: 7000
+                    livenessProbePort: 7000
+                    startupProbePort: 7000
+----
+====
+
+[NOTE]
+=====
+By default, the `liveness` and `readiness` probe paths use Spring Boot 2.x+ actuator endpoints.
The `startup` probe path defaults to the management path `/info` but may be modified as needed, as the following example shows (replace `` with the name of your application):
+
+====
+[source]
+----
+deployer..kubernetes.startupProbePath=/api
+----
+====
+
+To use Spring Boot 1.x actuator endpoint paths, you must adjust the `liveness` and `readiness` values. To automatically set both `liveness` and `readiness` endpoints on a per-application basis to the default Spring Boot 1.x paths, you can set the following property:
+
+====
+[source]
+----
+deployer..kubernetes.bootMajorVersion=1
+----
+====
+
+=====
+
+You can access secured probe endpoints by using credentials stored in a https://kubernetes.io/docs/concepts/configuration/secret/[Kubernetes secret]. You can use an existing secret, provided the credentials are contained under the `credentials` key name of the secret's `data` block. You can configure probe authentication on a per-application basis. When enabled, it is applied to both the `liveness` and `readiness` probe endpoints by using the same credentials and authentication type. Currently, only `Basic` authentication is supported.
+
+To create a new secret:
+
+. Generate the base64 string with the credentials used to access the secured probe endpoints.
++
+Basic authentication encodes a username and a password as a base64 string in the format of `username:password`.
++
+The following example (which includes output and in which you should replace `user` and `pass` with your values) shows how to generate a base64 string:
++
+====
+[source,shell]
+----
+$ echo -n "user:pass" | base64
+dXNlcjpwYXNz
+----
+====
+
+. With the encoded credentials, create a file (for example, `myprobesecret.yml`) with the following contents:
++
+====
+[source,yaml]
+----
+apiVersion: v1
+kind: Secret
+metadata:
+  name: myprobesecret
+type: Opaque
+data:
+  credentials: GENERATED_BASE64_STRING
+----
+====
+
+. Replace `GENERATED_BASE64_STRING` with the base64-encoded value generated earlier.
+
+. Create the secret by using `kubectl`, as the following example shows:
++
+====
+[source,shell]
+----
+$ kubectl create -f ./myprobesecret.yml
+secret "myprobesecret" created
+----
+====
+
+. Set the following deployer property to use authentication when accessing probe endpoints:
++
+====
+[source]
+----
+deployer..kubernetes.probeCredentialsSecret=myprobesecret
+----
+====
++
+Replace `` with the name of the application to which to apply authentication.
+
+==== Using `SPRING_APPLICATION_JSON`
+
+You can use a `SPRING_APPLICATION_JSON` environment variable to set Data Flow server properties (including the configuration of Maven repository settings) that are common across all of the Data Flow server implementations. These settings go at the server level in the container `env` section of a deployment YAML. The following example shows how to do so:
+
+====
+[source,options=nowrap]
+----
+env:
+- name: SPRING_APPLICATION_JSON
+  value: "{ \"maven\": { \"local-repository\": null, \"remote-repositories\": { \"repo1\": { \"url\": \"https://my.custom.repo/prod-repo\"} } } }"
+----
+====
+
+==== Private Docker Registry
+
+You can pull Docker images from a private registry on a per-application basis. First, you must create a secret in the cluster. Follow the https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/[Pull an Image from a Private Registry] guide to create the secret.
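+For reference, a registry secret named `mysecret` (matching the property shown below) could be created with `kubectl` along the following lines -- a minimal sketch in which the registry host and credentials are placeholders:
+
+====
+[source,shell]
+----
+$ kubectl create secret docker-registry mysecret \
+    --docker-server=registry.example.com \
+    --docker-username=myuser \
+    --docker-password=mypassword
+----
+====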
+
+Once you have created the secret, you can use the `imagePullSecret` property to set the secret to use, as the following example shows:
+
+====
+[source]
+----
+deployer..kubernetes.imagePullSecret=mysecret
+----
+====
+
+Replace `` with the name of your application and `mysecret` with the name of the secret you created earlier.
+
+You can also configure the image pull secret at the global server level.
+
+The following example shows how to do so for streams:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    imagePullSecret: mysecret
+----
+====
+
+The following example shows how to do so for tasks:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        dataflow:
+          task:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    imagePullSecret: mysecret
+----
+====
+
+Replace `mysecret` with the name of the secret you created earlier.
+
+==== Annotations
+
+You can add annotations to Kubernetes objects on a per-application basis. The supported object types are `Pod`, `Deployment`, `Service`, and `Job`. Annotations are defined in a `key:value` format, allowing for multiple annotations separated by a comma. For more information and use cases on annotations, see https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/[Annotations].
+
+The following example shows how you can configure applications to use annotations:
+
+====
+[source,options=nowrap]
+----
+deployer..kubernetes.podAnnotations=annotationName:annotationValue
+deployer..kubernetes.serviceAnnotations=annotationName:annotationValue,annotationName2:annotationValue2
+deployer..kubernetes.jobAnnotations=annotationName:annotationValue
+----
+====
+
+Replace `` with the name of your application and supply the names and values of your annotations.
+
+==== Entry Point Style
+
+An entry point style affects how application properties are passed to the container to be deployed. Currently, three styles are supported:
+
+* `exec` (default): Passes all application properties and command line arguments in the deployment request as container arguments. Application properties are transformed into the format of `--key=value`.
+* `shell`: Passes all application properties and command line arguments as environment variables. Each of the application or command-line argument properties is transformed into an uppercase string and `.` characters are replaced with `_`.
+* `boot`: Creates an environment variable called `SPRING_APPLICATION_JSON` that contains a JSON representation of all application properties. Command line arguments from the deployment request are set as container args.
+
+NOTE: In all cases, environment variables defined at the server-level configuration and on a per-application basis are passed to the container as-is.
+
+You can configure an application as follows:
+
+====
+[source,options=nowrap]
+----
+deployer..kubernetes.entryPointStyle=
+----
+====
+
+Replace `` with the name of your application and `` with your desired entry point style.
+
+You can also configure the entry point style at the global server level.
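+As a rough illustration of the three styles, consider a hypothetical application property `server.port=9000` (the property and value here are illustrative only, not defaults). It would reach the container roughly as follows:
+
+====
+[source,options=nowrap]
+----
+exec:  container argument --server.port=9000
+shell: environment variable SERVER_PORT=9000
+boot:  environment variable SPRING_APPLICATION_JSON='{"server.port":"9000"}'
+----
+====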
+
+The following example shows how to do so for streams:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    entryPointStyle: entryPointStyle
+----
+====
+
+The following example shows how to do so for tasks:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        dataflow:
+          task:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    entryPointStyle: entryPointStyle
+----
+====
+
+Replace `entryPointStyle` with the desired entry point style.
+
+You should choose an Entry Point Style of either `exec` or `shell`, to correspond to how the `ENTRYPOINT` syntax is defined in the container's `Dockerfile`. For more information and use cases on `exec` versus `shell`, see the https://docs.docker.com/engine/reference/builder/#entrypoint[ENTRYPOINT] section of the Docker documentation.
+
+Using the `boot` entry point style corresponds to using the `exec` style `ENTRYPOINT`. Command line arguments from the deployment request are passed to the container, with the addition of application properties being mapped into the `SPRING_APPLICATION_JSON` environment variable rather than command line arguments.
+
+NOTE: When you use the `boot` Entry Point Style, the `deployer..kubernetes.environmentVariables` property must not contain `SPRING_APPLICATION_JSON`.
+
+==== Deployment Service Account
+
+You can configure a custom service account for application deployments through properties. You can use an existing service account or create a new one. One way to create a service account is by using `kubectl`, as the following example shows:
+
+====
+[source,shell]
+----
+$ kubectl create serviceaccount myserviceaccountname
+serviceaccount "myserviceaccountname" created
+----
+====
+
+Then you can configure individual applications as follows:
+
+====
+[source,options=nowrap]
+----
+deployer..kubernetes.deploymentServiceAccountName=myserviceaccountname
+----
+====
+
+Replace `` with the name of your application and `myserviceaccountname` with your service account name.
+
+You can also configure the service account name at the global server level.
+
+The following example shows how to do so for streams:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    deploymentServiceAccountName: myserviceaccountname
+----
+====
+
+The following example shows how to do so for tasks:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        dataflow:
+          task:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    deploymentServiceAccountName: myserviceaccountname
+----
+====
+
+Replace `myserviceaccountname` with the service account name to be applied to all deployments.
+
+==== Image Pull Policy
+
+An image pull policy defines when a Docker image should be pulled to the local registry. Currently, three policies are supported:
+
+* `IfNotPresent` (default): Do not pull an image if it already exists.
+* `Always`: Always pull the image regardless of whether it already exists.
+* `Never`: Never pull an image. Use only an image that already exists.
+
+The following example shows how you can individually configure applications:
+
+====
+[source,options=nowrap]
+----
+deployer..kubernetes.imagePullPolicy=IfNotPresent
+----
+====
+
+Replace `` with the name of your application and `IfNotPresent` with your desired image pull policy.
+
+You can configure an image pull policy at the global server level.
+ +The following example shows how to do so for streams: + +==== +[source,yaml] +---- +data: + application.yaml: |- + spring: + cloud: + skipper: + server: + platform: + kubernetes: + accounts: + default: + imagePullPolicy: IfNotPresent +---- +==== + +The following example shows how to do so for tasks: + +==== +[source,yaml] +---- +data: + application.yaml: |- + spring: + cloud: + dataflow: + task: + platform: + kubernetes: + accounts: + default: + imagePullPolicy: Always +---- +==== + +Replace `Always` with your desired image pull policy. + +==== Deployment Labels + +You can set custom labels on objects related to https://kubernetes.io/docs/concepts/workloads/controllers/deployment/[Deployment]. See https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/[Labels] for more information on labels. Labels are specified in `key:value` format. + +The following example shows how you can individually configure applications: + +==== +[source,options=nowrap] +---- +deployer..kubernetes.deploymentLabels=myLabelName:myLabelValue +---- +==== + +Replace `` with the name of your application, `myLabelName` with your label name, and `myLabelValue` with the value of your label. + +Additionally, you can apply multiple labels, as the following example shows: + +==== +[source,options=nowrap] +---- +deployer..kubernetes.deploymentLabels=myLabelName:myLabelValue,myLabelName2:myLabelValue2 +---- +==== + +==== Tolerations + +Tolerations work with taints to ensure pods are not scheduled onto particular nodes. +Tolerations are set into the pod configuration while taints are set onto nodes. +See the https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/[Taints and Tolerations] section of the Kubernetes reference for more information. + +The following example shows how you can individually configure applications: + +==== +[source,options=nowrap] +---- +deployer..kubernetes.tolerations=[{key: 'mykey', operator: 'Equal', value: 'myvalue', effect: 'NoSchedule'}] +---- +==== + +Replace `` with the name of your application and the key-value pairs according to your desired toleration configuration. + +You can configure tolerations at the global server level as well. + +The following example shows how to do so for streams: + +==== +[source,yaml] +---- +data: + application.yaml: |- + spring: + cloud: + skipper: + server: + platform: + kubernetes: + accounts: + default: + tolerations: + - key: mykey + operator: Equal + value: myvalue + effect: NoSchedule +---- +==== + +The following example shows how to do so for tasks: + +==== +[source,yaml] +---- +data: + application.yaml: |- + spring: + cloud: + dataflow: + task: + platform: + kubernetes: + accounts: + default: + tolerations: + - key: mykey + operator: Equal + value: myvalue + effect: NoSchedule +---- +==== + +Replace the `tolerations` key-value pairs according to your desired toleration configuration. + +==== Secret References + +Secrets can be referenced and their entire data contents can be decoded and inserted into the pod environment as individual variables. +See the https://kubernetes.io/docs/tasks/inject-data-application/distribute-credentials-secure/#configure-all-key-value-pairs-in-a-secret-as-container-environment-variables[Configure all key-value pairs in a Secret as container environment variables] section of the Kubernetes reference for more information. 
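+As a point of reference, the `testsecret` used in the following examples could be created with `kubectl` -- a minimal sketch in which the key names and values are illustrative placeholders:
+
+====
+[source,shell]
+----
+$ kubectl create secret generic testsecret \
+    --from-literal=SECRET_USERNAME=myuser \
+    --from-literal=SECRET_PASSWORD=mypass
+----
+====
+
+Each key under the secret's `data` block then surfaces in the pod as an environment variable of the same name.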
+ +The following example shows how you can individually configure applications: + +==== +[source,options=nowrap] +---- +deployer..kubernetes.secretRefs=testsecret +---- +==== + +You can also specify multiple secrets, as follows: + +==== +[source,options=nowrap] +---- +deployer..kubernetes.secretRefs=[testsecret,anothersecret] +---- +==== + +Replace `` with the name of your application and the `secretRefs` attribute with the appropriate values for your application environment and secret. + +You can configure secret references at the global server level as well. + +The following example shows how to do so for streams: + +==== +[source,yaml] +---- +data: + application.yaml: |- + spring: + cloud: + skipper: + server: + platform: + kubernetes: + accounts: + default: + secretRefs: + - testsecret + - anothersecret +---- +==== + +The following example shows how to do so for tasks: + +==== +[source,yaml] +---- +data: + application.yaml: |- + spring: + cloud: + dataflow: + task: + platform: + kubernetes: + accounts: + default: + secretRefs: + - testsecret + - anothersecret +---- +==== + +Replace the items of `secretRefs` with one or more secret names. + +==== Secret Key References + +Secrets can be referenced and their decoded value can be inserted into the pod environment. +See the https://kubernetes.io/docs/concepts/configuration/secret/#using-secrets-as-environment-variables[Using Secrets as Environment Variables] section of the Kubernetes reference for more information. + +The following example shows how you can individually configure applications: + +==== +[source,options=nowrap] +---- +deployer..kubernetes.secretKeyRefs=[{envVarName: 'MY_SECRET', secretName: 'testsecret', dataKey: 'password'}] +---- +==== + +Replace `` with the name of your application and the `envVarName`, `secretName`, and `dataKey` attributes with the appropriate values for your application environment and secret. + +You can configure secret key references at the global server level as well. + +The following example shows how to do so for streams: + +==== +[source,yaml] +---- +data: + application.yaml: |- + spring: + cloud: + skipper: + server: + platform: + kubernetes: + accounts: + default: + secretKeyRefs: + - envVarName: MY_SECRET + secretName: testsecret + dataKey: password +---- +==== + +The following example shows how to do so for tasks: + +==== +[source,yaml] +---- +data: + application.yaml: |- + spring: + cloud: + dataflow: + task: + platform: + kubernetes: + accounts: + default: + secretKeyRefs: + - envVarName: MY_SECRET + secretName: testsecret + dataKey: password +---- +==== + +Replace the `envVarName`, `secretName`, and `dataKey` attributes with the appropriate values for your secret. + +==== ConfigMap References + +A ConfigMap can be referenced and its entire data contents can be decoded and inserted into the pod environment as individual variables. +See the https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/#configure-all-key-value-pairs-in-a-configmap-as-container-environment-variables[Configure all key-value pairs in a ConfigMap as container environment variables] section of the Kubernetes reference for more information. 
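+Similarly, the `testcm` ConfigMap referenced in the following examples could be created with `kubectl` -- again a minimal sketch whose key and value are illustrative only (the `platform` key matches the `dataKey` used in the ConfigMap Key References examples later in this section):
+
+====
+[source,shell]
+----
+$ kubectl create configmap testcm --from-literal=platform=kubernetes
+----
+====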
+
+The following example shows how you can individually configure applications:
+
+====
+[source,options=nowrap]
+----
+deployer..kubernetes.configMapRefs=testcm
+----
+====
+
+You can also specify multiple ConfigMap instances, as follows:
+
+====
+[source,options=nowrap]
+----
+deployer..kubernetes.configMapRefs=[testcm,anothercm]
+----
+====
+
+Replace `` with the name of your application and the `configMapRefs` attribute with the appropriate values for your application environment and ConfigMap.
+
+You can configure ConfigMap references at the global server level as well.
+
+The following example shows how to do so for streams. Edit the appropriate `skipper-config-(binder).yaml`, replacing `(binder)` with the corresponding binder in use:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    configMapRefs:
+                    - testcm
+                    - anothercm
+----
+====
+
+The following example shows how to do so for tasks by editing the `server-config.yaml` file:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        dataflow:
+          task:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    configMapRefs:
+                    - testcm
+                    - anothercm
+----
+====
+
+Replace the items of `configMapRefs` with one or more ConfigMap names.
+
+==== ConfigMap Key References
+
+A ConfigMap can be referenced and its associated key value inserted into the pod environment.
+See the https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/#define-container-environment-variables-using-configmap-data[Define container environment variables using ConfigMap data] section of the Kubernetes reference for more information.
+
+The following example shows how you can individually configure applications:
+
+====
+[source,options=nowrap]
+----
+deployer..kubernetes.configMapKeyRefs=[{envVarName: 'MY_CM', configMapName: 'testcm', dataKey: 'platform'}]
+----
+====
+
+Replace `` with the name of your application and the `envVarName`, `configMapName`, and `dataKey` attributes with the appropriate values for your application environment and ConfigMap.
+
+You can configure ConfigMap key references at the global server level as well.
+
+The following example shows how to do so for streams. Edit the appropriate `skipper-config-(binder).yaml`, replacing `(binder)` with the corresponding binder in use:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    configMapKeyRefs:
+                    - envVarName: MY_CM
+                      configMapName: testcm
+                      dataKey: platform
+----
+====
+
+The following example shows how to do so for tasks by editing the `server-config.yaml` file:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        dataflow:
+          task:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    configMapKeyRefs:
+                    - envVarName: MY_CM
+                      configMapName: testcm
+                      dataKey: platform
+----
+====
+
+Replace the `envVarName`, `configMapName`, and `dataKey` attributes with the appropriate values for your ConfigMap.
+
+==== Pod Security Context
+The pod https://kubernetes.io/docs/tasks/configure-pod-container/security-context/[security context] specifies security settings for a pod and its containers.
+
+The configurable options are listed <<pod-security-context-props,here>>
+[.small]#(more details for each option can be found in the https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.24/#podsecuritycontext-v1-core[Pod Security Context] section of the Kubernetes API reference)#.
+
+The following example shows how you can configure the security context for an individual application pod:
+
+====
+[source,options=nowrap]
+----
+deployer..kubernetes.podSecurityContext={runAsUser: 65534, fsGroup: 65534, supplementalGroups: [65534, 65535], seccompProfile: { type: 'RuntimeDefault' }}
+----
+====
+
+Replace `` with the name of your application and any attributes with the appropriate values for your container environment.
+
+You can configure the pod security context at the global server level as well.
+The following example shows how to do so for streams. Edit the appropriate `skipper-config-(binder).yaml`, replacing `(binder)` with the corresponding binder in use:
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    podSecurityContext:
+                      runAsUser: 65534
+                      fsGroup: 65534
+                      supplementalGroups: [65534,65535]
+                      seccompProfile:
+                        type: Localhost
+                        localhostProfile: my-profiles/profile-allow.json
+----
+====
+
+The following example shows how to do so for tasks by editing the `server-config.yaml` file:
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        dataflow:
+          task:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    podSecurityContext:
+                      runAsUser: 65534
+                      fsGroup: 65534
+                      supplementalGroups: [65534,65535]
+                      seccompProfile:
+                        type: Localhost
+                        localhostProfile: my-profiles/profile-allow.json
+----
+====
+
+Adjust the `podSecurityContext` attributes with the appropriate values for your container environment.
+
+
+==== Container Security Context
+The container https://kubernetes.io/docs/tasks/configure-pod-container/security-context/[security context] specifies security settings for an individual container.
+
+The configurable options are listed <<container-security-context-props,here>>
+[.small]#(more details for each option can be found in the https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.24/#securitycontext-v1-core[Container Security Context] section of the Kubernetes API reference)#.
+
+NOTE: The container security context is applied to all containers in your deployment unless they have their own security context already explicitly defined, including regular init containers, stateful set init containers, and additional containers.
+
+The following example shows how you can configure the security context for containers in an individual application pod:
+====
+[source,options=nowrap]
+----
+deployer..kubernetes.containerSecurityContext={allowPrivilegeEscalation: true, runAsUser: 65534}
+----
+====
+
+Replace `` with the name of your application and any attributes with the appropriate values for your container environment.
+
+You can configure the container security context at the global server level as well.
+The following example shows how to do so for streams.
Edit the appropriate `skipper-config-(binder).yaml`, replacing `(binder)` with the corresponding binder in use:
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    containerSecurityContext:
+                      allowPrivilegeEscalation: true
+                      runAsUser: 65534
+----
+====
+
+The following example shows how to do so for tasks by editing the `server-config.yaml` file:
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        dataflow:
+          task:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    containerSecurityContext:
+                      allowPrivilegeEscalation: true
+                      runAsUser: 65534
+----
+====
+
+Adjust the `containerSecurityContext` attributes with the appropriate values for your container environment.
+
+
+==== Service Ports
+
+When you deploy applications, a Kubernetes Service object is created with a default port of `8080`. If the `server.port` property is set, it overrides the default port value. You can add additional ports to the Service object on a per-application basis. You can add multiple ports with a comma delimiter.
+
+The following example shows how you can configure additional ports on a Service object for an application:
+
+====
+[source,options=nowrap]
+----
+deployer..kubernetes.servicePorts=5000
+deployer..kubernetes.servicePorts=5000,9000
+----
+====
+
+Replace `` with the name of your application and supply the values of your ports.
+
+==== StatefulSet Init Container
+
+When deploying an application by using a StatefulSet, an Init Container is used to set the instance index in the pod.
+By default, the image used is `busybox`, which you can customize.
+
+The following example shows how you can individually configure application pods:
+
+====
+[source,options=nowrap]
+----
+deployer..kubernetes.statefulSetInitContainerImageName=myimage:mylabel
+----
+====
+
+Replace `` with the name of your application and the `statefulSetInitContainerImageName` attribute with the appropriate value for your environment.
+
+You can configure the StatefulSet Init Container at the global server level as well.
+
+The following example shows how to do so for streams. Edit the appropriate `skipper-config-(binder).yaml`, replacing `(binder)` with the corresponding binder in use:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        skipper:
+          server:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    statefulSetInitContainerImageName: myimage:mylabel
+----
+====
+
+The following example shows how to do so for tasks by editing the `server-config.yaml` file:
+
+====
+[source,yaml]
+----
+data:
+  application.yaml: |-
+    spring:
+      cloud:
+        dataflow:
+          task:
+            platform:
+              kubernetes:
+                accounts:
+                  default:
+                    statefulSetInitContainerImageName: myimage:mylabel
+----
+====
+
+Replace the `statefulSetInitContainerImageName` attribute with the appropriate value for your environment.
+
+==== Init Containers
+
+When you deploy applications, you can set a custom Init Container on a per-application basis.
+Refer to the https://kubernetes.io/docs/concepts/workloads/pods/init-containers/[Init Containers] section of the Kubernetes reference for more information.
+
+The following example shows how you can configure an Init Container or multiple Init Containers for an application:
+
+====
+[source,options=nowrap]
+----
+deployer..kubernetes.initContainer={containerName: 'test', imageName: 'busybox:latest', commands: ['sh', '-c', 'echo hello']}
+# alternative for multiple init containers
+deployer..kubernetes.initContainers=[{containerName:'test', imageName: 'busybox:latest', commands: ['sh', '-c', 'echo hello']}, {containerName:'test2', imageName:'busybox:latest', commands:['sh', '-c', 'echo world']}]
+# multiple containers can be created individually
+deployer..kubernetes.initContainers[0]={containerName:'test', imageName:'busybox:latest', commands:['sh', '-c', 'echo hello']}
+deployer..kubernetes.initContainers[1]={containerName:'test2', imageName:'busybox:latest', commands:['sh', '-c', 'echo world']}
+----
+====
+
+Replace `` with the name of your application and set the values of the `initContainer` attributes appropriate for your Init Container.
+
+==== Lifecycle Support
+
+When you deploy applications, you may attach `postStart` and `preStop` https://kubernetes.io/docs/tasks/configure-pod-container/attach-handler-lifecycle-event/[Lifecycle handlers] to execute commands.
+The Kubernetes API supports other types of handlers besides `exec`. This feature may be extended to support additional actions in a future release.
+To configure the Lifecycle handlers, as shown in the linked page above, specify each command as a comma-delimited list, using the following property keys:
+
+====
+[source,options=nowrap]
+----
+deployer..kubernetes.lifecycle.postStart.exec.command=/bin/sh,-c,'echo Hello from the postStart handler > /usr/share/message'
+deployer..kubernetes.lifecycle.preStop.exec.command=/bin/sh,-c,'nginx -s quit; while killall -0 nginx; do sleep 1; done'
+----
+====
+
+==== Additional Containers
+
+When you deploy applications, you may need one or more containers to be deployed along with the main container.
+This allows you to adopt deployment patterns such as sidecar or adapter in the case of a multi-container pod setup.
+
+The following example shows how you can configure additional containers for an application:
+
+====
+[source,options=nowrap]
+----
+deployer..kubernetes.additionalContainers=[{name: 'c1', image: 'busybox:1', command: ['sh', '-c', 'echo hello1'], volumeMounts: [{name: 'test-volume', mountPath: '/tmp', readOnly: true}]},{name: 'c2', image: 'busybox:1.26.1', command: ['sh', '-c', 'echo hello2']}]
+----
+====
diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-kubernetes.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-kubernetes.adoc
index d913bd8537..878d33d255 100644
--- a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-kubernetes.adoc
+++ b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-kubernetes.adoc
@@ -22,6 +22,9 @@ By default, all the features are enabled.
The `/features` REST endpoint provides information on the features that have been enabled and disabled.
+[[configuration-kubernetes-app-props]]
+include::configuration-kubernetes-app-properties.adoc[]
+
[[configuration-kubernetes-deployer]]
=== Deployer Properties
You can use the following configuration properties of the https://github.com/spring-cloud/spring-cloud-deployer-kubernetes[Kubernetes deployer] to customize how Streams and Tasks are deployed.
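For example, several of these deployer properties are typically supplied together when deploying a stream from the Data Flow shell. The following sketch is illustrative only -- the stream definition and the chosen property values are assumptions, not defaults:

[source,shell]
----
dataflow:> stream create --name mystream --definition "time | log"
dataflow:> stream deploy mystream --properties "deployer.log.kubernetes.limits.memory=1024Mi,deployer.log.kubernetes.imagePullPolicy=IfNotPresent"
----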
@@ -69,6 +72,26 @@ These properties are also used when configuring the <
+|startupProbeDelay
+|Delay in seconds when the Kubernetes startup check of the app container should start checking its health status.
+|30
+
+|startupProbePeriod
+|Period in seconds for performing the Kubernetes startup check of the app container.
+|3
+
+|startupProbeFailure
+|Number of probe failures allowed for the startup probe before the pod is restarted.
+|20
+
+|startupHttpProbePath
+|Path that the app container has to respond to for the startup check.
+|
+
+|startupProbePort
+|Port that the app container has to respond on for the startup check.
+|
+
|readinessProbeDelay
|Delay in seconds when the readiness check of the app container should start checking if the module is fully up and running.
|10
@@ -101,6 +124,18 @@ These properties are also used when configuring the <
+|limits.ephemeral-storage
+|The ephemeral-storage limit, maximum needed value to allocate a pod.
+|
+
+|limits.hugepages-2Mi
+|The hugepages-2Mi limit, maximum needed value to allocate a pod.
+|
+
+|limits.hugepages-1Gi
+|The hugepages-1Gi limit, maximum needed value to allocate a pod.
+|
+
|requests.memory
|The memory request, guaranteed needed value to allocate a pod.
|
@@ -109,6 +144,30 @@ These properties are also used when configuring the <
+|requests.ephemeral-storage
+|The ephemeral-storage request, guaranteed needed value to allocate a pod.
+|
+
+|requests.hugepages-2Mi
+|The hugepages-2Mi request, guaranteed needed value to allocate a pod.
+|
+
+|requests.hugepages-1Gi
+|The hugepages-1Gi request, guaranteed needed value to allocate a pod.
+|
+
+|affinity.nodeAffinity
+|The node affinity expressed in YAML format. e.g. ```{ requiredDuringSchedulingIgnoredDuringExecution: { nodeSelectorTerms: [ { matchExpressions: [ { key: 'kubernetes.io/e2e-az-name', operator: 'In', values: [ 'e2e-az1', 'e2e-az2']}]}]}, preferredDuringSchedulingIgnoredDuringExecution: [ { weight: 1, preference: { matchExpressions: [ { key: 'another-node-label-key', operator: 'In', values: [ 'another-node-label-value' ]}]}}]}```
+|
+
+|affinity.podAffinity
+|The pod affinity expressed in YAML format. e.g. ```{ requiredDuringSchedulingIgnoredDuringExecution: { labelSelector: { matchExpressions: [ { key: 'app', operator: 'In', values: [ 'store']}]}, topologyKey: 'kubernetes.io/hostname'}, preferredDuringSchedulingIgnoredDuringExecution: [ { weight: 1, podAffinityTerm: { labelSelector: { matchExpressions: [ { key: 'security', operator: 'In', values: [ 'S2' ]}]}, topologyKey: 'failure-domain.beta.kubernetes.io/zone'}}]}```
+|
+
+|affinity.podAntiAffinity
+|The pod anti-affinity expressed in YAML format. e.g. ```{ requiredDuringSchedulingIgnoredDuringExecution: { labelSelector: { matchExpressions: [ { key: 'app', operator: 'In', values: [ 'store']}]}, topologyKey: 'kubernetes.io/hostname'}, preferredDuringSchedulingIgnoredDuringExecution: [ { weight: 1, podAffinityTerm: { labelSelector: { matchExpressions: [ { key: 'security', operator: 'In', values: [ 'S2' ]}]}, topologyKey: 'failure-domain.beta.kubernetes.io/zone'}}]}```
+|
+
|statefulSet.volumeClaimTemplate.storageClassName
|Name of the storage class for a stateful set
|
@@ -141,6 +200,14 @@ These properties are also used when configuring the <
+|priorityClassName
+|Pod Spec priorityClassName. Create a PriorityClass in Kubernetes before using this property.
See https://kubernetes.io/docs/concepts/scheduling-eviction/pod-priority-preemption/[Pod Priority and Preemption]
+|
+
+|shareProcessNamespace
+|Will assign the value to Pod.spec.shareProcessNamespace. See https://kubernetes.io/docs/tasks/configure-pod-container/share-process-namespace/[Share Process Namespace between Containers in a Pod]
+|
+
|minutesToWaitForLoadBalancer
|Time to wait for load balancer to be available before attempting delete of service (in minutes).
|5
@@ -186,7 +253,7 @@ These properties are also used when configuring the <
|deploymentServiceAccountName
-|Service account name to use for app deployments
+|Service account name used in app deployments. Note: The service account name used for app deployments is derived from the Data Flow server's deployment.
|
|deploymentLabels
@@ -250,27 +317,105 @@ These properties are also used when configuring the <
|maximumConcurrentTasks
-|The maximum concurrent tasks allowed for this platform instance.
+|The maximum concurrent tasks allowed for this platform instance
|20
+[[pod-security-context-props]]
+|podSecurityContext
+|The security context applied to the pod expressed in YAML format. e.g. ```{runAsUser: 65534, fsGroup: 65534, supplementalGroups: [65534, 65535], seccompProfile: { type: 'RuntimeDefault' }}```. Note this defines the entire pod security context - smaller portions of the security context can instead be configured via the `podSecurityContext.**` properties below.
+|
+
|podSecurityContext.runAsUser
|The numeric user ID to run pod container processes under
|
+|podSecurityContext.runAsGroup
+|The numeric group ID to run the entrypoint of the container process
+|
+
+|podSecurityContext.runAsNonRoot
+|Indicates that the container must run as a non-root user
+|
+
|podSecurityContext.fsGroup
-|The numeric group ID to run pod container processes under
+|The numeric group ID for the volumes of the pod
|
-|affinity.nodeAffinity
-|The node affinity expressed in YAML format. e.g. ```{ requiredDuringSchedulingIgnoredDuringExecution: { nodeSelectorTerms: [ { matchExpressions: [ { key: 'kubernetes.io/e2e-az-name', operator: 'In', values: [ 'e2e-az1', 'e2e-az2']}]}]}, preferredDuringSchedulingIgnoredDuringExecution: [ { weight: 1, preference: { matchExpressions: [ { key: 'another-node-label-key', operator: 'In', values: [ 'another-node-label-value' ]}]}}]}```
+|podSecurityContext.fsGroupChangePolicy
+|Defines behavior of changing ownership and permission of the volume before being exposed inside pod (only applies to volume types which support fsGroup based ownership and permissions) - possible values are "OnRootMismatch", "Always"
|
-|affinity.podAffinity
-|The pod affinity expressed in YAML format. e.g. ```{ requiredDuringSchedulingIgnoredDuringExecution: { labelSelector: [ { matchExpressions: [ { key: 'app', operator: 'In', values: [ 'store']}]}], topologyKey: 'kubernetes.io/hostnam'}, preferredDuringSchedulingIgnoredDuringExecution: [ { weight: 1, podAffinityTerm: { labelSelector: { matchExpressions: [ { key: 'security', operator: 'In', values: [ 'S2' ]}]}, topologyKey: 'failure-domain.beta.kubernetes.io/zone'}}]}```
+|podSecurityContext.supplementalGroups
+|The numeric group IDs applied to the pod container processes, in addition to the container's primary group ID
|
-|affinity.podAntiAffinity
-|The pod anti-affinity expressed in YAML format. e.g.
```{ requiredDuringSchedulingIgnoredDuringExecution: { labelSelector: { matchExpressions: [ { key: 'app', operator: 'In', values: [ 'store']}]}], topologyKey: 'kubernetes.io/hostname'}, preferredDuringSchedulingIgnoredDuringExecution: [ { weight: 1, podAffinityTerm: { labelSelector: { matchExpressions: [ { key: 'security', operator: 'In', values: [ 'S2' ]}]}, topologyKey: 'failure-domain.beta.kubernetes.io/zone'}}]}``` +|podSecurityContext.seccompProfile +|The seccomp options to use for the pod containers expressed in YAML format. e.g. ```{ seccompProfile: { type: 'Localhost', localhostProfile: 'my-profiles/profile-allow.json' }}``` +| + +|podSecurityContext.seLinuxOptions +|The SELinux context to be applied to the pod containers expressed in YAML format. e.g. ```{ level: "s0:c123,c456" }``` (not used when spec.os.name is windows). +| + +|podSecurityContext.sysctls +|List of namespaced sysctls used for the pod expressed in YAML format. e.g. ```[{name: "kernel.shm_rmid_forced", value: 0}]``` (not used when spec.os.name is windows). +| + +|podSecurityContext.windowsOptions +|The Windows specific settings applied to all containers expressed in YAML format. e.g. ```{ gmsaCredentialSpec: "specA", gmsaCredentialSpecName: "specA-name"}``` (only used when spec.os.name is windows). +| + +[[container-security-context-props]] +|containerSecurityContext +|The security context applied to the containers expressed in YAML format. e.g. ```{allowPrivilegeEscalation: true, runAsUser: 65534}```. Note this defines the entire container security context - smaller portions of the security context can instead be configured via the `containerSecurityContext.**` properties below. +| + +|containerSecurityContext.allowPrivilegeEscalation +|Whether a process can gain more privileges than its parent process +| + +|containerSecurityContext.capabilities +|The capabilities to add/drop when running the container expressed in YAML format. e.g. ```{ add: [ "a", "b" ], drop: [ "c" ] }``` (only used when spec.os.name is not windows) +| + +|containerSecurityContext.privileged +|Run container in privileged mode. +| + +|containerSecurityContext.procMount +|The type of proc mount to use for the container (only used when spec.os.name is not windows) +| + +|containerSecurityContext.readOnlyRootFilesystem +|Mounts the container's root filesystem as read-only +| + +|containerSecurityContext.runAsUser +|The numeric user ID to run pod container processes under +| + +|containerSecurityContext.runAsGroup +|The numeric group id to run the entrypoint of the container process +| + +|containerSecurityContext.runAsNonRoot +|Indicates that the container must run as a non-root user +| + +|containerSecurityContext.seccompProfile +|The seccomp options to use for the pod containers expressed in YAML format. e.g. ```{ seccompProfile: { type: 'Localhost', localhostProfile: 'my-profiles/profile-allow.json' }}``` +| + +|containerSecurityContext.seLinuxOptions +|The SELinux context to be applied to the pod containers expressed in YAML format. e.g. ```{ level: "s0:c123,c456" }``` (not used when spec.os.name is windows). +| + +|containerSecurityContext.sysctls +|List of namespaced sysctls used for the pod expressed in YAML format. e.g. ```[{name: "kernel.shm_rmid_forced", value: 0}]``` (not used when spec.os.name is windows). +| + +|containerSecurityContext.windowsOptions +|The Windows specific settings applied to all containers expressed in YAML format. e.g. 
```{ gmsaCredentialSpec: "specA", gmsaCredentialSpecName: "specA-name"}``` (only used when spec.os.name is windows). | |statefulSetInitContainerImageName @@ -278,9 +423,12 @@ These properties are also used when configuring the < |initContainer -|An Init Container experessed in YAML format to be applied to a pod. e.g. ```{containerName: 'test', imageName: 'busybox:latest', commands: ['sh', '-c', 'echo hello']}``` +|An Init Container expressed in YAML format to be applied to a pod. e.g. ```{containerName: 'test', imageName: 'busybox:1', commands: ['sh', '-c', 'echo hello']}``` | +|additionalContainers +|Additional containers expressed in YAML format to be applied to a pod. e.g. ```[{name: 'c1', image: 'busybox:1', command: ['sh', '-c', 'echo hello1'], volumeMounts: [{name: 'test-volume', mountPath: '/tmp', readOnly: true}]}, {name: 'c2', image: 'busybox:1.26.1', command: ['sh', '-c', 'echo hello2']}]``` +| |=== [[configuration-kubernetes-tasks]] @@ -305,7 +453,7 @@ spring: accounts: dev: namespace: devNamespace - imagePullPolicy: Always + imagePullPolicy: IfNotPresent entryPointStyle: exec limits: cpu: 4 @@ -337,7 +485,7 @@ The Spring Cloud Data Flow server for Kubernetes uses the https://github.com/fab You can pass configuration properties to the Data Flow Server by using Kubernetes https://kubernetes.io/docs/tasks/configure-pod-container/configmap/[ConfigMap] and https://kubernetes.io/docs/concepts/configuration/secret/[secrets]. -The following example shows one possible configuration, which enables MySQL and sets a memory limit: +The following example shows one possible configuration, which enables MariaDB and sets a memory limit: [source,yaml] @@ -361,17 +509,17 @@ data: limits: memory: 1024Mi datasource: - url: jdbc:mysql://${MYSQL_SERVICE_HOST}:${MYSQL_SERVICE_PORT}/mysql + url: jdbc:mariadb://${MARIADB_SERVICE_HOST}:${MARIADB_SERVICE_PORT}/database username: root - password: ${mysql-root-password} + password: ${database-password} driverClassName: org.mariadb.jdbc.Driver testOnBorrow: true validationQuery: "SELECT 1" ---- -The preceding example assumes that MySQL is deployed with `mysql` as the service name. Kubernetes publishes the host and port values of these services as environment variables that we can use when configuring the apps we deploy. +The preceding example assumes that MariaDB is deployed with `mariadb` as the service name. Kubernetes publishes the host and port values of these services as environment variables that we can use when configuring the apps we deploy. -We prefer to provide the MySQL connection password in a Secrets file, as the following example shows: +We prefer to provide the MariaDB connection password in a Secrets file, as the following example shows: [source,yaml] @@ -379,25 +527,24 @@ We prefer to provide the MySQL connection password in a Secrets file, as the fol apiVersion: v1 kind: Secret metadata: - name: mysql + name: mariadb labels: - app: mysql + app: mariadb data: - mysql-root-password: eW91cnBhc3N3b3Jk + database-password: eW91cnBhc3N3b3Jk ---- The password is a base64-encoded value. [[configuration-kubernetes-rdbms]] -=== Database Configuration +=== Database -Spring Cloud Data Flow provides schemas for H2, HSQLDB, MySQL, Oracle, PostgreSQL, DB2, and SQL Server. The appropriate schema is automatically created when the server starts, provided the right database driver and appropriate credentials are in the classpath. 
+include::configuration-database.adoc[] -The JDBC drivers for MySQL (via MariaDB driver), HSQLDB, PostgreSQL, and embedded H2 are available out of the box. -If you use any other database, you need to put the corresponding JDBC driver jar on the classpath of the server. +==== Database configuration -For instance, if you use MySQL in addition to a password in the secrets file, you could provide the following properties in the ConfigMap: +When running in Kubernetes, the database properties are typically set in the ConfigMap. For instance, if you use MariaDB in addition to a password in the secrets file, you could provide the following properties in the ConfigMap: [source,yaml] @@ -406,17 +553,13 @@ data: application.yaml: |- spring: datasource: - url: jdbc:mysql://${MYSQL_SERVICE_HOST}:${MYSQL_SERVICE_PORT}/mysql + url: jdbc:mariadb://${MARIADB_SERVICE_HOST}:${MARIADB_SERVICE_PORT}/database username: root - password: ${mysql-root-password} - driverClassName: org.mariadb.jdbc.Driver - url: jdbc:mysql://${MYSQL_SERVICE_HOST}:${MYSQL_SERVICE_PORT}/test + password: ${database-password} driverClassName: org.mariadb.jdbc.Driver ---- - -For PostgreSQL, you could use the following configuration: - +Similarly, for PostgreSQL you could use the following configuration: [source,yaml] ---- @@ -430,21 +573,6 @@ data: driverClassName: org.postgresql.Driver ---- - -For HSQLDB, you could use the following configuration: - - -[source,yaml] ----- -data: - application.yaml: |- - spring: - datasource: - url: jdbc:hsqldb:hsql://${HSQLDB_SERVICE_HOST}:${HSQLDB_SERVICE_PORT}/database - username: sa - driverClassName: org.hsqldb.jdbc.JDBCDriver ----- - The following YAML snippet from a Deployment is an example of mounting a ConfigMap as `application.yaml` under `/config` where Spring Boot will process it plus a Secret mounted under `/etc/secrets` where it will get picked up by the spring-cloud-kubernetes library due to the environment variable `SPRING_CLOUD_KUBERNETES_SECRETS_PATHS` being set to `/etc/secrets`. [source,yaml] @@ -452,8 +580,8 @@ The following YAML snippet from a Deployment is an example of mounting a ConfigM ... containers: - name: scdf-server - image: springcloud/spring-cloud-dataflow-server:2.5.0.BUILD-SNAPSHOT - imagePullPolicy: Always + image: springcloud/spring-cloud-dataflow-server:2.11.3-SNAPSHOT + imagePullPolicy: IfNotPresent volumeMounts: - name: config mountPath: /config @@ -472,8 +600,8 @@ The following YAML snippet from a Deployment is an example of mounting a ConfigM path: application.yaml - name: database secret: - secretName: mysql ----- + secretName: mariadb +---- You can find migration scripts for specific database types in the https://github.com/spring-cloud/spring-cloud-task/tree/master/spring-cloud-task-core/src/main/resources/org/springframework/cloud/task/migration[spring-cloud-task] repo. @@ -493,12 +621,12 @@ kubectl get all,cm,secrets,pvc ---- -You can list all resources used by a specific application or service by using a label to select resources. The following command lists all resources used by the `mysql` service: +You can list all resources used by a specific application or service by using a label to select resources. 
The following command lists all resources used by the `mariadb` service:
[source,shell]
----
-kubectl get all -l app=mysql
+kubectl get all -l app=mariadb
----
@@ -666,15 +794,15 @@ If upgrading from a previous version of SCDF be sure to verify that `spring.data
default:
secretKeyRefs:
- envVarName: "spring.datasource.password"
- secretName: mysql
- dataKey: mysql-root-password
+ secretName: mariadb
+ dataKey: database-password
- envVarName: "spring.datasource.username"
- secretName: mysql
- dataKey: mysql-root-username
+ secretName: mariadb
+ dataKey: database-username
...
----
-Also verify that the associated secret(dataKey) is also available in secrets. SCDF provides an example of this for MySql here: `src/kubernetes/mysql/mysql-svc.yaml`.
+Also verify that the associated secret (dataKey) is available in secrets. SCDF provides an example of this for MariaDB here: `src/kubernetes/mariadb/mariadb-svc.yaml`.
NOTE: Passing DB credentials via properties is the default behavior in order to preserve backwards compatibility. This feature will be removed in a future release.
@@ -685,7 +813,6 @@ This section covers customization of how scheduled tasks are configured. Schedul
NOTE: Unless noted, properties set on a per-schedule basis always take precedence over properties set as the server configuration. This arrangement allows for the ability to override global server level properties for a specific schedule.
-See https://github.com/spring-cloud/spring-cloud-scheduler-kubernetes/blob/master/src/main/java/org/springframework/cloud/scheduler/spi/kubernetes/KubernetesSchedulerProperties.java[`KubernetesSchedulerProperties`] for more on the supported options.
==== Entry Point Style
@@ -712,7 +839,7 @@ You can also configure the Entry Point Style at the server level in the container `env` section of a deployment YAML, as the following example shows:
[source]
----
env:
-- name: SPRING_CLOUD_SCHEDULER_KUBERNETES_ENTRY_POINT_STYLE
+- name: SPRING_CLOUD_DEPLOYER_KUBERNETES_ENTRY_POINT_STYLE
  value: entryPointStyle
----
@@ -723,6 +850,44 @@ You should choose an Entry Point Style of either `exec` or `shell`, to correspon
Using the `boot` Entry Point Style corresponds to using the `exec` style `ENTRYPOINT`. Command line arguments from the deployment request are passed to the container, with the addition of application properties mapped into the `SPRING_APPLICATION_JSON` environment variable rather than command line arguments.
+===== ttlSecondsAfterFinished
+
+When scheduling an application, you can clean up finished Jobs (either Complete or Failed) automatically by specifying a `ttlSecondsAfterFinished` value.
+
+The following example shows how you can configure it for scheduled application jobs:
+
+====
+[source,options=nowrap]
+----
+deployer..kubernetes.cron.ttlSecondsAfterFinished=86400
+----
+====
+
+The following example shows how you can individually configure application jobs:
+
+====
+[source,options=nowrap]
+----
+deployer..kubernetes.ttlSecondsAfterFinished=86400
+----
+====
+
+Replace `` with the name of your application and the `ttlSecondsAfterFinished` attribute with the appropriate value for cleaning up finished Jobs.
+
+You can configure the `ttlSecondsAfterFinished` at the global server level as well.
+
+The following example shows how to do so at the server level in the container `env` section of a deployment YAML:
+
+[source]
+----
+env:
+- name: SPRING_CLOUD_DEPLOYER_KUBERNETES_TTL_SECONDS_AFTER_FINISHED
+  value: '86400'
+----
+
+
==== Environment Variables
To influence the environment settings for a given application, you can take advantage of the `spring.cloud.deployer.kubernetes.environmentVariables` property.
@@ -745,7 +910,7 @@ NOTE: When specifying environment variables in the server configuration and on a
[source]
----
env:
-- name: SPRING_CLOUD_SCHEDULER_KUBERNETES_ENVIRONMENT_VARIABLES
+- name: SPRING_CLOUD_DEPLOYER_KUBERNETES_ENVIRONMENT_VARIABLES
  value: myVar=myVal
----
@@ -765,7 +930,7 @@ The following example shows how you can individually configure containers:
[source,options=nowrap]
----
-deployer.kubernetes.imagePullPolicy=Always
+deployer.kubernetes.imagePullPolicy=IfNotPresent
----
@@ -777,7 +942,7 @@ You can configure an image pull policy at the server level in the container `env
[source]
----
env:
-- name: SPRING_CLOUD_SCHEDULER_KUBERNETES_IMAGE_PULL_POLICY
+- name: SPRING_CLOUD_DEPLOYER_KUBERNETES_IMAGE_PULL_POLICY
  value: Always
----
@@ -805,7 +970,7 @@ You can also configure the image pull secret at the server level in the containe
[source]
----
env:
-- name: SPRING_CLOUD_SCHEDULER_KUBERNETES_IMAGE_PULL_SECRET
+- name: SPRING_CLOUD_DEPLOYER_KUBERNETES_IMAGE_PULL_SECRET
  value: mysecret
----
@@ -820,7 +985,7 @@ By default the namespace used for scheduled tasks is `default`. This value can b
[source]
----
env:
-- name: SPRING_CLOUD_SCHEDULER_KUBERNETES_NAMESPACE
+- name: SPRING_CLOUD_DEPLOYER_KUBERNETES_NAMESPACE
  value: mynamespace
----
@@ -854,7 +1019,7 @@ You can also configure the service account name at the server level in the conta
[source]
----
env:
-- name: SPRING_CLOUD_SCHEDULER_KUBERNETES_TASK_SERVICE_ACCOUNT_NAME
+- name: SPRING_CLOUD_DEPLOYER_KUBERNETES_TASK_SERVICE_ACCOUNT_NAME
  value: myserviceaccountname
----
diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-local.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-local.adoc
index d315fbdc4b..8f14a22c81 100644
--- a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-local.adoc
+++ b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-local.adoc
@@ -26,181 +26,47 @@ By default, stream (requires Skipper), and tasks are enabled and Task Scheduler
The REST `/about` endpoint provides information on the features that have been enabled and disabled.
-[[configuration-local-rdbms]]
-=== Database
-
-A relational database is used to store stream and task definitions as well as the state of executed tasks.
-Spring Cloud Data Flow provides schemas for *H2*, *MySQL*, *Oracle*, *PostgreSQL*, *Db2*, and *SQL Server*. The schema is automatically created when the server starts.
-
-By default, Spring Cloud Data Flow offers an embedded instance of the *H2* database. The *H2* database is good
-for development purposes but is not recommended for production use.
-
-NOTE: *H2* database is not supported as an external mode.
+[[configuration-local-java-home]]
+=== Java Home
-The JDBC drivers for *MySQL* (through the MariaDB driver), *PostgreSQL*, *SQL Server*, and embedded *H2* are available without additional configuration.
-If you are using any other database, then you need to put the corresponding JDBC driver jar on the classpath of the server.
+When launching Spring Cloud Data Flow or the Skipper Server, the server may need to know where the Java 17 home directory is in order to successfully launch Spring Boot 3 applications.
-The database properties can be passed as environment variables or command-line arguments to the Data Flow Server.
+By passing the following properties, you can provide the paths.
-==== MySQL
-
-The following example shows how to define a MySQL database connection using MariaDB driver.
-
-[source,bash,subs=attributes]
----
+[source,shell,subs=attributes]
....
java -jar spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-{project-version}.jar \
- --spring.datasource.url=jdbc:mysql://localhost:3306/mydb \
- --spring.datasource.username= \
- --spring.datasource.password= \
- --spring.datasource.driver-class-name=org.mariadb.jdbc.Driver
----
-
-MySQL versions up to _5.7_ can be used with a MariaDB driver. Starting from version _8.0_ MySQL's own driver has to be used.
-
-[source,bash,subs=attributes]
----
-java -jar spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-{project-version}.jar \
- --spring.datasource.url=jdbc:mysql://localhost:3306/mydb \
- --spring.datasource.username= \
- --spring.datasource.password= \
- --spring.datasource.driver-class-name=com.mysql.jdbc.Driver
----
-
-NOTE: Due to licensing restrictions we're unable to bundle MySQL driver. You need to add it to
- server's classpath yourself.
-
-==== MariaDB
+ --spring.cloud.dataflow.defaults.boot3.local.javaHomePath=/usr/lib/jvm/java-17 \
+ --spring.cloud.dataflow.defaults.boot2.local.javaHomePath=/usr/lib/jvm/java-1.8
+....
-The following example shows how to define a MariaDB database connection with command Line arguments
-
-[source,bash,subs=attributes]
----
-java -jar spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-{project-version}.jar \
- --spring.datasource.url=jdbc:mariadb://localhost:3306/mydb?useMysqlMetadata=true \
- --spring.datasource.username= \
- --spring.datasource.password= \
- --spring.datasource.driver-class-name=org.mariadb.jdbc.Driver
----
+[[configuration-local-rdbms]]
+=== Database
-Starting with MariaDB v2.4.1 connector release, it is required to also add `useMysqlMetadata=true`
-to the JDBC URL. This is a required workaround until when MySQL and MariaDB entirely switch as two
-different databases.
+include::configuration-database.adoc[]
-MariaDB version _10.3_ introduced a support for real database sequences which is yet another breaking
-change while toolings around these databases fully support MySQL and MariaDB as a separate database
-types. Workaround is to use older hibernate dialect which doesn't try to use sequences.
+==== Database configuration
+When running locally, the database properties can be passed as environment variables or command-line arguments to the Data Flow Server.
For example, to start the server with MariaDB using command line arguments execute the following command: [source,bash,subs=attributes] ---- java -jar spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-{project-version}.jar \ - --spring.datasource.url=jdbc:mariadb://localhost:3306/mydb?useMysqlMetadata=true \ - --spring.datasource.username= \ - --spring.datasource.password= \ - --spring.jpa.properties.hibernate.dialect=org.hibernate.dialect.MariaDB102Dialect \ + --spring.datasource.url=jdbc:mariadb://localhost:3306/mydb \ + --spring.datasource.username=user \ + --spring.datasource.password=pass \ --spring.datasource.driver-class-name=org.mariadb.jdbc.Driver ---- - -==== PostgreSQL - -The following example shows how to define a PostgreSQL database connection with command line arguments: - -[source,bash,subs=attributes] ----- -java -jar spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-{project-version}.jar \ - --spring.datasource.url=jdbc:postgresql://localhost:5432/mydb \ - --spring.datasource.username= \ - --spring.datasource.password= \ - --spring.datasource.driver-class-name=org.postgresql.Driver ----- - -==== SQL Server - -The following example shows how to define a SQL Server database connection with command line arguments: - -[source,bash,subs=attributes] ----- -java -jar spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-{project-version}.jar \ - --spring.datasource.url='jdbc:sqlserver://localhost:1433;databaseName=mydb' \ - --spring.datasource.username= \ - --spring.datasource.password= \ - --spring.datasource.driver-class-name=com.microsoft.sqlserver.jdbc.SQLServerDriver ----- - -==== Db2 - -The following example shows how to define a Db2 database connection with command line arguments: - +Likewise, to start the server with MariaDB using environment variables execute the following command: [source,bash,subs=attributes] ---- -java -jar spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-{project-version}.jar \ - --spring.datasource.url=jdbc:db2://localhost:50000/mydb \ - --spring.datasource.username= \ - --spring.datasource.password= \ - --spring.datasource.driver-class-name=com.ibm.db2.jcc.DB2Driver ----- - -NOTE: Due to licensing restrictions we're unable to bundle Db2 driver. You need to add it to - server's classpath yourself. - -==== Oracle - -The following example shows how to define a Oracle database connection with command line arguments: - -[source,bash,subs=attributes] ----- -java -jar spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-{project-version}.jar \ - --spring.datasource.url=jdbc:oracle:thin:@localhost:1521/MYDB \ - --spring.datasource.username= \ - --spring.datasource.password= \ - --spring.datasource.driver-class-name=oracle.jdbc.OracleDriver ----- - -NOTE: Due to licensing restrictions we're unable to bundle Oracle driver. You need to add it to - server's classpath yourself. - -==== Adding a Custom JDBC Driver -To add a custom driver for the database (for example, Oracle), you should rebuild the Data Flow Server and add the dependency to the Maven `pom.xml` file. -You need to modify the maven `pom.xml` of `spring-cloud-dataflow-server` module. -There are GA release tags in GitHub repository, so you can switch to desired GA tags to add the drivers on the production-ready codebase. - -To add a custom JDBC driver dependency for the Spring Cloud Data Flow server: - -. Select the tag that corresponds to the version of the server you want to rebuild and clone the github repository. -. 
Edit the spring-cloud-dataflow-server/pom.xml and, in the `dependencies` section, add the dependency for the database driver required. In the following example , an Oracle driver has been chosen: - -[source, xml] ----- - -... - - com.oracle.jdbc - ojdbc8 - 12.2.0.1 - -... - ----- - -[start=3] -. Build the application as described in <> - -You can also provide default values when rebuilding the server by adding the necessary properties to the dataflow-server.yml file, -as shown in the following example for PostgreSQL: - -[source] ----- -spring: - datasource: - url: jdbc:postgresql://localhost:5432/mydb - username: myuser - password: mypass - driver-class-name:org.postgresql.Driver +SPRING_DATASOURCE_URL=jdbc:mariadb://localhost:3306/mydb +SPRING_DATASOURCE_USERNAME=user +SPRING_DATASOURCE_PASSWORD=pass +SPRING_DATASOURCE_DRIVER_CLASS_NAME=org.mariadb.jdbc.Driver +SPRING_JPA_DATABASE_PLATFORM=org.hibernate.dialect.MariaDB106Dialect +java -jar spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-{project-version}.jar ---- -[start=4] -. Alternatively, you can build a custom Spring Cloud Data Flow server with your build files. -There are examples of a custom server builds in our https://github.com/spring-cloud/spring-cloud-dataflow-samples/tree/master/custom-dataflow-builds[samples repo] if there is a need to add a driver jars. - [[configuration-local-rdbms-schema]] ==== Schema Handling On default database schema is managed with _Flyway_ which is convenient if it's @@ -221,9 +87,6 @@ Here's a description what happens when _Dataflow_ server is started: may be in place if a shared DB is used. * If schema is empty, flyway assumes to start from a scratch. * Goes through all needed schema migrations. -* Due to historical reasons, if we detect that schema is from _1.7.x_ line - we convert these to structures needed from _2.0.x_ onwards and fully - continue with flyway. [NOTE] ==== @@ -260,6 +123,10 @@ These properties are also used when configuring < +| Path to JDK installation for launching applications depending on their registered Boot version. `bootVersion` should be `2` or `3`. +| System property `java.home` + |shutdownTimeout |Max number of seconds to wait for app shutdown. |30 @@ -302,18 +169,17 @@ The logging configuration is located on the classpath contained in a file named By default, the log file is configured to use: -``` - - -``` +[source,xml] +---- + +---- with the logback configuration for the `RollingPolicy`: - +[source,xml] ---- - - ${LOG_FILE}.log + ${LOG_FILE} @@ -326,15 +192,15 @@ with the logback configuration for the `RollingPolicy`: ${FILE_LOG_PATTERN} - ---- To check the `java.io.tmpdir` for the current Spring Cloud Data Flow Server `local` server, -``` +[source,shell] +---- jinfo | grep "java.io.tmpdir" -``` +---- If you want to change or override any of the properties `LOG_FILE`, `LOG_PATH`, `LOG_TEMP`, `LOG_FILE_MAX_SIZE`, `LOG_FILE_MAX_HISTORY` and `LOG_FILE_TOTAL_SIZE_CAP`, please set them as system properties. @@ -347,7 +213,7 @@ Data Flow Server delegates to the Skipper server the management of the Stream's $ java -jar spring-cloud-dataflow-server-{project-version}.jar --spring.cloud.skipper.client.serverUri=https://192.51.100.1:7577/api ---- -The configuration of show streams are deployed and to which platforms, is done by configuration of `platform accounts` on the Skipper server. +The configuration of how streams are deployed and to which platforms, is done by configuration of `platform accounts` on the Skipper server. 
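+For illustration, the following is a minimal sketch of such a Skipper platform account in the Skipper server's `application.yaml`. The account name `localDev` and the property values here are assumptions, not defaults; `shutdownTimeout` and `javaOpts` are local deployer properties:
+
+[source,yaml]
+----
+spring:
+  cloud:
+    skipper:
+      server:
+        platform:
+          local:
+            accounts:
+              localDev:
+                shutdownTimeout: 60
+                javaOpts: -Xmx1024m
+----
+
+Streams deployed to the `localDev` platform account would then inherit these deployer settings.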
See the documentation on https://docs.spring.io/spring-cloud-skipper/docs/current/reference/htmlsingle/#platforms[platforms] for more information. @@ -359,7 +225,7 @@ For Tasks which are Spring Batch Jobs, the job and step execution data is also s As with streams launched by Skipper, Tasks can be launched to multiple platforms. If no platform is defined, a platform named `default` is created using the default values of the class https://github.com/spring-cloud/spring-cloud-deployer-local/blob/master/spring-cloud-deployer-local/src/main/java/org/springframework/cloud/deployer/spi/local/LocalDeployerProperties.java[LocalDeployerProperties], which is summarized in the table <> -To configure new platform accounts for the local platform, provide an entry under the `spring.cloud.dataflow.task.platform.local` section in your `application.yaml` file for via another Spring Boot supported mechanism. +To configure new platform accounts for the local platform, provide an entry under the `spring.cloud.dataflow.task.platform.local` section in your `application.yaml` file or via another Spring Boot supported mechanism. In the following example, two local platform accounts named `localDev` and `localDevDebug` are created. The keys such as `shutdownTimeout` and `javaOpts` are local deployer properties. @@ -390,139 +256,12 @@ You can configure the Data Flow server that is running locally to deploy tasks t Detailed examples for launching and scheduling tasks across multiple platforms, are available in this section https://dataflow.spring.io/docs/recipes/multi-platform-deployment/[Multiple Platform Support for Tasks] on http://dataflow.spring.io. -===== Start Skipper - -[source,bash] ----- -git clone https://github.com/spring-cloud/spring-cloud-skipper.git -cd spring-cloud/spring-cloud-skipper -./mvnw clean package -DskipTests=true -java -jar spring-cloud-skipper-server/target/spring-cloud-skipper-server-2.2.0.BUILD-SNAPSHOT.jar ----- - -===== Start Spring Cloud Data Flow - -[source,bash] ----- -git clone https://github.com/spring-cloud/spring-cloud-dataflow.git -cd spring-cloud-dataflow -./mvnw clean package -DskipTests=true -cd .. ----- - -Create a yaml file scdf.yml with the following contents: - -[source,yaml] ----- -spring: - cloud: - dataflow: - security: - authorization: - provider-role-mappings: - uaa: - map-oauth-scopes: true - role-mappings: - ROLE_CREATE: foo.create - ROLE_DEPLOY: foo.create - ROLE_DESTROY: foo.create - ROLE_MANAGE: foo.create - ROLE_MODIFY: foo.create - ROLE_SCHEDULE: foo.create - ROLE_VIEW: foo.view - security: - oauth2: - client: - registration: - uaa: - redirect-uri: '{baseUrl}/login/oauth2/code/{registrationId}' - authorization-grant-type: authorization_code - client-id: dataflow - client-secret: dataflow - scope: <1> - - openid - - foo.create - - foo.view - provider: - uaa: - jwk-set-uri: http://uaa:8080/uaa/token_keys - token-uri: http://uaa:8080/uaa/oauth/token - user-info-uri: http://uaa:8080/uaa/userinfo <2> - user-name-attribute: user_name - authorization-uri: http://uaa:8080/uaa/oauth/authorize - resourceserver: - opaquetoken: <3> - introspection-uri: http://uaa:8080/uaa/introspect - client-id: dataflow - client-secret: dataflow ----- - -<1> If you use scopes to identify roles, please make sure to also request - the relevant scopes, e.g `dataflow.view`, `dataflow.create` and don't forget to request the `openid` scope -<2> Used to retrieve profile information, e.g. 
username for display purposes (mandatory) -<3> Used for token introspection and validation (mandatory) - -The `introspection-uri` property is especially important when passing an externally retrieved (opaque) -OAuth Access Token to Spring Cloud Data Flow. In that case Spring Cloud Data Flow will take the OAuth Access, -and use the UAA's https://docs.cloudfoundry.org/api/uaa/version/74.4.0/index.html#introspect-token[Introspect Token Endpoint] -to not only check the validity of the token but also retrieve the associated OAuth scopes from the UAA - -Finally startup Spring Cloud Data Flow: - -[source,bash] ----- -java -jar spring-cloud-dataflow/spring-cloud-dataflow-server/target/spring-cloud-dataflow-server-2.4.0.BUILD-SNAPSHOT.jar --spring.config.additional-location=scdf.yml ----- - -[[configuration-security-role-mapping]] -===== Role Mappings - -By default all roles are assigned to users that login to Spring Cloud Data Flow. -However, you can set the property: - -`spring.cloud.dataflow.security.authorization.provider-role-mappings.uaa.map-oauth-scopes: true` +[[configuration-local-security]] +=== Security Configuration -This will instruct the underlying `DefaultAuthoritiesExtractor` to map -OAuth scopes to the respective authorities. The following scopes are supported: - -* Scope `dataflow.create` maps to the `CREATE` role -* Scope `dataflow.deploy` maps to the `DEPLOY` role -* Scope `dataflow.destroy` maps to the `DESTROY` role -* Scope `dataflow.manage` maps to the `MANAGE` role -* Scope `dataflow.modify` maps to the `MODIFY` role -* Scope `dataflow.schedule` maps to the `SCHEDULE` role -* Scope `dataflow.view` maps to the `VIEW` role - -Additionally you can also map arbitrary scopes to each of the Data Flow roles: - -[source,yaml] ----- -spring: - cloud: - dataflow: - security: - authorization: - provider-role-mappings: - uaa: - map-oauth-scopes: true # <1> - role-mappings: - ROLE_CREATE: dataflow.create # <2> - ROLE_DEPLOY: dataflow.deploy - ROLE_DESTROY: dataflow.destoy - ROLE_MANAGE: dataflow.manage - ROLE_MODIFY: dataflow.modify - ROLE_SCHEDULE: dataflow.schedule - ROLE_VIEW: dataflow.view ----- - -<1> Enables explicit mapping support from OAuth scopes to Data Flow roles -<2> When role mapping support is enabled, you must provide a mapping for -all 7 Spring Cloud Data Flow roles *ROLE_CREATE*, *ROLE_DEPLOY*, *ROLE_DESTROY*, *ROLE_MANAGE*, *ROLE_MODIFY*, *ROLE_SCHEDULE*, *ROLE_VIEW*. - -[TIP] -==== -You can assign an OAuth scope to multiple Spring Cloud Data Flow roles, giving you flexible regarding the granularity of your authorization configuration. -==== +[[configuration-local-security-cloudfoundry-uaa]] +==== CloudFoundry User Account and Authentication (UAA) Server +See the <> configuration section for details how to configure for local testing and development. [[configuration-security-ldap-authentication]] ==== LDAP Authentication @@ -588,62 +327,17 @@ uaac user get ---- ==== -[[configuration-security-ldap-uaa-example]] -===== LDAP Security and UAA Example Application - -In order to get up and running quickly and to help you understand the security architecture, we -provide the https://github.com/spring-cloud/spring-cloud-dataflow-samples/tree/master/security-ldap-uaa-example[LDAP Security and UAA Example] -on GitHub. - -[IMPORTANT] -==== -This is solely a demo/example application and shall not be used in production. 
-==== - -The setup consists of: - -* Spring Cloud Data Flow Server -* Skipper Server -* CloudFoundry User Account and Authentication (UAA) Server -* Lightweight Directory Access Protocol (LDAP) Server (provided by https://directory.apache.org/[Apache Directory Server] (ApacheDS)) - -Ultimately, as part of this example, you will learn how to configure and launch -a Composed Task using this security setup. - [[configuration-security-spring-security-oauth2-example]] ==== Spring Security OAuth2 Resource/Authorization Server Sample For local testing and development, you may also use the Resource and Authorization Server support provided by -https://projects.spring.io/spring-security-oauth/[Spring Security OAuth]. It -allows you to easily create your own (very basic) OAuth2 Server with the following simple annotations: - -* `@EnableResourceServer` -* `@EnableAuthorizationServer` +https://spring.io/projects/spring-security/[Spring Security]. It +allows you to easily create your own OAuth2 Server by configuring the SecurityFilterChain. -NOTE: In fact the UAA uses Spring Security OAuth2 under the covers, thus the basic endpoints -are the same. +Samples can be found at: +https://docs.spring.io/spring-security/reference/samples.html[Spring Security Samples] -A working example application can be found at: -https://github.com/ghillert/oauth-test-server/[https://github.com/ghillert/oauth-test-server/] - -Clone the project and configure Spring Cloud Data Flow with the respective Client ID and Client Secret: - -[source,yaml] ----- -security: - oauth2: - client: - client-id: myclient - client-secret: mysecret - access-token-uri: http://127.0.0.1:9999/oauth/token - user-authorization-uri: http://127.0.0.1:9999/oauth/authorize - resource: - user-info-uri: http://127.0.0.1:9999/me - token-info-uri: http://127.0.0.1:9999/oauth/check_token ----- - -IMPORTANT: This sample application is not intended for production use [[configuration-security-shell-authentication]] ==== Data Flow Shell Authentication @@ -658,7 +352,7 @@ $ java -jar spring-cloud-dataflow-shell-{project-version}.jar \ --dataflow.uri=http://localhost:9393 \ # <1> --dataflow.username=my_username \ # <2> --dataflow.password=my_password \ # <3> - --skip-ssl-validation true \ # <4> + --skip-ssl-validation \ # <4> ---- <1> Optional, defaults to http://localhost:9393. @@ -679,7 +373,7 @@ server-unknown:>dataflow config server \ --uri http://localhost:9393 \ # <1> --username myuser \ # <2> --password mysecret \ # <3> - --skip-ssl-validation true \ # <4> + --skip-ssl-validation \ # <4> ---- <1> Optional, defaults to http://localhost:9393. @@ -691,7 +385,7 @@ The following image shows a typical shell command to connect to and authenticate Flow Server: .Target and Authenticate with the Data Flow Server from within the Shell -image::{dataflow-asciidoc}/images/dataflow-security-shell-target.png[Target and Authenticate with the Data Flow Server from within the Shell, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-security-shell-target.png[Target and Authenticate with the Data Flow Server from within the Shell, scaledwidth="100%"] Once successfully targeted, you should see the following output: @@ -728,7 +422,7 @@ $ java -jar spring-cloud-dataflow-shell-{project-version}.jar \ ==== -=== About Configuration +=== About API Configuration The Spring Cloud Data Flow About Restful API result contains a display name, version, and, if specified, a URL for each of the major dependencies that comprise Spring Cloud Data Flow. 
The result (if enabled) also contains the @@ -736,34 +430,52 @@ sha1 and or sha256 checksum values for the shell dependency. The information that is returned for each of the dependencies is configurable by setting the following properties: -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-core.name: the -name to be used for the core. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-core.version: -the version to be used for the core. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-dashboard.name: the -name to be used for the dashboard. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-dashboard.version: -the version to be used for the dashboard. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-implementation.name: the -name to be used for the implementation. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-implementation.version: -the version to be used for the implementation. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.name: the -name to be used for the shell. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.version: -the version to be used for the shell. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.url: -the URL to be used for downloading the shell dependency. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha1: the sha1 -checksum value that is returned with the shell dependency info. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha256: -the sha256 checksum value that is returned with the shell dependency info. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha1-url: -if the `spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha1` -is not specified, SCDF uses the contents of the file specified at this URL for the checksum. -* spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha256-url: -if the `spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha256` -is not specified, SCDF uses the contents of the file specified at this URL for the checksum. 
+[frame="none"] +[cols="6,4"] |=== |Property Name | Description + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-core.name# +|[.small]#Name to be used for the core# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-core.version# +|[.small]#Version to be used for the core# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-dashboard.name# +|[.small]#Name to be used for the dashboard# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-dashboard.version# +|[.small]#Version to be used for the dashboard# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-implementation.name# +|[.small]#Name to be used for the implementation# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-implementation.version# +|[.small]#Version to be used for the implementation# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.name# +|[.small]#Name to be used for the shell# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.version# +|[.small]#Version to be used for the shell# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.url# +|[.small]#URL to be used for downloading the shell dependency# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha1# +|[.small]#Sha1 checksum value that is returned with the shell dependency info# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha256# +|[.small]#Sha256 checksum value that is returned with the shell dependency info# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha1-url# +|[.small]#If `spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha1` +is not specified, SCDF uses the contents of the file specified at this URL for the checksum# + +|[.small]#spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha256-url# +|[.small]#If `spring.cloud.dataflow.version-info.spring-cloud-dataflow-shell.checksum-sha256` is not specified, SCDF uses the contents of the file specified at this URL for the checksum# +|=== + ==== Enabling Shell Checksum values By default, checksum values are not displayed for the shell dependency. If @@ -774,13 +486,21 @@ you need this feature enabled, set the There are reserved values (surrounded by curly braces) that you can insert into the URL that will make sure that the links are up to date: -* repository: if using a build-snapshot, milestone, or release candidate of +* `repository`: If using a build-snapshot, milestone, or release candidate of Data Flow, the repository refers to the repo-spring-io repository. Otherwise, it refers to Maven Central. -* version: Inserts the version of the jar/pom. +* `version`: Inserts the version of the jar/pom.
For example, -`https://myrepository/org/springframework/cloud/spring-cloud-dataflow-shell/\{version}/spring-cloud-dataflow-shell-\{version}.jar` + +[source] +---- +https://myrepository/org/springframework/cloud/spring-cloud-dataflow-shell/{version}/spring-cloud-dataflow-shell-{version}.jar +---- produces -`https://myrepository/org/springframework/cloud/spring-cloud-dataflow-shell/1.2.3.RELEASE/spring-cloud-dataflow-shell-1.2.3.RELEASE.jar` -if you were using the 1.2.3.RELEASE version of the Spring Cloud Data Flow Shell + +[source] +---- +https://myrepository/org/springframework/cloud/spring-cloud-dataflow-shell/2.11.0/spring-cloud-dataflow-shell-2.11.0.jar +---- +if you were using the `2.11.0` version of the Spring Cloud Data Flow Shell. diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration.adoc index 3dbd0f47ae..c2c17c5085 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration.adoc @@ -2,8 +2,8 @@ = Configuration [[configuration-maven]] -== Maven - +== Maven Resources +Spring Cloud Data Flow supports referencing artifacts via Maven (`maven:`). If you want to override specific Maven configuration properties (remote repositories, proxies, and others) or run the Data Flow Server behind a proxy, you need to specify those properties as command-line arguments when you start the Data Flow Server, as shown in the following example: @@ -319,7 +319,7 @@ Do not forget to target the Data Flow Server with the following command: ==== [source,bash] ---- -dataflow:> dataflow config server https://localhost:8443/ +dataflow:> dataflow config server --uri https://localhost:8443/ ---- ==== @@ -431,9 +431,10 @@ access the REST API. To do so, retrieve an OAuth2 Access Token from your OAuth2 provider and pass that access token to the REST API by using the *Authorization* HTTP header, as follows: -``` +[source,shell] +---- $ curl -H "Authorization: Bearer <access token>" http://localhost:9393/ -H 'Accept: application/json' -``` +---- [[configuration-security-customizing-authorization]] ==== Customizing Authorization @@ -454,7 +455,87 @@ setting the boolean property `map-oauth-scopes` for your provider to `true` (the For example, if your provider's ID is `uaa`, the property would be `spring.cloud.dataflow.security.authorization.provider-role-mappings.uaa.map-oauth-scopes`. -For more details, see the chapter on <>. +[[configuration-security-role-mapping]] +===== Role Mappings + +By default, all roles are assigned to users that log in to Spring Cloud Data Flow. +However, you can set the property: + +`spring.cloud.dataflow.security.authorization.provider-role-mappings.uaa.map-oauth-scopes: true` + +This will instruct the underlying `DefaultAuthoritiesExtractor` to map +OAuth scopes to the respective authorities.
The following scopes are supported: + +* Scope `dataflow.create` maps to the `CREATE` role +* Scope `dataflow.deploy` maps to the `DEPLOY` role +* Scope `dataflow.destroy` maps to the `DESTROY` role +* Scope `dataflow.manage` maps to the `MANAGE` role +* Scope `dataflow.modify` maps to the `MODIFY` role +* Scope `dataflow.schedule` maps to the `SCHEDULE` role +* Scope `dataflow.view` maps to the `VIEW` role + +Additionally, you can map arbitrary scopes to each of the Data Flow roles: + +[source,yaml] +---- +spring: + cloud: + dataflow: + security: + authorization: + provider-role-mappings: + uaa: + map-oauth-scopes: true # <1> + role-mappings: + ROLE_CREATE: dataflow.create # <2> + ROLE_DEPLOY: dataflow.deploy + ROLE_DESTROY: dataflow.destroy + ROLE_MANAGE: dataflow.manage + ROLE_MODIFY: dataflow.modify + ROLE_SCHEDULE: dataflow.schedule + ROLE_VIEW: dataflow.view +---- + +<1> Enables explicit mapping support from OAuth scopes to Data Flow roles +<2> When role mapping support is enabled, you must provide a mapping for +all 7 Spring Cloud Data Flow roles *ROLE_CREATE*, *ROLE_DEPLOY*, *ROLE_DESTROY*, *ROLE_MANAGE*, *ROLE_MODIFY*, *ROLE_SCHEDULE*, *ROLE_VIEW*. + +[TIP] +==== +You can assign an OAuth scope to multiple Spring Cloud Data Flow roles, giving you flexibility regarding the granularity of your authorization configuration. +==== + +[[configuration-security-group-mapping]] +===== Group Mappings + +Mapping roles from scopes has its own problems, as it may not always be possible +to change those scopes in a given identity provider. If it is possible to define group claims +in a token returned from an identity provider, these can also be used to +map to server roles. + +==== +[source,yaml] +---- +spring: + cloud: + dataflow: + security: + authorization: + provider-role-mappings: + uaa: + map-oauth-scopes: false + map-group-claims: true + group-claim: roles + group-mappings: + ROLE_CREATE: my-group-id + ROLE_DEPLOY: my-group-id + ROLE_DESTROY: my-group-id + ROLE_MANAGE: my-group-id + ROLE_MODIFY: my-group-id + ROLE_SCHEDULE: my-group-id + ROLE_VIEW: my-group-id +---- +==== You can also customize the role-mapping behavior by providing your own Spring bean definition that extends Spring Cloud Data Flow's `AuthorityMapper` interface. In that case, @@ -579,6 +660,9 @@ spring: - POST /tasks/executions/* => hasRole('ROLE_DEPLOY') - DELETE /tasks/executions/* => hasRole('ROLE_DESTROY') + - GET /tasks/thinexecutions => hasRole('ROLE_VIEW') + - GET /tasks/thinexecutions/* => hasRole('ROLE_VIEW') + # Task Schedules - GET /tasks/schedules => hasRole('ROLE_VIEW') @@ -647,7 +731,7 @@ which the user is not authorized. ===== Securing the Spring Boot Management Endpoints When security is enabled, the -{spring-boot-docs-reference}/html/production-ready-monitoring.html[Spring Boot HTTP Management Endpoints] +{spring-boot-docs}/#actuator.monitoring[Spring Boot HTTP Management Endpoints] are secured in the same way as the other REST endpoints. The management REST endpoints are available under `/management` and require the `MANAGEMENT` role.
@@ -818,3 +902,4 @@ By using the `token_format` parameter, you can request the token to be either: include::configuration-local.adoc[] include::configuration-cloudfoundry.adoc[] include::configuration-kubernetes.adoc[] +include::configuration-carvel.adoc[] diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/dashboard.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/dashboard.adoc index 0f58b77e6e..df36b51d85 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/dashboard.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/dashboard.adoc @@ -33,7 +33,7 @@ NOTE: The default Dashboard server port is `9393`. The following image shows the opening page of the Spring Cloud Data Flow dashboard: .The Spring Cloud Data Flow Dashboard -image::{dataflow-asciidoc}/images/dataflow-dashboard-about.png[The Spring Cloud Data Flow Dashboard, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-dashboard-about.png[The Spring Cloud Data Flow Dashboard, scaledwidth="100%"] @@ -46,7 +46,7 @@ You can import a number of applications at once by using the Bulk Import Applica The following image shows a typical list of available applications within the dashboard: .List of Available Applications -image::{dataflow-asciidoc}/images/dataflow-available-apps-list.png[List of available applications, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-available-apps-list.png[List of available applications, scaledwidth="100%"] @@ -67,8 +67,8 @@ The following examples show typical application definitions: ==== [source,subs=properties] ---- -task.timestamp=maven://org.springframework.cloud.task.app:timestamp-task:1.2.0.RELEASE -processor.transform=maven://org.springframework.cloud.stream.app:transform-processor-rabbit:1.2.0.RELEASE +task.timestamp=maven://org.springframework.cloud.task.app:timestamp-task:3.0.0 +processor.transform=maven://org.springframework.cloud.stream.app:transform-processor-rabbit:5.0.0 ---- ==== @@ -79,7 +79,7 @@ After setting your definitions through one of these routes, click *Import Applic The following image shows an example page of one way to bulk import applications: .Bulk Import Applications -image::{dataflow-asciidoc}/images/dataflow-bulk-import-applications.png[Bulk Import Applications, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-bulk-import-applications.png[Bulk Import Applications, scaledwidth="100%"] @@ -93,7 +93,7 @@ A list of the used deployment properties is available by clicking on the applica The following image shows an example of the *Runtime* tab in use: .List of Running Applications -image::{dataflow-asciidoc}/images/dataflow-runtime.png[List of running applications, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-runtime.png[List of running applications, scaledwidth="100%"] @@ -121,13 +121,13 @@ Hovering over the boxes in the visual representation shows more details about th In the following screenshot, the `timer` stream has been expanded to show the visual representation: .List of Stream Definitions -image::{dataflow-asciidoc}/images/dataflow-streams-list-definitions.png[List of Stream Definitions, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-streams-list-definitions.png[List of Stream Definitions, scaledwidth="100%"] If you click the details button, the view changes to show a visual representation of that stream and any related streams. 
In the preceding example, if you click details for the `timer` stream, the view changes to the following view, which clearly shows the relationship between the three streams (two of them are tapping into the `timer` stream): .Stream Details Page -image::{dataflow-asciidoc}/images/dataflow-stream-details.png[Stream Details Page, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-stream-details.png[Stream Details Page, scaledwidth="100%"] @@ -148,7 +148,7 @@ The Spring Flo https://github.com/spring-projects/spring-flo/wiki[wiki] includes The following image shows the Flo designer in use: .Flo for Spring Cloud Data Flow -image::{dataflow-asciidoc}/images/dataflow-flo-create-stream.png[Flo for Spring Cloud Data Flo, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-flo-create-stream.png[Flo for Spring Cloud Data Flo, scaledwidth="100%"] @@ -168,17 +168,17 @@ You can switch between both views. TIP: The form builder offers stronger validation of the inputs. .The following image shows the form builder -image::{dataflow-asciidoc}/images/dataflow-stream-deploy-builder.png[Form builder, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-stream-deploy-builder.png[Form builder, scaledwidth="100%"] .The following image shows the same properties in the free text -image::{dataflow-asciidoc}/images/dataflow-stream-deploy-freetext.png[Free text, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-stream-deploy-freetext.png[Free text, scaledwidth="100%"] [[dashboard-stream-logs]] === Accessing Stream Logs Once the stream applications are deployed, their logs can be accessed from the Stream `summary` page, as the following image shows: -image::{dataflow-asciidoc}/images/dataflow-stream-logs.png[Stream Logs, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-stream-logs.png[Stream Logs, scaledwidth="100%"] [[dashboard-flo-streams-designer-fanin-fanout]] === Creating Fan-In and Fan-Out Streams @@ -187,7 +187,7 @@ In the <> chapter, you can learn The UI provides dedicated support for named destinations as well: .Flo for Spring Cloud Data Flow -image::{dataflow-asciidoc}/images/dataflow-flo-create-stream-fanin-fanout.png[Fan-in and Fan-out example, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-flo-create-stream-fanin-fanout.png[Fan-in and Fan-out example, scaledwidth="100%"] In this example, we have data from an _HTTP Source_ and a _JDBC Source_ that is being sent to the _sharedData_ channel, which represents a fan-in use case. @@ -202,7 +202,7 @@ To create the tap stream, connect the output connector of the _HTTP Source_ to t The connection is displayed as a dotted line, indicating that you created a tap stream. .Creating a Tap Stream -image::{dataflow-asciidoc}/images/dataflow-flo-create-tap-stream.png[Tap stream example, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-flo-create-tap-stream.png[Tap stream example, scaledwidth="100%"] The primary stream (_HTTP Source_ to _File Sink_) will be automatically named, in case you did not provide a name for the stream, yet. When creating tap streams, the primary stream must always be explicitly named. @@ -211,7 +211,7 @@ In the preceding image, the primary stream was named _HTTP_INGEST_. By using the Dashboard, you can also switch the primary stream so that it becomes the secondary tap stream. 
.Change Primary Stream to Secondary Tap Stream -image::{dataflow-asciidoc}/images/dataflow-flo-tap-stream-switch-to-primary-stream.png[Switch tap stream to primary stream, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-flo-tap-stream-switch-to-primary-stream.png[Switch tap stream to primary stream, scaledwidth="100%"] Hover over the existing primary stream, the line between _HTTP Source_ and _File Sink_. Several control icons appear, and, by clicking on the icon labeled _Switch to/from tap_, @@ -219,7 +219,7 @@ you change the primary stream into a tap stream. Do the same for the tap stream and switch it to a primary stream. .End Result of Switching the Primary Stream -image::{dataflow-asciidoc}/images/dataflow-flo-tap-stream-switch-to-primary-stream-result.png[End result of switching the tap stream to a primary stream, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-flo-tap-stream-switch-to-primary-stream-result.png[End result of switching the tap stream to a primary stream, scaledwidth="100%"] TIP: When interacting directly with <>, @@ -233,17 +233,17 @@ The *Import/Export* tab of the Dashboard includes a page that provides the optio The following image shows the streams export page: .Stream Utils Export page -image::{dataflow-asciidoc}/images/dataflow-streams-utils-export.png[Stream Utils Export, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-streams-utils-export.png[Stream Utils Export, scaledwidth="100%"] When importing the streams, you have to import from a valid JSON file. You can either manually draft the file or export the file from the streams export page. .Stream Utils Import page -image::{dataflow-asciidoc}/images/dataflow-streams-utils-import.png[Stream Utils Import, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-streams-utils-import.png[Stream Utils Import, scaledwidth="100%"] After importing the file, you get confirmation of whether the operation completed successfully. .Stream Utils Import Result page -image::{dataflow-asciidoc}/images/dataflow-streams-utils-import-result.png[Stream Utils Import Result, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-streams-utils-import-result.png[Stream Utils Import Result, scaledwidth="100%"] ifndef::omit-tasks-docs[] @@ -269,7 +269,7 @@ TIP: You can also use this tab to create Batch Jobs. 
The following image shows a typical list of task applications: .List of Task Apps -image::{dataflow-asciidoc}/images/dataflow-task-apps-list.png[List of Task Apps, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-task-apps-list.png[List of Task Apps, scaledwidth="100%"] On this screen, you can perform the following actions: @@ -288,13 +288,13 @@ This page lists the Data Flow task definitions and provides actions to launch or The following image shows the Definitions page: .List of Task Definitions -image::{dataflow-asciidoc}/images/dataflow-task-definitions-list.png[List of Task Definitions, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-task-definitions-list.png[List of Task Definitions, scaledwidth="100%"] ==== Create a Task Definition The following image shows a task definition composed of the timestamp application as well as the list of task applications that can be used to create a task definiton: -image::{dataflow-asciidoc}/images/dataflow-task-definition-create.png[List of Task Applications, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-task-definition-create.png[List of Task Applications, scaledwidth="100%"] On this page, you can also specify various properties that are used during the deployment of the application. Once you are satisfied with the task definition, you can click the *CREATE TASK* button. A dialog box then asks for a task definition name and description. At a minimum, you must provide a name for the new definition. @@ -316,7 +316,7 @@ NOTE: Task parameters are not typed. The following image shows the composed task designer: .Composed Task Designer -image::{dataflow-asciidoc}/images/dataflow-ctr-flo-tab.png[Composed Task Designer, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-ctr-flo-tab.png[Composed Task Designer, scaledwidth="100%"] @@ -327,7 +327,7 @@ To do so, click the *Tasks* tab and select the task you want to launch by pressi The following image shows the Task Launch page: .Task Launch Page -image::{dataflow-asciidoc}/images/dataflow-task-launch.png[Task Launch, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-task-launch.png[Task Launch, scaledwidth="100%"] ==== Import/Export Tasks @@ -337,17 +337,17 @@ The *Import/Export* page provides the option to import and export tasks. This The following image shows the tasks export page: .Tasks Utils Export page -image::{dataflow-asciidoc}/images/dataflow-tasks-utils-export.png[Tasks Utils Export, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-tasks-utils-export.png[Tasks Utils Export, scaledwidth="100%"] Similarly, you can import task definitions. To do so, click the *Import/Export* option on the left side of page. From here, click the *Import task(s): Import tasks from a JSON file* option to show the *Import Tasks* page. On the *Import Tasks* page, you have to import from a valid JSON file. You can either manually draft the file or export the file from the *Tasks Export* page. .Tasks Utils Import page -image::{dataflow-asciidoc}/images/dataflow-tasks-utils-import.png[Tasks Utils Import, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-tasks-utils-import.png[Tasks Utils Import, scaledwidth="100%"] After importing the file, you get confirmation on whether the operation completed successfully. 
.Tasks Utils Import Result page -image::{dataflow-asciidoc}/images/dataflow-tasks-utils-import-result.png[Tasks Utils Import Result, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-tasks-utils-import-result.png[Tasks Utils Import Result, scaledwidth="100%"] @@ -361,14 +361,14 @@ Finally, you can clean up one or more task executions. This operation removes an The following image shows the *Executions* tab: .List of Task Executions -image::{dataflow-asciidoc}/images/dataflow-task-executions-list.png[List of Task Executions, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-task-executions-list.png[List of Task Executions, scaledwidth="100%"] [[dashboard-tasks-execution-detail]] === Execution Detail For each task execution on the *Task Executions* tab, you can retrieve detailed information about a specific execution by clicking the *Execution ID* of the task execution. -image::{dataflow-asciidoc}/images/dataflow-task-execution-detail.png[List of Task Executions, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-task-execution-detail.png[List of Task Executions, scaledwidth="100%"] On this screen, you can view not only the information from the task executions page but also: @@ -391,7 +391,7 @@ Additionally, you can trigger the following operations: To submit a stop task execution request to the platform, click the drop down button next to the task execution that needs to be stopped. Now click the *Stop task* option. The dashboard presents a dialog box asking if you are sure that you want to stop the task execution. If so, click `Stop Task Execution(s)`. -image::{dataflow-asciidoc}/images/dataflow-task-execution-stop.png[Stop Executing Tasks, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-task-execution-stop.png[Stop Executing Tasks, scaledwidth="100%"] NOTE: Child Spring Cloud Task applications launched via Spring Batch applications that use remote partitioning are not stopped. @@ -417,7 +417,7 @@ NOTE: Clicking the stop button actually sends a stop request to the running job, The following image shows the *Jobs* tab: .List of Job Executions -image::{dataflow-asciidoc}/images/dataflow-job-executions-list.png[List of Job Executions, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-job-executions-list.png[List of Job Executions, scaledwidth="100%"] @@ -429,7 +429,7 @@ After you have launched a batch job, the Job Execution Details page shows inform The following image shows the Job Execution Details page: .Job Execution Details -image::{dataflow-asciidoc}/images/dataflow-jobs-job-execution-details.png[Job Execution Details, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-jobs-job-execution-details.png[Job Execution Details, scaledwidth="100%"] The Job Execution Details page contains a list of the executed steps. You can further drill into the details of each step's execution by clicking the magnifying glass icon. @@ -444,7 +444,7 @@ The Step Execution Details page provides information about an individual step wi The following image shows the Step Execution Details page: .Step Execution Details -image::{dataflow-asciidoc}/images/dataflow-step-execution-history.png[Step Execution History, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-step-execution-history.png[Step Execution History, scaledwidth="100%"] The Step Execution Details screen provides a complete list of all Step Execution Context key-value pairs. 
@@ -459,7 +459,27 @@ When that happens, check the server log files for further details. [[dashboard-job-executions-steps-progress]] === Step Execution History -Under *Step Execution History*, you can also view various metrics associated with the selected step, such as duration, read counts, write counts, and others. +Under *Step Execution History*, you can also view various metrics associated with the selected step, such as duration, read counts, write counts, and others across all of its executions. +For each metric, there are five attributes: + +* Count - The number of step executions that the metric could have participated in. It is not a count of the number of times the event occurred during each step execution. +* Min - The minimum value for the metric across all the executions for this step. +* Max - The maximum value for the metric across all the executions for this step. +* Mean - The mean value for the metric across all the executions for this step. +* Standard Deviation - The standard deviation for the metric across all the executions for this step. + +The Step Execution contains the following metrics: + +* Commit Count - The max, min, mean, and standard deviation for the number of commits of all the executions for the given step. +* Duration - The max, min, mean, and standard deviation for the duration of all the executions for the given step. +* Duration Per Read - The max, min, mean, and standard deviation for the duration per read of all the executions for the given step. +* Filter Count - The max, min, mean, and standard deviation for the number of filters of all the executions for the given step. +* Process Skip Count - The max, min, mean, and standard deviation for the process skips of all the executions for the given step. +* Read Count - The max, min, mean, and standard deviation for the number of reads of all the executions for the given step. +* Read Skip Count - The max, min, mean, and standard deviation for the number of read skips of all the executions for the given step. +* Rollback Count - The max, min, mean, and standard deviation for the number of rollbacks of all the executions for the given step. +* Write Count - The max, min, mean, and standard deviation for the number of writes of all the executions for the given step. +* Write Skip Count - The max, min, mean, and standard deviation for the number of write skips of all the executions for the given step.
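+
+For example, if a given step has three executions with read counts of 10, 20, and 30, the Read Count metric would report a Count of 3, a Min of 10, a Max of 30, and a Mean of 20, with the Standard Deviation computed over those same three values. (These numbers are illustrative only, not output from a real run.)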
endif::omit-tasks-docs[] @@ -490,13 +510,13 @@ are recorded for: The following image shows the Audit Records page: .List Overview of Audit Records -image::{dataflow-asciidoc}/images/dataflow-audit-records-list.png[List of available audit records, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-audit-records-list.png[List of available audit records, scaledwidth="100%"] By clicking the _show details_ icon (the "`i`" in a circle on the right), you can obtain further details regarding the auditing details: .List Details of an Audit Record -image::{dataflow-asciidoc}/images/dataflow-audit-records-details.png[Details of a single audit record, scaledwidth="100%"] +image::{dataflow-asciidoc-images}/dataflow-audit-records-details.png[Details of a single audit record, scaledwidth="100%"] Generally, auditing provides the following information: diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started-kubernetes.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started-kubernetes.adoc index 64105173ab..38331658e5 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started-kubernetes.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started-kubernetes.adoc @@ -1,1060 +1,8 @@ [[getting-started-kubernetes]] == Getting Started - Kubernetes -https://cloud.spring.io/spring-cloud-dataflow/[Spring Cloud Data Flow] is a toolkit for building data integration and real-time data-processing pipelines. - -Pipelines consist of Spring Boot applications built with the Spring Cloud Stream or Spring Cloud Task microservice frameworks. -This makes Spring Cloud Data Flow suitable for a range of data-processing use cases, from import-export to event streaming and predictive analytics. - -This project provides support for using Spring Cloud Data Flow with Kubernetes as the runtime for these pipelines, with applications packaged as Docker images. - -See the link:https://dataflow.spring.io/docs/installation/kubernetes/[Kubernetes] section of the microsite for more information on installing Spring Cloud Data Flow on Kubernetes. +This section covers how to get started with Spring Cloud Data Flow running locally on Kubernetes. See xref:configuration-carvel[Deployment using Carvel] and xref:configuration-kubernetes[Configuration - Kubernetes] for more information on installing Spring Cloud Data Flow on Kubernetes. Once you have the Data Flow server installed on Kubernetes, you probably want to get started with orchestrating the deployment of readily available pre-built applications into a coherent streaming or batch data pipelines. We have guides to help you get started with both link:https://dataflow.spring.io/docs/stream-developer-guides/[Stream] and link:https://dataflow.spring.io/docs/batch-developer-guides/[Batch] processing. -=== Application and Server Properties - -This section covers how you can customize the deployment of your applications. You can use a number of properties to influence settings for the applications that are deployed. Properties can be applied on a per-application basis or in the appropriate server configuration for all deployed applications. - -NOTE: Properties set on a per-application basis always take precedence over properties set as the server configuration. This arrangement lets you override global server level properties on a per-application basis. 
- -Properties to be applied for all deployed Tasks are defined in the `src/kubernetes/server/server-config-[binder].yaml` file and for Streams in `src/kubernetes/skipper/skipper-config-[binder].yaml`. Replace `[binder]` with the messaging middleware you are using -- for example, `rabbit` or `kafka`. - -==== Memory and CPU Settings - -Applications are deployed with default memory and CPU settings. If you need to, you can adjust these values. The following example shows how to set `Limits` to `1000m` for `CPU` and `1024Mi` for memory and `Requests` to `800m` for CPU and `640Mi` for memory: - -==== -[source] ----- -deployer..kubernetes.limits.cpu=1000m -deployer..kubernetes.limits.memory=1024Mi -deployer..kubernetes.requests.cpu=800m -deployer..kubernetes.requests.memory=640Mi ----- -==== - -Those values results in the following container settings being used: - -==== -[source] ----- -Limits: - cpu: 1 - memory: 1Gi -Requests: - cpu: 800m - memory: 640Mi ----- -==== - -You can also control the default values to which to set the `cpu` and `memory` globally. - -The following example shows how to set the CPU and memory for streams: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - limits: - memory: 640mi - cpu: 500m ----- -==== - -The following example shows how to set the CPU and memory for tasks: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - limits: - memory: 640mi - cpu: 500m ----- -==== - -The settings we have used so far affect only the settings for the container. They do not affect the memory setting for the JVM process in the container. If you would like to set JVM memory settings, you can set an environment variable to do so. See the next section for details. - -==== Environment Variables - -To influence the environment settings for a given application, you can use the `spring.cloud.deployer.kubernetes.environmentVariables` deployer property. -For example, a common requirement in production settings is to influence the JVM memory arguments. -You can do so by using the `JAVA_TOOL_OPTIONS` environment variable, as the following example shows: - -==== -[source] ----- -deployer..kubernetes.environmentVariables=JAVA_TOOL_OPTIONS=-Xmx1024m ----- -==== - -NOTE: The `environmentVariables` property accepts a comma-delimited string. If an environment variable contains a value -that is also a comma-delimited string, it must be enclosed in single quotation marks -- for example, -`spring.cloud.deployer.kubernetes.environmentVariables=spring.cloud.stream.kafka.binder.brokers='somehost:9092, -anotherhost:9093'` - -This overrides the JVM memory setting for the desired `` (replace `` with the name of your application). - -[[getting-started-kubernetes-probes]] -==== Liveness and Readiness Probes - -The `liveness` and `readiness` probes use paths called `/health` and `/info`, respectively. They use a `delay` of `10` for both and a `period` of `60` and `10` respectively. You can change these defaults when you deploy the stream by using deployer properties. The liveness and readiness probes are applied only to streams. 
- -The following example changes the `liveness` probe (replace `` with the name of your application) by setting deployer properties: - -==== -[source] ----- -deployer..kubernetes.livenessProbePath=/health -deployer..kubernetes.livenessProbeDelay=120 -deployer..kubernetes.livenessProbePeriod=20 ----- -==== - -You can declare the same as part of the server global configuration for streams, as the following example shows: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - livenessProbePath: /health - livenessProbeDelay: 120 - livenessProbePeriod: 20 ----- -==== - -Similarly, you can swap `liveness` for `readiness` to override the default `readiness` settings. - -By default, port 8080 is used as the probe port. You can change the defaults for both `liveness` and `readiness` probe ports by using deployer properties, as the following example shows: - -==== -[source] ----- -deployer..kubernetes.readinessProbePort=7000 -deployer..kubernetes.livenessProbePort=7000 ----- -==== - -You can declare the same as part of the global configuration for streams, as the following example shows: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - readinessProbePort: 7000 - livenessProbePort: 7000 ----- -==== - -[NOTE] -===== -By default, the `liveness` and `readiness` probe paths use Spring Boot 2.x+ actuator endpoints. To use Spring Boot 1.x actuator endpoint paths, you must adjust the `liveness` and `readiness` values, as the following example shows (replace `` with the name of your application): - -==== -[source] ----- -deployer..kubernetes.livenessProbePath=/health -deployer..kubernetes.readinessProbePath=/info ----- -==== - -To automatically set both `liveness` and `readiness` endpoints on a per-application basis to the default Spring Boot 1.x paths, you can set the following property: - -==== -[source] ----- -deployer..kubernetes.bootMajorVersion=1 ----- -==== - -===== - -You can access secured probe endpoints by using credentials stored in a https://kubernetes.io/docs/concepts/configuration/secret/[Kubernetes secret]. You can use an existing secret, provided the credentials are contained under the `credentials` key name of the secret's `data` block. You can configure probe authentication on a per-application basis. When enabled, it is applied to both the `liveness` and `readiness` probe endpoints by using the same credentials and authentication type. Currently, only `Basic` authentication is supported. - -To create a new secret: - -. Generate the base64 string with the credentials used to access the secured probe endpoints. -+ -Basic authentication encodes a username and a password as a base64 string in the format of `username:password`. -+ -The following example (which includes output and in which you should replace `user` and `pass` with your values) shows how to generate a base64 string: -+ -==== -[source,shell] ----- -$ echo -n "user:pass" | base64 -dXNlcjpwYXNz ----- -==== - -. With the encoded credentials, create a file (for example, `myprobesecret.yml`) with the following contents: -+ -==== -[source] ----- -apiVersion: v1 -kind: Secret -metadata: - name: myprobesecret -type: Opaque -data: - credentials: GENERATED_BASE64_STRING ----- -==== - -. Replace `GENERATED_BASE64_STRING` with the base64-encoded value generated earlier. - -. 
Create the secret by using `kubectl`, as the following example shows: -+ -==== -[source,shell] ----- -$ kubectl create -f ./myprobesecret.yml -secret "myprobesecret" created ----- -==== - -. Set the following deployer properties to use authentication when accessing probe endpoints, as the following example shows: -+ -==== -[source] ----- -deployer..kubernetes.probeCredentialsSecret=myprobesecret ----- -==== -+ -Replace `` with the name of the application to which to apply authentication. - -==== Using `SPRING_APPLICATION_JSON` - -You can use a `SPRING_APPLICATION_JSON` environment variable to set Data Flow server properties (including the configuration of Maven repository settings) that are common across all of the Data Flow server implementations. These settings go at the server level in the container `env` section of a deployment YAML. The following example shows how to do so: - -==== -[source,options=nowrap] ----- -env: -- name: SPRING_APPLICATION_JSON - value: "{ \"maven\": { \"local-repository\": null, \"remote-repositories\": { \"repo1\": { \"url\": \"https://repo.spring.io/libs-snapshot/"} } } }" ----- -==== - -==== Private Docker Registry - -You can pull Docker images from a private registry on a per-application basis. First, you must create a secret in the cluster. Follow the https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/[Pull an Image from a Private Registry] guide to create the secret. - -Once you have created the secret, you can use the `imagePullSecret` property to set the secret to use, as the following example shows: - -==== -[source] ----- -deployer..kubernetes.imagePullSecret=mysecret ----- -==== - -Replace `` with the name of your application and `mysecret` with the name of the secret you created earlier. - -You can also configure the image pull secret at the global server level. - -The following example shows how to do so for streams: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - imagePullSecret: mysecret ----- -==== - -The following example shows how to do so for tasks: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - imagePullSecret: mysecret ----- -==== - -Replace `mysecret` with the name of the secret you created earlier. - -==== Annotations - -You can add annotations to Kubernetes objects on a per-application basis. The supported object types are pod `Deployment`, `Service`, and `Job`. Annotations are defined in a `key:value` format, allowing for multiple annotations separated by a comma. For more information and use cases on annotations, see https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/[Annotations]. - -The following example shows how you can configure applications to use annotations: - -==== -[source,options=nowrap] ----- -deployer..kubernetes.podAnnotations=annotationName:annotationValue -deployer..kubernetes.serviceAnnotations=annotationName:annotationValue,annotationName2:annotationValue2 -deployer..kubernetes.jobAnnotations=annotationName:annotationValue ----- -==== - -Replace `` with the name of your application and the value of your annotations. - -==== Entry Point Style - -An entry point style affects how application properties are passed to the container to be deployed. 
Currently, three styles are supported: - -* `exec` (default): Passes all application properties and command line arguments in the deployment request as container arguments. Application properties are transformed into the format of `--key=value`. -* `shell`: Passes all application properties and command line arguments as environment variables. Each of the applicationor command-line argument properties is transformed into an uppercase string and `.` characters are replaced with `_`. -* `boot`: Creates an environment variable called `SPRING_APPLICATION_JSON` that contains a JSON representation of all application properties. Command line arguments from the deployment request are set as container args. - -NOTE: In all cases, environment variables defined at the server-level configuration and on a per-application basis are sent on to the container as is. - -You can configure an application as follows: - -==== -[source,options=nowrap] ----- -deployer..kubernetes.entryPointStyle= ----- -==== - -Replace `` with the name of your application and `` with your desired entry point style. - -You can also configure the entry point style at the global server level. - -The following example shows how to do so for streams: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - entryPointStyle: entryPointStyle ----- -==== - -The following example shows how to do so for tasks: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - entryPointStyle: entryPointStyle ----- -==== - -Replace `entryPointStyle` with the desired entry point style. - -You should choose an Entry Point Style of either `exec` or `shell`, to correspond to how the `ENTRYPOINT` syntax is defined in the container's `Dockerfile`. For more information and uses cases on `exec` versus `shell`, see the https://docs.docker.com/engine/reference/builder/#entrypoint[ENTRYPOINT] section of the Docker documentation. - -Using the `boot` entry point style corresponds to using the `exec` style `ENTRYPOINT`. Command line arguments from the deployment request are passed to the container, with the addition of application properties being mapped into the `SPRING_APPLICATION_JSON` environment variable rather than command line arguments. - -NOTE: When you use the `boot` Entry Point Style, the `deployer..kubernetes.environmentVariables` property must not contain `SPRING_APPLICATION_JSON`. - -==== Deployment Service Account - -You can configure a custom service account for application deployments through properties. You can use an existing service account or create a new one. One way to create a service account is by using `kubectl`, as the following example shows: - -==== -[source,shell] ----- -$ kubectl create serviceaccount myserviceaccountname -serviceaccount "myserviceaccountname" created ----- -==== - -Then you can configure individual applications as follows: - -==== -[source,options=nowrap] ----- -deployer..kubernetes.deploymentServiceAccountName=myserviceaccountname ----- -==== - -Replace `` with the name of your application and `myserviceaccountname` with your service account name. - -You can also configure the service account name at the global server level. 
- -The following example shows how to do so for streams: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - deploymentServiceAccountName: myserviceaccountname ----- -==== - -The following example shows how to do so for tasks: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - deploymentServiceAccountName: myserviceaccountname ----- -==== - -Replace `myserviceaccountname` with the service account name to be applied to all deployments. - -==== Image Pull Policy - -An image pull policy defines when a Docker image should be pulled to the local registry. Currently, three policies are supported: - -* `IfNotPresent` (default): Do not pull an image if it already exists. -* `Always`: Always pull the image regardless of whether it already exists. -* `Never`: Never pull an image. Use only an image that already exists. - -The following example shows how you can individually configure applications: - -==== -[source,options=nowrap] ----- -deployer.<app>.kubernetes.imagePullPolicy=Always ----- -==== - -Replace `<app>` with the name of your application and `Always` with your desired image pull policy. - -You can configure an image pull policy at the global server level. - -The following example shows how to do so for streams: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - imagePullPolicy: Always ----- -==== - -The following example shows how to do so for tasks: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - imagePullPolicy: Always ----- -==== - -Replace `Always` with your desired image pull policy. - -==== Deployment Labels - -You can set custom labels on objects related to https://kubernetes.io/docs/concepts/workloads/controllers/deployment/[Deployment]. See https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/[Labels] for more information on labels. Labels are specified in `key:value` format. - -The following example shows how you can individually configure applications: - -==== -[source,options=nowrap] ----- -deployer.<app>.kubernetes.deploymentLabels=myLabelName:myLabelValue ----- -==== - -Replace `<app>` with the name of your application, `myLabelName` with your label name, and `myLabelValue` with the value of your label. - -Additionally, you can apply multiple labels, as the following example shows: - -==== -[source,options=nowrap] ----- -deployer.<app>.kubernetes.deploymentLabels=myLabelName:myLabelValue,myLabelName2:myLabelValue2 ----- -==== - -==== Tolerations - -Tolerations work with taints to ensure pods are not scheduled onto particular nodes. -Tolerations are set into the pod configuration while taints are set onto nodes. -See the https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/[Taints and Tolerations] section of the Kubernetes reference for more information. - -The following example shows how you can individually configure applications: - -==== -[source,options=nowrap] ----- -deployer.<app>.kubernetes.tolerations=[{key: 'mykey', operator: 'Equal', value: 'myvalue', effect: 'NoSchedule'}] ----- -==== - -Replace `<app>` with the name of your application and the key-value pairs according to your desired toleration configuration.
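A toleration has an effect only when a matching taint exists on a node. For context, here is a minimal sketch of creating a taint that matches the example above with `kubectl` (the node name `node1` is hypothetical):

====
[source,shell]
----
$ kubectl taint nodes node1 mykey=myvalue:NoSchedule
----
====

With this taint in place, pods without the matching toleration are kept off `node1`, while pods deployed with the toleration above remain eligible for scheduling there.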
- -You can configure tolerations at the global server level as well. - -The following example shows how to do so for streams: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - tolerations: - - key: mykey - operator: Equal - value: myvalue - effect: NoSchedule ----- -==== - -The following example shows how to do so for tasks: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - tolerations: - - key: mykey - operator: Equal - value: myvalue - effect: NoSchedule ----- -==== - -Replace the `tolerations` key-value pairs according to your desired toleration configuration. - -==== Secret References - -Secrets can be referenced and their entire data contents can be decoded and inserted into the pod environment as individual variables. -See the https://kubernetes.io/docs/tasks/inject-data-application/distribute-credentials-secure/#configure-all-key-value-pairs-in-a-secret-as-container-environment-variables[Configure all key-value pairs in a Secret as container environment variables] section of the Kubernetes reference for more information. - -The following example shows how you can individually configure applications: - -==== -[source,options=nowrap] ----- -deployer.<app>.kubernetes.secretRefs=testsecret ----- -==== - -You can also specify multiple secrets, as follows: - -==== -[source,options=nowrap] ----- -deployer.<app>.kubernetes.secretRefs=[testsecret,anothersecret] ----- -==== - -Replace `<app>` with the name of your application and the `secretRefs` attribute with the appropriate values for your application environment and secret. - -You can configure secret references at the global server level as well. - -The following example shows how to do so for streams: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - secretRefs: - - testsecret - - anothersecret ----- -==== - -The following example shows how to do so for tasks: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - secretRefs: - - testsecret - - anothersecret ----- -==== - -Replace the items of `secretRefs` with one or more secret names. - -==== Secret Key References - -Secrets can be referenced and their decoded value can be inserted into the pod environment. -See the https://kubernetes.io/docs/concepts/configuration/secret/#using-secrets-as-environment-variables[Using Secrets as Environment Variables] section of the Kubernetes reference for more information. - -The following example shows how you can individually configure applications: - -==== -[source,options=nowrap] ----- -deployer.<app>.kubernetes.secretKeyRefs=[{envVarName: 'MY_SECRET', secretName: 'testsecret', dataKey: 'password'}] ----- -==== - -Replace `<app>` with the name of your application and the `envVarName`, `secretName`, and `dataKey` attributes with the appropriate values for your application environment and secret. - -You can configure secret key references at the global server level as well.
- -The following example shows how to do so for streams: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - secretKeyRefs: - - envVarName: MY_SECRET - secretName: testsecret - dataKey: password ----- -==== - -The following example shows how to do so for tasks: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - secretKeyRefs: - - envVarName: MY_SECRET - secretName: testsecret - dataKey: password ----- -==== - -Replace the `envVarName`, `secretName`, and `dataKey` attributes with the appropriate values for your secret. - -==== ConfigMap References - -A ConfigMap can be referenced and its entire data contents can be decoded and inserted into the pod environment as individual variables. -See the https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/#configure-all-key-value-pairs-in-a-configmap-as-container-environment-variables[Configure all key-value pairs in a ConfigMap as container environment variables] section of the Kubernetes reference for more information. - -The following example shows how you can individually configure applications: - -==== -[source,options=nowrap] ----- -deployer.<app>.kubernetes.configMapRefs=testcm ----- -==== - -You can also specify multiple ConfigMap instances, as follows: - -==== -[source,options=nowrap] ----- -deployer.<app>.kubernetes.configMapRefs=[testcm,anothercm] ----- -==== - -Replace `<app>` with the name of your application and the `configMapRefs` attribute with the appropriate values for your application environment and ConfigMap. - -You can configure ConfigMap references at the global server level as well. - -The following example shows how to do so for streams. Edit the appropriate `skipper-config-(binder).yaml`, replacing `(binder)` with the corresponding binder in use: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - configMapRefs: - - testcm - - anothercm ----- -==== - -The following example shows how to do so for tasks by editing the `server-config.yaml` file: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - configMapRefs: - - testcm - - anothercm ----- -==== - -Replace the items of `configMapRefs` with one or more ConfigMap names. - -==== ConfigMap Key References - -A ConfigMap can be referenced and its associated key value inserted into the pod environment. -See the https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/#define-container-environment-variables-using-configmap-data[Define container environment variables using ConfigMap data] section of the Kubernetes reference for more information. - -The following example shows how you can individually configure applications: - -==== -[source,options=nowrap] ----- -deployer.<app>.kubernetes.configMapKeyRefs=[{envVarName: 'MY_CM', configMapName: 'testcm', dataKey: 'platform'}] ----- -==== - -Replace `<app>` with the name of your application and the `envVarName`, `configMapName`, and `dataKey` attributes with the appropriate values for your application environment and ConfigMap. - -You can configure ConfigMap references at the global server level as well. - -The following example shows how to do so for streams.
Edit the appropriate `skipper-config-(binder).yaml`, replacing `(binder)` with the corresponding binder in use: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - configMapKeyRefs: - - envVarName: MY_CM - configMapName: testcm - dataKey: platform ----- -==== - -The following example shows how to do so for tasks by editing the `server-config.yaml` file: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - configMapKeyRefs: - - envVarName: MY_CM - configMapName: testcm - dataKey: platform ----- -==== - -Replace the `envVarName`, `configMapName`, and `dataKey` attributes with the appropriate values for your ConfigMap. - -==== Pod Security Context - -You can configure the pod security context to run processes under the specified UID (user ID) or GID (group ID). -This is useful when you do not want to run processes under the default `root` UID and GID. -You can define either the `runAsUser` (UID) or `fsGroup` (GID), and you can configure them to work together. -See the https://kubernetes.io/docs/tasks/configure-pod-container/security-context/[Security Context] section of the Kubernetes reference for more information. - -The following example shows how you can individually configure application pods: - -==== -[source,options=nowrap] ----- -deployer.<app>.kubernetes.podSecurityContext={runAsUser: 65534, fsGroup: 65534} ----- -==== - -Replace `<app>` with the name of your application and the `runAsUser` and/or `fsGroup` attributes with the appropriate values for your container environment. - -You can configure the pod security context at the global server level as well. - -The following example shows how to do so for streams. Edit the appropriate `skipper-config-(binder).yaml`, replacing `(binder)` with the corresponding binder in use: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - podSecurityContext: - runAsUser: 65534 - fsGroup: 65534 ----- -==== - -The following example shows how to do so for tasks by editing the `server-config.yaml` file: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - podSecurityContext: - runAsUser: 65534 - fsGroup: 65534 ----- -==== - -Replace the `runAsUser` and/or `fsGroup` attributes with the appropriate values for your container environment. - -==== Service Ports - -When you deploy applications, a Kubernetes Service object is created with a default port of `8080`. If the `server.port` property is set, it overrides the default port value. You can add additional ports to the Service object on a per-application basis. You can add multiple ports with a comma delimiter. - -The following example shows how you can configure additional ports on a Service object for an application: - -==== -[source,options=nowrap] ----- -deployer.<app>.kubernetes.servicePorts=5000 -deployer.<app>.kubernetes.servicePorts=5000,9000 ----- -==== - -Replace `<app>` with the name of your application and set the port values as needed. - -==== StatefulSet Init Container - -When deploying an application by using a StatefulSet, an Init Container is used to set the instance index in the pod. -By default, the image used is `busybox`, which you can customize.
- -The following example shows how you can individually configure application pods: - -==== -[source,options=nowrap] ----- -deployer.<app>.kubernetes.statefulSetInitContainerImageName=myimage:mylabel ----- -==== - -Replace `<app>` with the name of your application and the `statefulSetInitContainerImageName` attribute with the appropriate value for your environment. - -You can configure the StatefulSet Init Container at the global server level as well. - -The following example shows how to do so for streams. Edit the appropriate `skipper-config-(binder).yaml`, replacing `(binder)` with the corresponding binder in use: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - skipper: - server: - platform: - kubernetes: - accounts: - default: - statefulSetInitContainerImageName: myimage:mylabel ----- -==== - -The following example shows how to do so for tasks by editing the `server-config.yaml` file: - -==== -[source,yaml] ----- -data: - application.yaml: |- - spring: - cloud: - dataflow: - task: - platform: - kubernetes: - accounts: - default: - statefulSetInitContainerImageName: myimage:mylabel ----- -==== - -Replace the `statefulSetInitContainerImageName` attribute with the appropriate value for your environment. - -==== Init Containers - -When you deploy applications, you can set a custom Init Container on a per-application basis. -Refer to the https://kubernetes.io/docs/concepts/workloads/pods/init-containers/[Init Containers] section of the Kubernetes reference for more information. - -The following example shows how you can configure an Init Container for an application: - -==== -[source,options=nowrap] ----- -deployer.<app>.kubernetes.initContainer={containerName: 'test', imageName: 'busybox:latest', commands: ['sh', '-c', 'echo hello']} ----- -==== - -Replace `<app>` with the name of your application and set the values of the `initContainer` attributes appropriate for your Init Container. +We have prepared scripts that simplify the process of creating a local Minikube or Kind cluster or of using a remote cluster, such as GKE or TKG. For more information, see xref:local-k8s-development[Configure Kubernetes for Local Development]. diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started-local.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started-local.adoc index a7ea932c3c..698cadf4d3 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started-local.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started-local.adoc @@ -9,6 +9,6 @@ You can find a gentle introduction to Spring Cloud Data Flow along with installa We then build an introductory Spring Cloud Data Flow application, discussing some core principles as we go. -- -See the link:https://dataflow.spring.io/docs/installation/local/[Local Machine] section of the microsite for more information on setting up docker compose and manual installation. +This section covers how to get started with Spring Cloud Data Flow running locally with Docker Compose. See the link:https://dataflow.spring.io/docs/installation/local/[Local Machine] section of the microsite for more information on installing Spring Cloud Data Flow with Docker Compose. Once you have the Data Flow server installed locally, you probably want to get started with orchestrating the deployment of readily available pre-built applications into coherent streaming or batch data pipelines.
We have guides to help you get started with both link:https://dataflow.spring.io/docs/stream-developer-guides/[Stream] and link:https://dataflow.spring.io/docs/batch-developer-guides/[Batch] processing. diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started.adoc index bc26a483ab..eb1196ebf5 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/getting-started.adoc @@ -1,6 +1,8 @@ [[getting-started]] = Getting Started +NOTE: This version of Spring Cloud Data Flow provides: <<appendix-boot3>> + include::getting-started-local.adoc[] include::getting-started-cloudfoundry.adoc[] include::getting-started-kubernetes.adoc[] diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/index.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/index.adoc index dca2881674..f90bf7589b 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/index.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/index.adoc @@ -1,5 +1,5 @@ = Spring Cloud Data Flow Reference Guide -Sabby Anandan; Marius Bogoevici; Eric Bottard; Mark Fisher; Ilayaperumal Gopinathan; Gunnar Hillert; Mark Pollack; Patrick Peralta; Glenn Renfro; Thomas Risberg; Dave Syer; David Turanski; Janne Valkealahti; Oleg Zhurakousky; Jay Bryant; Vinicius Carvalho; Chris Schaefer; Damien Vitrac; Christian Tzolov +Sabby Anandan; Marius Bogoevici; Eric Bottard; Mark Fisher; Ilayaperumal Gopinathan; Gunnar Hillert; Mark Pollack; Patrick Peralta; Glenn Renfro; Thomas Risberg; Dave Syer; David Turanski; Janne Valkealahti; Oleg Zhurakousky; Jay Bryant; Vinicius Carvalho; Chris Schaefer; Damien Vitrac; Christian Tzolov; Claudia Bressi; Chris Bono; Corneil du Plessis :doctype: book :toc: left :toclevels: 4 @@ -8,36 +8,29 @@ Sabby Anandan; Marius Bogoevici; Eric Bottard; Mark Fisher; Ilayaperumal Gopinat :hide-uri-scheme: :docinfo: shared -:spring-cloud-dataflow-docs: https://docs.spring.io/spring-cloud-dataflow/docs/{project-version}/reference -:spring-cloud-dataflow-docs-current: https://docs.spring.io/spring-cloud-dataflow/docs/current-SNAPSHOT/reference/html/ -:spring-cloud-stream-docs: https://docs.spring.io/spring-cloud-stream/docs/current/reference/htmlsingle/index.html -:spring-boot-version: 2.1.1.RELEASE -:spring-cloud-task-version: 2.0.0.RELEASE -:spring-batch-version: 4.1.0.RELEASE -:spring-batch-doc-version: 4.1.x -:composed-task-version: 2.1.0.RELEASE -:spring-boot-docs-reference: https://docs.spring.io/spring-boot/docs/2.1.1.RELEASE/reference -:scs-app-starters-docs: https://docs.spring.io/spring-cloud-stream-app-starters/docs/current/reference/html -:scs-app-starters-docs-htmlsingle: https://docs.spring.io/spring-cloud-stream-app-starters/docs/current/reference/htmlsingle :github-repo: spring-cloud/spring-cloud-dataflow :github-code: https://github.com/{github-repo} :microsite-version: master - -:dataflow-asciidoc: https://raw.githubusercontent.com/spring-cloud/spring-cloud-dataflow/master/spring-cloud-dataflow-docs/src/main/asciidoc - -:docker-http-source-rabbit-version: 2.1.0.RELEASE -:docker-time-source-rabbit-version: 2.1.0.RELEASE -:docker-log-sink-rabbit-version: 2.1.0.RELEASE -:docker-log-sink-kafka-version: 2.1.0.RELEASE -:docker-http-source-kafka-version: 2.1.0.RELEASE -:docker-time-source-kafka-version: 2.1.0.RELEASE -:docker-timestamp-task-version: 2.0.0.RELEASE +:spring-boot-version: 2.7.14 +:spring-cloud-task-version: 2.4.6 +:spring-batch-version: 4.3.8 +:spring-boot-docs:
https://docs.spring.io/spring-boot/docs/{spring-boot-version}/reference/htmlsingle +:scs-stream-apps-docs: https://docs.spring.io/stream-applications/docs/current/reference/html +:dataflow-asciidoc-images: https://raw.githubusercontent.com/spring-cloud/spring-cloud-dataflow/main/spring-cloud-dataflow-docs/src/main/asciidoc/images + +:docker-http-source-rabbit-version: 5.0.0 +:docker-time-source-rabbit-version: 5.0.0 +:docker-log-sink-rabbit-version: 5.0.0 +:docker-log-sink-kafka-version: 5.0.0 +:docker-http-source-kafka-version: 5.0.0 +:docker-time-source-kafka-version: 5.0.0 +:docker-timestamp-task-version: 3.0.0 ifdef::backend-html5[] Version {project-version} -(C) 2012-2020 Pivotal Software, Inc. +(C) 2012-2021 VMware, Inc. All rights reserved. _Copies of this document may be made for your own use and for distribution to others, provided that you do not charge any fee for such copies and further provided that each copy contains this Copyright Notice, whether distributed in print or electronically._ @@ -47,6 +40,8 @@ endif::backend-html5[] include::preface.adoc[] +include::overview.adoc[] + include::getting-started.adoc[] include::applications.adoc[] @@ -71,8 +66,6 @@ include::tasks-monitoring.adoc[] include::dashboard.adoc[] -include::samples.adoc[] - include::api-guide.adoc[] include::appendix.adoc[] diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/overview.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/overview.adoc new file mode 100644 index 0000000000..765723c43a --- /dev/null +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/overview.adoc @@ -0,0 +1,8 @@ +[[overview]] += Overview + +Microservice based Streaming and Batch data processing for Cloud Foundry and Kubernetes. + +Spring Cloud Data Flow provides tools to create complex topologies for streaming and batch data pipelines. The data pipelines consist of Spring Boot apps, built using the Spring Cloud Stream or Spring Cloud Task microservice frameworks. + +Spring Cloud Data Flow supports a range of data processing use cases, from ETL to import/export, event streaming, and predictive analytics. diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/preface.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/preface.adoc index fd280d8c66..988cabaabb 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/preface.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/preface.adoc @@ -3,10 +3,6 @@ [[dataflow-documentation-about]] == About the documentation -The documentation for this release is available in {spring-cloud-dataflow-docs}/htmlsingle[HTML]. - -The latest copy of the Spring Cloud Data Flow reference guide can be found {spring-cloud-dataflow-docs-current}[here]. - Copies of this document may be made for your own use and for distribution to others, provided that you do not charge any fee for such copies and further provided that each copy contains this Copyright Notice, whether distributed in @@ -18,8 +14,8 @@ Having trouble with Spring Cloud Data Flow? We would like to help! * Ask a question. We monitor https://stackoverflow.com[stackoverflow.com] for questions tagged with https://stackoverflow.com/tags/spring-cloud-dataflow[`spring-cloud-dataflow`]. -* Report bugs with Spring Cloud Data Flow at https://github.com/spring-cloud/spring-cloud-dataflow/issues. -* Chat with the community and developers on https://gitter.im/spring-cloud/spring-cloud-dataflow[Gitter]. +* Report bugs with Spring Cloud Data Flow at https://github.com/{github-repo}/issues. 
+* Review the latest release notes at https://github.com/{github-repo}/releases. NOTE: All of Spring Cloud Data Flow is open source, including the documentation! If you find problems with the docs or if you just want to improve them, please {github-code}[get involved]. diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/samples.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/samples.adoc deleted file mode 100644 index e254efb70a..0000000000 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/samples.adoc +++ /dev/null @@ -1,36 +0,0 @@ -[[dataflow-samples]] -= Samples - -[partintro] --- -This section shows the available samples. --- - -[[samples-links]] -== Links -Several samples have been created to help you get started on implementing higher-level use cases than the basic Streams and Tasks shown in the reference guide. -The samples are part of a separate https://github.com/spring-cloud/spring-cloud-dataflow-samples[repository] and have their own https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/[reference documentation]. - -The following samples are available: - -.General -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#_java_dsl[Java DSL] -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#spring-cloud-data-flow-samples-http-cassandra-overview[HTTP to Cassandra] -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#_http_to_mysql_demo[HTTP to MySQL] -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#_http_to_gemfire_demo[HTTP to Gemfire] -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#_gemfire_cq_to_log_demo[Gemfire CQ to Log Demo] -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#_gemfire_to_log_demo[Gemfire to Log Demo] -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#_custom_spring_cloud_stream_processor[Custom Processor] - -.Task and Batch -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#_batch_job_on_cloud_foundry[Batch Job on Cloud Foundry] -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#_batch_file_ingest[Batch File Ingest] - -.Data Science -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#_species_prediction[Species Prediction] - -.Functions -* https://docs.spring.io/spring-cloud-dataflow-samples/docs/current/reference/htmlsingle/#_functions_in_spring_cloud_data_flow[Using Spring Cloud Function] - -{sp}+ -{sp}+ diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/shell.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/shell.adoc index b0697b2eb9..7bfa9faec2 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/shell.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/shell.adoc @@ -58,16 +58,14 @@ The following listing shows the output of the `help` command: ==== [source,bash] ---- -! 
- Allows execution of operating system (OS) commands -clear - Clears the console -cls - Clears the console -date - Displays the local date and time -exit - Exits the shell -http get - Make GET request to http endpoint -http post - POST data to http endpoint -quit - Exits the shell -system properties - Shows the shell's properties -version - Displays shell version +Built-In Commands + help: Display help about available commands + stacktrace: Display the full stacktrace of the last error. + clear: Clear the shell screen. + quit, exit: Exit the shell. + history: Display or save the history of previously run commands + version: Show version info + script: Read and execute commands from a file. ---- ==== @@ -77,26 +75,28 @@ Adding the name of the command to `help` shows additional information on how to [source,bash] ---- dataflow:>help stream create -Keyword: stream create -Description: Create a new stream definition - Keyword: ** default ** - Keyword: name - Help: the name to give to the stream - Mandatory: true - Default if specified: '__NULL__' - Default if unspecified: '__NULL__' +NAME + stream create - Create a new stream definition - Keyword: definition - Help: a stream definition, using the DSL (e.g. "http --port=9000 | hdfs") - Mandatory: true - Default if specified: '__NULL__' - Default if unspecified: '__NULL__' +SYNOPSIS + stream create [--name String] [--definition String] --description String --deploy boolean - Keyword: deploy - Help: whether to deploy the stream immediately - Mandatory: false - Default if specified: 'true' - Default if unspecified: 'false' +OPTIONS + --name String + the name to give to the stream + [Mandatory] + + --definition String + a stream definition, using the DSL (e.g. "http --port=9000 | hdfs") + [Mandatory] + + --description String + a short description about the stream + [Optional] + + --deploy boolean + whether to deploy the stream immediately + [Optional, default = false] ---- ==== @@ -109,7 +109,7 @@ You can complete the shell command options in the shell by pressing the `TAB` ke [source,bash] ---- dataflow:>stream create -- -stream create --definition stream create --name +--definition --deploy --description --name ---- ==== @@ -272,7 +272,7 @@ Many applications accept options that are to be interpreted as SpEL expressions, * Literals can be enclosed in either single or double quotes. * Quotes need to be doubled to embed a literal quote. Single quotes inside double quotes need no special treatment, and the reverse is also true. -As a last example, assume you want to use the link:${scs-app-starters-docs}/spring-cloud-stream-modules-processors.html#spring-clound-stream-modules-transform-processor[transform processor]. +As a last example, assume you want to use the link:{scs-stream-apps-docs}/spring-cloud-stream-modules-transform-processor[transform processor]. This processor accepts an `expression` option which is a SpEL expression. It is to be evaluated against the incoming message, with a default of `payload` (which forwards the message payload untouched).
It is important to understand that the following statements are equivalent: diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/spring-boot-3x.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/spring-boot-3x.adoc new file mode 100644 index 0000000000..a0445897be --- /dev/null +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/spring-boot-3x.adoc @@ -0,0 +1,99 @@ +[appendix] +[[appendix-boot3]] +== Spring Boot 3.x Support + +=== Stream Applications +Spring Cloud Data Flow supports both Spring Boot `2.x` and `3.x` based Stream applications. + +==== Differences in 3.x +Be aware of the following areas that have changed across versions. + +===== Metrics Configuration Properties +IMPORTANT: The following does **NOT** apply when configuring metrics for the Dataflow or Skipper server as they both run on Spring Boot 2.x. It is only applicable to applications managed by Dataflow. + +The naming of the metrics registry-specific properties differs as follows: + +* `2.x`: `management.metrics.export.<registry>.<property>` +* `3.x`: `management.<registry>.metrics.export.<property>` + +.Example: +* `2.x`: `management.metrics.export.prometheus.enabled=true` +* `3.x`: `management.prometheus.metrics.export.enabled=true` + +Be sure that you use the `2.x` format when configuring `2.x` based stream apps and the `3.x` format when configuring `3.x` based stream apps. + +===== Dataflow Metrics Property Replication +By default, Dataflow replicates relevant metrics properties that it has been configured with to all launched stream and task applications. +This replication has been updated to target both the `2.x` and `3.x` expected formats. +In other words, if your `2.x` stream apps are currently inheriting the Dataflow metrics configuration, they will continue to do so for your `3.x` stream apps. + + +==== Pre-packaged Stream Applications +The default <<ootb-stream-apps>> are based on Spring Boot `2.x`. +To use the latest pre-packaged apps based on Spring Boot `3.x`, you must manually register the apps (relevant coordinates below).
+ +[cols="a"] +[cols="40%"] +|=== +|[.small]#Stream Applications# + +|[.small]#https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/stream-applications-descriptor/2022.0.0/stream-applications-descriptor-2022.0.0.stream-apps-rabbit-maven[RabbitMQ + Maven]# +|[.small]#https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/stream-applications-descriptor/2022.0.0/stream-applications-descriptor-2022.0.0.stream-apps-rabbit-docker[RabbitMQ + Docker]# +|[.small]#https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/stream-applications-descriptor/2022.0.0/stream-applications-descriptor-2022.0.0.stream-apps-kafka-maven[Kafka + Maven]# +|[.small]#https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/stream-applications-descriptor/2022.0.0/stream-applications-descriptor-2022.0.0.stream-apps-kafka-docker[Kafka + Docker]# +|=== + +[cols="a"] +[cols="40%"] +|=== +|[.small]#HTTP Repository Location for Apps# + +|[.small]#https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/stream-applications-descriptor/2022.0.0/stream-applications-descriptor-2022.0.0.rabbit-apps-maven-repo-url.properties[RabbitMQ]# +|[.small]#https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/stream-applications-descriptor/2022.0.0/stream-applications-descriptor-2022.0.0.kafka-apps-maven-repo-url.properties[Kafka]# +|=== + +=== Spring Cloud Task / Batch Applications + +The database schemas for Spring Cloud Task 3.x and Spring Batch 5.x have been modified in the versions that form part of Spring Boot 3.x. + +Spring Cloud Data Flow will create a set of tables for the Boot 3.x version that are prefixed by `BOOT3_` and will configure the `spring.cloud.task.tablePrefix` and `spring.batch.jdbc.table-prefix` with the correct values. + +In order to know that a specific task is a Boot 3.x application, the version has to be provided as part of registration. The REST endpoints accept a `bootVersion=3` parameter, and the shell commands accept `--bootVersion 3`. + +Since there are now multiple sets of tables that represent task and batch executions, each schema has been assigned a `schemaTarget` name. This value forms part of queries when retrieving execution data. The UI takes care of this by using the embedded resource links. If you are using the REST API directly, you will need to update those requests. + +==== Pre-packaged Task / Batch Applications +The default <<ootb-task-apps>> are based on Spring Boot `2.x`, Spring Cloud Task `2.x`, and Spring Batch `4.x`. +To use the latest pre-packaged apps based on Spring Boot `3.x`, Spring Cloud Task `3.x`, and Spring Batch `5.x`, you must manually register the apps using the properties below. + +.Maven coordinates +[.small] +[source,properties] +---- +task.timestamp=maven://io.spring:timestamp-task:3.0.0 +task.timestamp.bootVersion=3 + +task.timestamp-batch=maven://io.spring:timestamp-batch-task:3.0.0 +task.timestamp-batch.bootVersion=3 +---- +[.small] + +.Docker coordinates +[.small] +[source,properties] +---- +task.timestamp=docker:springcloudtask/timestamp-task:3.0.0 +task.timestamp.bootVersion=3 + +task.timestamp-batch=docker:springcloudtask/timestamp-batch-task:3.0.0 +task.timestamp-batch.bootVersion=3 +---- +[.small] + +TIP: The properties can be used when registering an app in the Dataflow UI or the Dataflow shell CLI.
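As an illustration, the Maven coordinates above can be combined with the `--bootVersion 3` flag when registering from the Data Flow shell. The following is a minimal sketch (the app name `timestamp` is only an example):

====
[source,bash]
----
dataflow:>app register --name timestamp --type task --uri maven://io.spring:timestamp-task:3.0.0 --bootVersion 3
----
====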
+ +==== Composed Task Runner + +Composed Task Runner for Spring Cloud Data Flow 2.11.x supports launching Spring Boot `3.x`/`2.x`, Spring Cloud Task `3.x`/`2.x`, and Spring Batch `5.x`/`4.x` applications. + +NOTE: When registering Task applications, verify that the correct `Spring Boot Version` is selected. diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/spring-cloud-stream-overview.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/spring-cloud-stream-overview.adoc deleted file mode 100644 index bc289df590..0000000000 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/spring-cloud-stream-overview.adoc +++ /dev/null @@ -1,80 +0,0 @@ - -[[spring-cloud-stream-overview]] -= Spring Cloud Stream Overview - -[partintro] --- -This section goes into more detail about how you can work with Spring Cloud Stream. It covers topics -such as creating and running stream applications. - -If you're just starting out with Spring Cloud Data Flow, you should probably read the Getting Started guide for "`<>`" , "`<>`", "`<>`" before diving into this section. --- - -== Introducing Spring Cloud Stream - -The Spring Cloud Stream project allows a user to develop and run messaging microservices using Spring Integration and run them locally or in the cloud, either as standalone apps or via Spring Cloud Data Flow. Just add `@EnableBinding` and run your app as a Spring Boot app (single application context). You just need to connect to the physical broker for the bindings, which is automatic if the relevant binder implementation is available on the classpath (e.g. Kafka or RabbitMQ). - -Here's a sample source application (output channel only): - -[source,java] ----- -@SpringBootApplication -@ComponentScan(basePackageClasses=TimerSource.class) -public class TimerSourceApplication { - - public static void main(String[] args) { - SpringApplication.run(TimerSourceApplication.class, args); - } - -} - -@Configuration -@EnableBinding(Source.class) -public class TimerSource { - - @Value("${format}") - private String format; - - @Bean - @InboundChannelAdapter(value = Source.OUTPUT, poller = @Poller(fixedDelay = "${fixedDelay}", maxMessagesPerPoll = "1")) - public MessageSource<String> timerMessageSource() { - return () -> new GenericMessage<>(new SimpleDateFormat(format).format(new Date())); - } - -} ----- - -`@EnableBinding` is parameterized by an interface (in this case `Source`) which declares input and output channels. `Source`, `Sink` and `Processor` are provided off the shelf, but you can define others. Here's the definition of `Source`: - -[source,java] ----- -public interface Source { - @Output("output") - MessageChannel output(); -} ----- - -The `@Output` annotation is used to identify output channels (messages leaving the app) and `@Input` is used to identify input channels (messages entering the app). It is optionally parameterized by a channel name - if the name is not provided the method name is used instead. An implementation of the interface is created for you and can be used in the application context by autowiring it, e.g. into a test case: - -[source,java] ----- -@RunWith(SpringRunner.class) -@SpringBootTest(classes = TimerSourceApplication.class) -@DirtiesContext -public class TimerSourceApplicationTests { - - @Autowired - private Source source; - - @Test - public void contextLoads() { - assertNotNull(this.source.output()); - } - -} ----- - -NOTE: In this case there is only one `Source` in the application context so there is no need to qualify it when it is autowired. If there is ambiguity, e.g.
if you are composing one app from some others, you can use the `@Bindings` qualifier to inject a specific channel set. The `@Bindings` qualifier takes a parameter which is the class that carries the `@EnableBinding` annotation (in this case the `TimerSource`). - -For more information, including how to run `spring-cloud-stream` applications autonomously (without Spring Cloud Data Flow), -visit the link:https://cloud.spring.io/spring-cloud-stream[Spring Cloud Stream project home page]. diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/streams.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/streams.adoc index 825d27cc6a..03c8c3cd58 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/streams.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/streams.adoc @@ -47,7 +47,7 @@ Data flows through the pipeline from left to right. In Data Flow, the Unix command is replaced by a https://cloud.spring.io/spring-cloud-stream/[Spring Cloud Stream] application and each pipe symbol represents connecting the input and output of applications over messaging middleware, such as RabbitMQ or Apache Kafka. Each Spring Cloud Stream application is registered under a simple name. -The registration process specifies where the application can be obtained (for example, in a Maven Repository or a Docker registry). You can find out more about how to register Spring Cloud Stream applications in this <>. +The registration process specifies where the application can be obtained (for example, in a Maven Repository or a Docker registry). In Data Flow, we classify the Spring Cloud Stream applications as Sources, Processors, or Sinks. As a simple example, consider the collection of data from an HTTP Source and writing to a File Sink. @@ -81,77 +81,6 @@ NOTE: When naming a stream definition, keep in mind that each application in the You can use the Stream Application DSL to define custom binding properties for each of the Spring Cloud Stream applications. See the link:https://dataflow.spring.io/docs/feature-guides/streams/stream-application-dsl/[Stream Application DSL] section of the microsite for more information. -Consider the following Java interface, which defines an input method and two output methods: - -==== -[source,java] ----- -public interface Barista { - - @Input - SubscribableChannel orders(); - - @Output - MessageChannel hotDrinks(); - - @Output - MessageChannel coldDrinks(); -} ----- -==== - -Further consider the following Java interface, which is typical for creating a Kafka Streams application: - -==== -[source,java] ----- -interface KStreamKTableBinding { - - @Input - KStream inputStream(); - - @Input - KTable inputTable(); -} ----- -==== - -In these cases with multiple input and output bindings, Data Flow cannot make any assumptions about the flow of data from one application to another. -Therefore, you need to set the binding properties to "`wire up`" the application. -The *Stream Application DSL* uses a "`double pipe`", instead of the "`pipe symbol`", to indicate that Data Flow should not configure the binding properties of the application. Think of `||` as meaning "`in parallel`". -The following example shows such a "`parallel`" definition: - -==== -[source,bash] ----- -dataflow:> stream create --definition "orderGeneratorApp || baristaApp || hotDrinkDeliveryApp || coldDrinkDeliveryApp" --name myCafeStream ----- -==== - -NOTE: Breaking Change! 
Versions of SCDF Local, Cloud Foundry 1.7.0 to 1.7.2 and SCDF Kubernetes 1.7.0 to 1.7.1 used the `comma` character as the separator between applications. This caused breaking changes in the traditional Stream DSL. While not ideal, changing the separator character was felt to be the best solution with the least impact on existing users. - -This stream has four applications. -`baristaApp` has two output destinations, `hotDrinks` and `coldDrinks`, intended to be consumed by the `hotDrinkDeliveryApp` and `coldDrinkDeliveryApp`, respectively. -When deploying this stream, you need to set the binding properties so that the `baristaApp` sends hot drink messages to the `hotDrinkDeliveryApp` destination and cold drink messages to the `coldDrinkDeliveryApp` destination. -The following listing does so: - -==== -[source,bash,subs=attributes] ----- -app.baristaApp.spring.cloud.stream.bindings.hotDrinks.destination=hotDrinksDest -app.baristaApp.spring.cloud.stream.bindings.coldDrinks.destination=coldDrinksDest -app.hotDrinkDeliveryApp.spring.cloud.stream.bindings.input.destination=hotDrinksDest -app.coldDrinkDeliveryApp.spring.cloud.stream.bindings.input.destination=coldDrinksDest ----- -==== - -If you want to use consumer groups, you need to set the Spring Cloud Stream application properties, `spring.cloud.stream.bindings.<channelName>.producer.requiredGroups` and `spring.cloud.stream.bindings.<channelName>.group`, on the producer and consumer applications respectively. - -Another common use case for the Stream Application DSL is to deploy an HTTP gateway application that sends a synchronous request or reply message to a Kafka or RabbitMQ application. -In this case, both the HTTP gateway application and the Kafka or RabbitMQ application can be a Spring Integration application that does not make use of the Spring Cloud Stream library. - -It is also possible to deploy only a single application using the Stream application DSL. - === Application Properties Each application takes properties to customize its behavior. As an example, the `http` source module exposes a `port` setting that lets the data ingestion port be changed from the default value: @@ -186,13 +115,13 @@ NOTE: Supported Stream `<type>` possibilities are: `source`, `processor`, and The lifecycle of a stream goes through the following stages: -. <> -. <> -. <> -. <> or <> -. <> or <> applications in the Stream. +. Register stream definition +. Create stream using definition +. Deploy stream +. Destroy or undeploy stream +. Upgrade or rollback apps in the stream -https://cloud.spring.io/spring-cloud-skipper/[Skipper] is a server that lets you discover Spring Boot applications and manage their lifecycle on multiple Cloud Platforms. +https://spring.io/projects/spring-cloud-skipper/[Skipper] is a server that lets you discover Spring Boot applications and manage their lifecycle on multiple cloud platforms. Applications in Skipper are bundled as packages that contain the application's resource location, application properties, and deployment properties. You can think of Skipper packages as being analogous to packages found in tools such as `apt-get` or `brew`.
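Each of the lifecycle stages listed above maps onto a shell command. The following is a minimal sketch of the first four stages, assuming the `time` and `log` applications are already registered:

====
[source,bash]
----
dataflow:>stream create --name ticktock --definition "time | log"
dataflow:>stream deploy --name ticktock
dataflow:>stream undeploy --name ticktock
dataflow:>stream destroy --name ticktock
----
====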
@@ -277,22 +206,20 @@ applications built with the RabbitMQ binder, you could do the following: ==== [source,bash] ---- -dataflow:>app register --name http --type source --uri maven://org.springframework.cloud.stream.app:http-source-rabbit:1.2.1.BUILD-SNAPSHOT -dataflow:>app register --name log --type sink --uri maven://org.springframework.cloud.stream.app:log-sink-rabbit:1.2.1.BUILD-SNAPSHOT +dataflow:>app register --name http --type source --uri maven://org.springframework.cloud.stream.app:http-source-rabbit:5.0.0 +dataflow:>app register --name log --type sink --uri maven://org.springframework.cloud.stream.app:log-sink-rabbit:5.0.0 ---- ==== -If you would like to register multiple applications at one time, you can store them in a properties file, -where the keys are formatted as `<type>.<name>` and the values are the URIs. +If you would like to register multiple applications at one time, you can store them in a properties file, where the keys are formatted as `<type>.<name>` and the values are the URIs. -For example, to register the snapshot versions of the `http` and `log` -applications built with the RabbitMQ binder, you could have the following in a properties file (for example, `stream-apps.properties`): +For example, to register the `http` and `log` applications built with the RabbitMQ binder, you could have the following in a properties file (for example, `stream-apps.properties`): ==== [source,bash] ---- -source.http=maven://org.springframework.cloud.stream.app:http-source-rabbit:1.2.1.BUILD-SNAPSHOT -sink.log=maven://org.springframework.cloud.stream.app:log-sink-rabbit:1.2.1.BUILD-SNAPSHOT +source.http=maven://org.springframework.cloud.stream.app:http-source-rabbit:5.0.0 +sink.log=maven://org.springframework.cloud.stream.app:log-sink-rabbit:5.0.0 ---- ==== @@ -397,79 +324,70 @@ IMPORTANT: Only pre-registered applications can be used to `deploy`, `update`, o An attempt to update the `mysource` to version `0.0.1` (not registered) fails. [[supported-apps-and-tasks]] -==== Register Supported Applications and Tasks +==== Register Out-of-the-Box Applications and Tasks -For convenience, we have the static files with application-URIs (for both Maven and Docker) available -for all the out-of-the-box stream and task or batch app-starters. You can point to this file and import -all the application-URIs in bulk. Otherwise, as explained previously, you can register them individually or have your own -custom property file with only the required application-URIs in it. We recommend, however, having a "`focused`" -list of desired application-URIs in a custom property file. +For convenience, we have the static files with application-URIs (for both Maven and Docker) available for all the out-of-the-box stream and task applications. +You can point to this file and import all the application-URIs in bulk. +Otherwise, as explained previously, you can register them individually or have your own custom property file with only the required application-URIs in it. +We recommend, however, having a "`focused`" list of desired application-URIs in a custom property file. -===== Spring Cloud Stream App Starters - -The following table includes the `dataflow.spring.io` links to the available Stream Application Starters based on Spring Cloud Stream 2.1.x -and Spring Boot 2.1.x: +[[ootb-stream-apps]] +===== Out-of-the-Box Stream Applications +The following table includes the `dataflow.spring.io` links to the stream applications based on Spring Cloud Stream `3.2.x` and Spring Boot `2.7.x`.
[width="100%",frame="topbot",options="header"] |====================== |Artifact Type |Stable Release |SNAPSHOT Release |RabbitMQ + Maven -|https://dataflow.spring.io/rabbitmq-maven-latest -|https://dataflow.spring.io/Einstein-BUILD-SNAPSHOT-stream-applications-rabbit-maven +|https://dataflow.spring.io/rabbitmq-maven-5-0-x +|https://dataflow.spring.io/rabbitmq-maven-5-0-x-snapshot |RabbitMQ + Docker -|https://dataflow.spring.io/rabbitmq-docker-latest -|https://dataflow.spring.io/Einstein-BUILD-SNAPSHOT-stream-applications-rabbit-docker +|https://dataflow.spring.io/rabbitmq-docker-5-0-x +|https://dataflow.spring.io/rabbitmq-docker-5-0-x-snapshot |Apache Kafka + Maven -|https://dataflow.spring.io/kafka-maven-latest -|https://dataflow.spring.io/Einstein-BUILD-SNAPSHOT-stream-applications-kafka-maven +|https://dataflow.spring.io/kafka-maven-5-0-x +|https://dataflow.spring.io/kafka-maven-5-0-x-snapshot |Apache Kafka + Docker -|https://dataflow.spring.io/kafka-docker-latest -|https://dataflow.spring.io/Einstein-BUILD-SNAPSHOT-stream-applications-kafka-docker +|https://dataflow.spring.io/kafka-docker-5-0-x +|https://dataflow.spring.io/kafka-docker-5-0-x-snapshot |====================== +NOTE: By default, the out-of-the-box app's actuator endpoints are secured. You can disable security by deploying streams by setting the following property: `[small]#app.*.spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration#` -NOTE: By default, App Starter actuator endpoints are secured. You can disable security by deploying streams with the -`app.*.spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration` property. On Kubernetes, see the <> section for how to configure security for actuator endpoints. -NOTE: Starting with the Spring Cloud Stream 2.1 GA release, we now have robust interoperability with the Spring Cloud Function -programming model. Building on that, with the Einstein release-train, it is now possible to pick a few Stream App -Starters and compose them into a single application by using the functional-style programming model. Check out the -https://spring.io/blog/2019/01/09/composed-function-support-in-spring-cloud-data-flow["Composed Function Support in -Spring Cloud Data Flow"] blog to learn more about the developer and orchestration-experience with an example. - -===== Spring Cloud Task App Starters - -The following table includes the available Task Application Starters based on Spring Cloud Task 2.1.x and Spring Boot 2.1.x: +[[ootb-task-apps]] +===== Out-of-the-Box Task Applications +The following table includes the `dataflow.spring.io` links to the task applications based on Spring Cloud Task `2.4.x` and Spring Boot `2.7.x`. 
[width="100%",frame="topbot",options="header"] |====================== |Artifact Type |Stable Release |SNAPSHOT Release |Maven -|https://dataflow.spring.io/task-maven-latest -|https://dataflow.spring.io/Elston-BUILD-SNAPSHOT-task-applications-maven +|https://dataflow.spring.io/task-maven-3-0-x +|https://dataflow.spring.io/task-maven-3-0-x-snapshot |Docker -|https://dataflow.spring.io/task-docker-latest -|https://dataflow.spring.io/Elston-BUILD-SNAPSHOT-task-applications-docker +|https://dataflow.spring.io/task-docker-3-0-x +|https://dataflow.spring.io/task-docker-3-0-x-snapshot |====================== -You can find more information about the available task starters in the https://cloud.spring.io/spring-cloud-task-app-starters/[Task App Starters Project Page] and -related reference documentation. For more information about the available stream starters, look at the https://cloud.spring.io/spring-cloud-stream-app-starters/[Stream App Starters Project Page] -and related reference documentation. +For more information about the available out-of-the-box stream applications see the https://cloud.spring.io/spring-cloud-task-app-starters/[Spring Cloud Stream Applications] project page. + +For more information about the available out-of-the-box task applications see https://github.com/spring-cloud/spring-cloud-dataflow-samples/tree/main/timestamp-task[timestamp-task] and https://github.com/spring-cloud/spring-cloud-dataflow-samples/tree/main/timestamp-batch[timestamp-batch] docs. As an example, if you would like to register all out-of-the-box stream applications built with the Kafka binder in bulk, you can use the following command: ==== [source,bash,subs=attributes] ---- -$ dataflow:>app import --uri https://dataflow.spring.io/kafka-maven-latest +$ dataflow:>app import --uri https://dataflow.spring.io/kafka-maven-5-0-x ---- ==== @@ -478,7 +396,7 @@ Alternatively, you can register all the stream applications with the Rabbit bind ==== [source,bash,subs=attributes] ---- -$ dataflow:>app import --uri https://dataflow.spring.io/rabbitmq-maven-latest +$ dataflow:>app import --uri https://dataflow.spring.io/rabbitmq-maven-5-0-x ---- ==== @@ -505,36 +423,12 @@ NOTE: In some cases, the resource is resolved on the server side. In others, the URI is passed to a runtime container instance, where it is resolved. See the specific documentation of each Data Flow Server for more detail. + [[custom-applications]] -==== Creating Custom Applications +==== Register Custom Applications While Data Flow includes source, processor, sink applications, you can extend these applications or write a custom link:https://github.com/spring-cloud/spring-cloud-stream[Spring Cloud Stream] application. - -The process of creating Spring Cloud Stream applications with https://start.spring.io/[Spring Initializr] is detailed in the Spring Cloud Stream {spring-cloud-stream-docs}#_getting_started[documentation]. -You can include multiple binders to an application. -If you do so, see the instructions in <> for how to configure them. - -To support allowing properties, Spring Cloud Stream applications running in Spring Cloud Data Flow can include the Spring Boot `configuration-processor` as an optional dependency, as shown in the following example: - -==== -[source,xml] ----- - - - - org.springframework.boot - spring-boot-configuration-processor - true - - - ----- -==== - -NOTE:Make sure that the `spring-boot-maven-plugin` is included in the POM. 
-The plugin is necessary for creating the executable jar that is registered with Spring Cloud Data Flow. -Spring Initializr includes the plugin in the generated POM. - +You can follow the https://dataflow.spring.io/docs/stream-developer-guides/streams/standalone-stream-sample[Stream Development] guide on the microsite to create your own custom application. Once you have created a custom application, you can register it, as described in <>. [[spring-cloud-dataflow-create-stream]] @@ -559,11 +453,11 @@ The `stream info` command shows useful information about the stream, as shown (w [source,bash] ---- dataflow:>stream info ticktock -╔═══════════╤═════════════════╤══════════╗ -║Stream Name│Stream Definition│ Status ║ -╠═══════════╪═════════════════╪══════════╣ -║ticktock │time | log │undeployed║ -╚═══════════╧═════════════════╧══════════╝ +╔═══════════╤═════════════════╤═══════════╤══════════╗ +║Stream Name│Stream Definition│Description│ Status ║ +╠═══════════╪═════════════════╪═══════════╪══════════╣ +║ticktock │time | log │ │undeployed║ +╚═══════════╧═════════════════╧═══════════╧══════════╝ ---- ==== @@ -591,20 +485,31 @@ The following listing shows the exposed properties for the `time` application: [source,bash,options="nowrap"] ---- dataflow:> app info --name time --type source +Information about source application 'time': +Version: '5.0.0': +Default application version: 'true': +Resource URI: maven://org.springframework.cloud.stream.app:time-source-rabbit:5.0.0 ╔══════════════════════════════╤══════════════════════════════╤══════════════════════════════╤══════════════════════════════╗ ║ Option Name │ Description │ Default │ Type ║ ╠══════════════════════════════╪══════════════════════════════╪══════════════════════════════╪══════════════════════════════╣ -║trigger.time-unit │The TimeUnit to apply to delay│ │java.util.concurrent.TimeUnit ║ -║ │values. │ │ ║ -║trigger.fixed-delay │Fixed delay for periodic │1 │java.lang.Integer ║ -║ │triggers. │ │ ║ -║trigger.cron │Cron expression value for the │ │java.lang.String ║ -║ │Cron Trigger. │ │ ║ -║trigger.initial-delay │Initial delay for periodic │0 │java.lang.Integer ║ -║ │triggers. │ │ ║ -║trigger.max-messages │Maximum messages per poll, -1 │1 │java.lang.Long ║ -║ │means infinity. │ │ ║ -║trigger.date-format │Format for the date value. │ │java.lang.String ║ +║spring.integration.poller.max-│Maximum number of messages to │ │java.lang.Integer ║ +║messages-per-poll │poll per polling cycle. │ │ ║ +║spring.integration.poller.fixe│Polling rate period. Mutually │ │java.time.Duration ║ +║d-rate │exclusive with 'fixedDelay' │ │ ║ +║ │and 'cron'. │ │ ║ +║spring.integration.poller.fixe│Polling delay period. Mutually│ │java.time.Duration ║ +║d-delay │exclusive with 'cron' and │ │ ║ +║ │'fixedRate'. │ │ ║ +║spring.integration.poller.rece│How long to wait for messages │1s │java.time.Duration ║ +║ive-timeout │on poll. │ │ ║ +║spring.integration.poller.cron│Cron expression for polling. │ │java.lang.String ║ +║ │Mutually exclusive with │ │ ║ +║ │'fixedDelay' and 'fixedRate'. │ │ ║ +║spring.integration.poller.init│Polling initial delay. Applied│ │java.time.Duration ║ +║ial-delay │for 'fixedDelay' and │ │ ║ +║ │'fixedRate'; ignored for │ │ ║ +║ │'cron'. │ │ ║ +║time.date-format │Format for the date value.
│MM/dd/yy HH:mm:ss │java.lang.String ║ ╚══════════════════════════════╧══════════════════════════════╧══════════════════════════════╧══════════════════════════════╝ ---- ==== @@ -615,6 +520,10 @@ The following listing shows the exposed properties for the `log` application: [source,bash,options="nowrap"] ---- dataflow:> app info --name log --type sink +Information about sink application 'log': +Version: '5.0.0': +Default application version: 'true': +Resource URI: maven://org.springframework.cloud.stream.app:log-sink-rabbit:5.0.0 ╔══════════════════════════════╤══════════════════════════════╤══════════════════════════════╤══════════════════════════════╗ ║ Option Name │ Description │ Default │ Type ║ ╠══════════════════════════════╪══════════════════════════════╪══════════════════════════════╪══════════════════════════════╣ @@ -853,7 +762,7 @@ In this case, Spring Cloud Data Flow states that the stream is invalid because ` To update the stream, use the `stream update` command, which takes either `--properties` or `--propertiesFile` as a command argument. Skipper has an important new top-level prefix: `version`. -The following commands deploy `http | log` stream (and the version of `log` which registered at the time of deployment was `1.1.0.RELEASE`): +The following commands deploy `http | log` stream (and the version of `log` which registered at the time of deployment was `3.2.0`): ==== [source,bash] @@ -871,23 +780,23 @@ Stream Deployment properties: { "log" : { "spring.cloud.deployer.indexed" : "true", "spring.cloud.deployer.group" : "httptest", - "maven://org.springframework.cloud.stream.app:log-sink-rabbit" : "1.1.0.RELEASE" + "maven://org.springframework.cloud.stream.app:log-sink-rabbit" : "3.2.0" }, "http" : { "spring.cloud.deployer.group" : "httptest", - "maven://org.springframework.cloud.stream.app:http-source-rabbit" : "1.1.0.RELEASE" + "maven://org.springframework.cloud.stream.app:http-source-rabbit" : "3.2.0" } } ---- ==== -Then the following command updates the stream to use the `1.2.0.RELEASE` version of the log application. +Then the following command updates the stream to use the `5.0.0` version of the log application. 
Before updating the stream with the specific version of the application, we need to make sure that the application is registered with that version: ==== [source,bash] ---- -dataflow:>app register --name log --type sink --uri maven://org.springframework.cloud.stream.app:log-sink-rabbit:1.2.0.RELEASE +dataflow:>app register --name log --type sink --uri maven://org.springframework.cloud.stream.app:log-sink-rabbit:5.0.0 Successfully registered application 'sink:log' ---- ==== @@ -897,7 +806,7 @@ Then we can update the application: ==== [source,bash] ---- -dataflow:>stream update --name httptest --properties version.log=1.2.0.RELEASE +dataflow:>stream update --name httptest --properties version.log=5.0.0 ---- ==== @@ -920,11 +829,11 @@ Stream Deployment properties: { "spring.cloud.deployer.indexed" : "true", "spring.cloud.deployer.count" : "1", "spring.cloud.deployer.group" : "httptest", - "maven://org.springframework.cloud.stream.app:log-sink-rabbit" : "1.2.0.RELEASE" + "maven://org.springframework.cloud.stream.app:log-sink-rabbit" : "5.0.0" }, "http" : { "spring.cloud.deployer.group" : "httptest", - "maven://org.springframework.cloud.stream.app:http-source-rabbit" : "1.1.0.RELEASE" + "maven://org.springframework.cloud.stream.app:http-source-rabbit" : "3.2.0" } } ---- @@ -993,7 +902,7 @@ metadata: name: log spec: resource: maven://org.springframework.cloud.stream.app:log-sink-rabbit - version: 1.2.0.RELEASE + version: 3.2.0 applicationProperties: spring.cloud.dataflow.stream.app.label: log spring.cloud.stream.bindings.input.group: httptest @@ -1013,7 +922,7 @@ metadata: name: http spec: resource: maven://org.springframework.cloud.stream.app:http-source-rabbit - version: 1.2.0.RELEASE + version: 3.2.0 applicationProperties: spring.cloud.dataflow.stream.app.label: http spring.cloud.stream.bindings.output.producer.requiredGroups: httptest @@ -1033,6 +942,11 @@ The majority of the deployment and application properties were set by Data Flow You can roll back to a previous version of the stream by using the `stream rollback` command: +When performing a rollback, Skipper creates a new version of the stream and attempts to deploy it. If all the applications in that version start up successfully, Skipper marks the rollback as successful and the stream state is `DEPLOYED`. +However, if any application fails to start, Skipper terminates the associated rollback pods. Skipper still considers the latest, failed deployment as the current version of the stream. If all apps fail to start, the state of the stream is shown as "UNDEPLOYED". If at least one app starts, the stream state is shown as "PARTIAL". + +NOTE: In the case of a failed rollback, the previous version is running. + ==== [source,bash] ---- @@ -1139,7 +1053,7 @@ Function composition lets you attach a functional logic dynamically to an existi == Functional Applications -With Spring Cloud Stream 3.x adding link:https://cloud.spring.io/spring-cloud-static/spring-cloud-stream/current/reference/html/spring-cloud-stream.html#spring-cloud-stream-overview-producing-consuming-messages[functional support], you can build `Source`, `Sink` and `Processor` applications merely by implementing the Java Util's `Supplier`, `Consumer`, and `Function` interfaces respectively.
+With Spring Cloud Stream 3.x adding link:https://docs.spring.io/spring-cloud-stream/docs/3.2.x/reference/html/spring-cloud-stream.html#spring_cloud_function[functional support], you can build `Source`, `Sink`, and `Processor` applications by implementing the `java.util.function` `Supplier`, `Consumer`, and `Function` interfaces, respectively. See the link:https://dataflow.spring.io/docs/recipes/functional-apps/[Functional Application Recipe] of the SCDF site for more about this feature. [[spring-cloud-dataflow-stream-examples]] @@ -1151,8 +1065,6 @@ This chapter includes the following examples: * <> * <> -You can find links to more samples in the "`<>`" chapter. - [[spring-cloud-dataflow-simple-stream]] === Simple Stream Processing diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/tasks.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/tasks.adoc index 7ab4bac713..be7252d1be 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/tasks.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/tasks.adoc @@ -15,13 +15,13 @@ A task application is short-lived, meaning that it stops running on purpose and One use case might be to scrape a web page and write to the database. The https://cloud.spring.io/spring-cloud-task/[Spring Cloud Task] framework is based on Spring Boot and adds the ability for Boot applications to record the lifecycle events of a short-lived application, such as when it starts, when it ends, and the exit status. -The https://docs.spring.io/spring-cloud-task/docs/{spring-cloud-task-version}/reference/htmlsingle/#features-task-execution-details[`TaskExecution`] documentation shows which information is stored in the database. -The entry point for code execution in a Spring Cloud Task application is most often an implementation of Boot's `CommandLineRunner` interface, as shown in this https://docs.spring.io/spring-cloud-task/docs/{spring-cloud-task-version}/reference/htmlsingle/#getting-started-writing-the-code[example]. +The https://docs.spring.io/spring-cloud-task/docs/{spring-cloud-task-version}/reference/html#features-task-execution-details[`TaskExecution`] documentation shows which information is stored in the database. +The entry point for code execution in a Spring Cloud Task application is most often an implementation of Boot's `CommandLineRunner` interface, as shown in this https://docs.spring.io/spring-cloud-task/docs/{spring-cloud-task-version}/reference/html#getting-started-writing-the-code[example]. The Spring Batch project is probably what comes to mind for Spring developers writing short-lived applications. Spring Batch provides a much richer set of functionality than Spring Cloud Task and is recommended when processing large volumes of data. One use case might be to read many CSV files, transform each row of data, and write each transformed row to a database. -Spring Batch provides its own database schema with a much more rich https://docs.spring.io/spring-batch/{spring-batch-doc-version}/reference/html/schema-appendix.html#metaDataSchema[set of information] about the execution of a Spring Batch job. +Spring Batch provides its own database schema with a much richer https://docs.spring.io/spring-batch/docs/{spring-batch-version}/reference/html/schema-appendix.html#metaDataSchema[set of information] about the execution of a Spring Batch job. Spring Cloud Task is integrated with Spring Batch so that, if a Spring Cloud Task application defines a Spring Batch `Job`, a link between the Spring Cloud Task and Spring Cloud Batch execution tables is created.
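To make the recorded lifecycle concrete, the following shell session is a minimal sketch (the `mytimestamp` definition name is illustrative) that creates a task definition from the out-of-the-box `timestamp` application, launches it, and then lists the recorded task executions with their start time, end time, and exit code:

====
[source,bash]
----
dataflow:>task create mytimestamp --definition "timestamp"
dataflow:>task launch mytimestamp
dataflow:>task execution list
----
====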
When running Data Flow on your local machine, Tasks are launched in a separate JVM. @@ -42,7 +42,8 @@ Before you dive deeper into the details of creating Tasks, you should understand [[spring-cloud-dataflow-create-task-apps]] === Creating a Task Application -While Spring Cloud Task does provide a number of out-of-the-box applications (at https://github.com/spring-cloud-task-app-starters[spring-cloud-task-app-starters]), most task applications require custom development. +Spring Cloud Data Flow provides a couple of out-of-the-box task applications (https://github.com/spring-cloud/spring-cloud-dataflow-samples/tree/main/timestamp-task[timestamp-task] and https://github.com/spring-cloud/spring-cloud-dataflow-samples/tree/main/timestamp-batch[timestamp-batch]), but most task applications require custom development. + To create a custom task application: . Use the https://start.spring.io[Spring Initializer] to create a new project, making sure to select the following starters: @@ -64,7 +65,7 @@ public class MyTask { ---- + . With this class, you need one or more `CommandLineRunner` or `ApplicationRunner` implementations within your application. You can either implement your own or use the ones provided by Spring Boot (there is one for running batch jobs, for example). -. Packaging your application with Spring Boot into an über jar is done through the standard {spring-boot-docs-reference}/html/getting-started-first-application.html#getting-started-first-application-executable-jar[Spring Boot conventions]. +. Packaging your application with Spring Boot into an über jar is done through the standard {spring-boot-docs}/#getting-started.first-application.executable-jar[Spring Boot conventions]. The packaged application can be registered and deployed as noted below. ==== Task Database Configuration @@ -127,7 +128,7 @@ For example, if you would like to register all the task applications that ship w ==== [source,bash] ---- -dataflow:>app import --uri https://dataflow.spring.io/task-maven-latest +dataflow:>app import --uri https://dataflow.spring.io/task-maven-3-0-x ---- ==== @@ -287,6 +288,30 @@ NOTE: Properties configured by using this mechanism have lower precedence than t They are overridden if a property with the same key is specified at task launch time (for example, `app.trigger.prop2` overrides the common property). + +==== Launching tasks with a specific application version + +When launching a task, you can specify the version of the application to use. +If no version is specified, Spring Cloud Data Flow uses the default version of the application. +To specify the version of the application to be used at launch time, use the deployer property `version.<application name>`. +For example: + +==== +[source,bash,subs=attributes] +---- +task launch my-task --properties 'version.timestamp=3.0.0' +---- +==== + +Similarly, when scheduling a task, you use the same `version.<application name>` format. For example: + +==== +[source,bash,subs=attributes] +---- +task schedule create --name my-schedule --definitionName my-task --expression '*/1 * * * *' --properties 'version.timestamp=3.0.0' +---- +==== + + [[spring-cloud-dataflow-task-limit-concurrent-executions]] === Limit the number concurrent task launches @@ -476,6 +501,8 @@ If you submit a stop for a task execution that has child task executions associa WARNING: When stopping a task execution that has a running Spring Batch job, the job is left with a batch status of `STARTED`. Each of the supported platforms sends a SIG-INT to the task application when a stop is requested.
That allows Spring Cloud Task to capture the state of the app. However, Spring Batch does not handle a SIG-INT and, as a result, the job stops but remains in the STARTED status. +NOTE: When launching Remote Partitioned Spring Batch Task applications, Spring Cloud Data Flow supports stopping a worker partition task directly for both Cloud Foundry and Kubernetes platforms. Stopping a worker partition task is not supported on the local platform. + ==== Stopping a Task Execution that was Started Outside of Spring Cloud Data Flow You may wish to stop a task that has been launched outside of Spring Cloud Data Flow. An example of this is the worker applications launched by a remote batch partitioned application. @@ -541,19 +568,9 @@ Spring Cloud Data Flow lets you create a directed graph, where each node of the This is done by using the DSL for composed tasks. You can create a composed task through the RESTful API, the Spring Cloud Data Flow Shell, or the Spring Cloud Data Flow UI. -=== Configuring the Composed Task Runner - -Composed tasks are run through a task application called the Composed Task Runner. - -==== Registering the Composed Task Runner +=== The Composed Task Runner -By default, Spring Cloud Data Flow retrieves the composed task runner application from -Maven Central for Cloud Foundry and local deployments and DockerHub for Kubernetes. It -retrieves the composed task runner upon the first use of composed tasks. - -If Maven Central or DockerHub cannot be reached for a given Spring Cloud Data Flow -deployment, you can specify a new URI from which to retrieve the composed task runner by setting the -`spring.cloud.dataflow.task.composedtaskrunner.uri` property. +Composed tasks are run through a task application called the Composed Task Runner. The Spring Cloud Data Flow server automatically deploys the Composed Task Runner when launching a composed task. ==== Configuring the Composed Task Runner @@ -570,15 +587,32 @@ The `ComposedTaskRunner` task has the following options: The command line arguments to be used for each of the tasks. (String, default: ). * `increment-instance-enabled` -Allows a single `ComposedTaskRunner` instance to be run again without changing the parameters. The default is `false`, which means a `ComposedTaskRunner` instance can be started only once with a given set of parameters. If `true` it can be re-started. (Boolean, default: `false`). +Allows a single `ComposedTaskRunner` instance to be run again without changing the parameters, by adding an incremented number as a job parameter based on the `run.id` from the previous execution. (Boolean, default: `true`). ComposedTaskRunner is built by using https://github.com/spring-projects/spring-batch[Spring Batch]. As a result, upon a successful execution, the batch job is considered to be complete. -To launch the same `ComposedTaskRunner` definition multiple times, you must set the `increment-instance-enabled` property to `true` or change the parameters for the definition for each launch. +To launch the same `ComposedTaskRunner` definition multiple times, you must set either the `increment-instance-enabled` or the `uuid-instance-enabled` property to `true` or change the parameters for the definition for each launch. When using this option, it must be applied for all task launches for the desired application, including the first launch. +* `uuid-instance-enabled` +Allows a single `ComposedTaskRunner` instance to be run again without changing the parameters, by adding a UUID to the `ctr.id` job parameter. (Boolean, default: `false`).
+ComposedTaskRunner is built by using https://github.com/spring-projects/spring-batch[Spring Batch]. As a result, upon a successful execution, the batch job is considered to be complete. +To launch the same `ComposedTaskRunner` definition multiple times, you must set either the `increment-instance-enabled` or the `uuid-instance-enabled` property to `true` or change the parameters for the definition for each launch. +When using this option, it must be applied for all task launches for the desired application, including the first launch. This option, when set to `true`, overrides the value of `increment-instance-enabled`. +Set this option to `true` when running multiple instances of the same composed task definition at the same time. + * `interval-time-between-checks` The amount of time, in milliseconds, that the `ComposedTaskRunner` waits between checks of the database to see if a task has completed. (Integer, default: `10000`). `ComposedTaskRunner` uses the datastore to determine the status of each child tasks. This interval indicates to `ComposedTaskRunner` how often it should check the status its child tasks. +* `transaction-isolation-level` +Establishes the transaction isolation level for the Composed Task Runner. +A list of available transaction isolation levels can be found https://docs.spring.io/spring-framework/docs/current/javadoc-api/org/springframework/transaction/TransactionDefinition.html[here]. +Default is `ISOLATION_REPEATABLE_READ`. + +* `max-start-wait-time` +The maximum amount of time, in milliseconds, that the Composed Task Runner will wait for the +`start_time` of a step's `taskExecution` to be set before the execution of the Composed task is failed (Integer, default: 0). +Determines the maximum time each child task is allowed for application startup. The default of `0` indicates no timeout. + * `max-wait-time` The maximum amount of time, in milliseconds, that an individual step can run before the execution of the Composed task is failed (Integer, default: 0). Determines the maximum time each child task is allowed to run before the CTR ends with a failure. The default of `0` indicates no timeout. @@ -730,7 +764,7 @@ Each of them ran successfully with an `Exit Code` as `0`. ===== Passing Properties to the Child Tasks To set the properties for child tasks in a composed task graph at task launch time, -use the following format: `app.<composed task definition name>.<child task app name>.<property>`. +use the following format: `app.<child task app name>.<property>`.
The following listing shows a composed task definition as an example: ==== [source] ---- @@ -745,16 +779,16 @@ To have `mytaskapp` display 'HELLO' and set the `mytimestamp` timestamp format t ==== [source,bash] ---- -task launch my-composed-task --properties "app.my-composed-task.mytaskapp.displayMessage=HELLO,app.my-composed-task.mytimestamp.timestamp.format=YYYY" +task launch my-composed-task --properties "app.mytaskapp.displayMessage=HELLO,app.mytimestamp.timestamp.format=YYYY" ---- ==== -Similar to application properties, you can also set the `deployer` properties for child tasks by using the following format: `deployer.<composed task definition name>.<child task app name>.<deployer-property>`: +Similar to application properties, you can also set the `deployer` properties for child tasks by using the following format: `deployer.<child task app name>.<deployer-property>`: ==== [source,bash] ---- -task launch my-composed-task --properties "deployer.my-composed-task.mytaskapp.memory=2048m,app.my-composed-task.mytimestamp.timestamp.format=HH:mm:ss" +task launch my-composed-task --properties "deployer.mytaskapp.memory=2048m,app.mytimestamp.timestamp.format=HH:mm:ss" Launched task 'a1' ---- ==== @@ -769,30 +803,11 @@ You can pass command-line arguments for the composed task runner by using the `- ==== [source,bash,options="nowrap"] ---- dataflow:>task create my-composed-task --definition "<aaa: timestamp || bbb: timestamp>" Created new task 'my-composed-task' -dataflow:>task launch my-composed-task --arguments "--increment-instance-enabled=true --max-wait-time=50000 --split-thread-core-pool-size=4" --properties "app.my-composed-task.bbb.timestamp.format=dd/MM/yyyy HH:mm:ss" +dataflow:>task launch my-composed-task --arguments "--increment-instance-enabled=true --max-wait-time=50000 --split-thread-core-pool-size=4" --properties "app.bbb.timestamp.format=dd/MM/yyyy HH:mm:ss" Launched task 'my-composed-task' ---- ==== -===== Launching a Composed Task by using a Custom Composed Task Runner - -In some cases, you need to launch a composed task by using a custom version of a composed task runner other than the default application that is shipped out-of-the-box. -To do this, you need to register the custom version of the composed task runner and then specify the `composedTaskRunnerName` property to point to the custom application at task launch, as follows: - -==== -[source,bash,options="nowrap"] ----- -dataflow:>app register --name best-ctr --type task --uri maven://the.best.ctr.composed-task-runner:1.0.0.RELEASE - -dataflow:>task create mycomposedtask --definition "te:timestamp && tr:timestamp" -Created new task 'mycomposedtask' - -dataflow:>task launch --name mycomposedtask --composedTaskRunnerName best-ctr ----- -==== - -NOTE: The application specified by the `composedTaskRunnerName` needs to be a task registered in the Application Registry. - ===== Exit Statuses The following list shows how the exit status is set for each step (task) contained in the composed task following each step execution: @@ -882,7 +897,7 @@ If `task1` fails, `task2` does not launch.
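As a minimal sketch of such a conditional execution (the `my-conditional-task` name is illustrative, and both steps reuse the `timestamp` application from the earlier examples), the following shell session creates and launches a two-step composed task in which the second step runs only when the first succeeds:

====
[source,bash]
----
dataflow:>task create my-conditional-task --definition "task1: timestamp && task2: timestamp"
dataflow:>task launch my-conditional-task
----
====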
You can also use the Spring Cloud Data Flow Dashboard to create your conditional execution, by using the designer to drag and drop applications that are required and connecting them together to create your directed graph, as shown in the following image: .Conditional Execution -image::{dataflow-asciidoc}/images/dataflow-ctr-conditional-execution.png[Composed Task Conditional Execution, scaledwidth="50%"] +image::{dataflow-asciidoc-images}/dataflow-ctr-conditional-execution.png[Composed Task Conditional Execution, scaledwidth="50%"] The preceding diagram is a screen capture of the directed graph as it being created by using the Spring Cloud Data Flow Dashboard. You can see that four components in the diagram comprise a conditional execution: @@ -923,7 +938,7 @@ All other statuses returned by `cat` have no effect, and the task would end norm Using the Spring Cloud Data Flow Dashboard to create the same "`basic transition`" would resemble the following image: .Basic Transition -image::{dataflow-asciidoc}/images/dataflow-ctr-transition-basic.png[Composed Task Basic Transition, scaledwidth="50%"] +image::{dataflow-asciidoc-images}/dataflow-ctr-transition-basic.png[Composed Task Basic Transition, scaledwidth="50%"] The preceding diagram is a screen capture of the directed graph as it being created in the Spring Cloud Data Flow Dashboard. Notice that there are two different types of connectors: @@ -957,7 +972,7 @@ For any exit status of `cat` other than `FAILED`, `baz` would launch. Using the Spring Cloud Data Flow Dashboard to create the same "`transition with wildcard`" would resemble the following image: .Basic Transition With Wildcard -image::{dataflow-asciidoc}/images/dataflow-ctr-transition-basic-wildcard.png[Composed Task Basic Transition with Wildcard, scaledwidth="50%"] +image::{dataflow-asciidoc-images}/dataflow-ctr-transition-basic-wildcard.png[Composed Task Basic Transition with Wildcard, scaledwidth="50%"] ==== Transition With a Following Conditional Execution @@ -978,11 +993,26 @@ For any exit status of `foo` other than `FAILED` or `UNKNOWN`, `qux` would launc Using the Spring Cloud Data Flow Dashboard to create the same "`transition with conditional execution`" would resemble the following image: .Transition With Conditional Execution -image::{dataflow-asciidoc}/images/dataflow-ctr-transition-conditional-execution.png[Composed Task Transition with Conditional Execution, scaledwidth="50%"] +image::{dataflow-asciidoc-images}/dataflow-ctr-transition-conditional-execution.png[Composed Task Transition with Conditional Execution, scaledwidth="50%"] NOTE: In this diagram, the dashed line (transition) connects the `foo` application to the target applications, but a solid line connects the conditional executions between `foo`, `qux`, and `quux`. +==== Ignoring Exit Message +If any child task within a split returns an `ExitMessage` other than `COMPLETED`, the split +will have an `ExitStatus` of `FAILED`. To ignore the `ExitMessage` of a child task, +add `ignoreExitMessage=true` for each app that will return an `ExitMessage` +within the split. When using this flag, the `ExitStatus` of the task will be +`COMPLETED` if the `ExitCode` of the child task is zero. The split will have an +`ExitStatus` of `FAILED` if the `ExitCode` of a child task is non-zero. There are two ways to +set the `ignoreExitMessage` flag: + +1. Setting the property for each of the apps that need to have their `exitMessage` +ignored within the split.
For example, for a split like `<AAA || BBB>` where `BBB` +will return an `exitMessage`, you would set the `ignoreExitMessage` property like +`app.BBB.ignoreExitMessage=true` +2. You can also set it for all apps using the `composed-task-arguments` property, +for example: `--composed-task-arguments=--ignoreExitMessage=true`. [[spring-cloud-data-flow-split-execution]] === Split Execution @@ -1003,7 +1033,7 @@ The preceding example launches tasks `foo`, `bar` and `baz` in parallel. Using the Spring Cloud Data Flow Dashboard to create the same "`split execution`" would resemble the following image: .Split -image::{dataflow-asciidoc}/images/dataflow-ctr-split.png[Composed Task Split, scaledwidth="50%"] +image::{dataflow-asciidoc-images}/dataflow-ctr-split.png[Composed Task Split, scaledwidth="50%"] With the task DSL, you can also run multiple split groups in succession, as shown in the following example: @@ -1022,7 +1052,7 @@ However, if `foo`, `bar`, or `baz` fails, the split containing `qux` and `quux` Using the Spring Cloud Data Flow Dashboard to create the same "`split with multiple groups`" would resemble the following image: .Split as a part of a conditional execution -image::{dataflow-asciidoc}/images/dataflow-ctr-multiple-splits.png[Composed Task Split, scaledwidth="50%"] +image::{dataflow-asciidoc-images}/dataflow-ctr-multiple-splits.png[Composed Task Split, scaledwidth="50%"] Notice that there is a `SYNC` control node that is inserted by the designer when connecting two consecutive splits. @@ -1047,7 +1077,7 @@ However, `bar` does not launch until `foo` completes successfully. Using the Spring Cloud Data Flow Dashboard to create the same " `split containing conditional execution` " resembles the following image: .Split with conditional execution -image::{dataflow-asciidoc}/images/dataflow-ctr-split-contains-conditional.png[Composed Task Split With Conditional Execution, scaledwidth="50%"] +image::{dataflow-asciidoc-images}/dataflow-ctr-split-contains-conditional.png[Composed Task Split With Conditional Execution, scaledwidth="50%"] ==== Establishing the Proper Thread Count for Splits @@ -1059,15 +1089,15 @@ Then `DDD` and `EEE` would run in parallel. [[spring-cloud-dataflow-launch-tasks-from-stream]] == Launching Tasks from a Stream -You can launch a task from a stream by using the https://github.com/spring-cloud-stream-app-starters/tasklauncher-dataflow/blob/master/spring-cloud-starter-stream-sink-task-launcher-dataflow/README.adoc[`task-launcher-dataflow`] sink. +You can launch a task from a stream by using the https://github.com/spring-cloud/spring-cloud-dataflow/tree/main/spring-cloud-dataflow-tasklauncher/README.adoc[`task-launcher-dataflow`] sink, which is provided as part of the Spring Cloud Data Flow project. The sink connects to a Data Flow server and uses its REST API to launch any defined task. -The sink accepts a https://github.com/spring-cloud-stream-app-starters/tasklauncher-dataflow/blob/master/spring-cloud-starter-stream-sink-task-launcher-dataflow/README.adoc#payload[JSON payload] representing a `task launch request`, which provides the name of the task to launch and may include command line arguments and deployment properties. +The sink accepts a https://github.com/spring-cloud/spring-cloud-dataflow/tree/main/spring-cloud-dataflow-tasklauncher/README.adoc#payload[JSON payload] representing a `task launch request`, which provides the name of the task to launch and may include command line arguments and deployment properties.
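As an illustration of the payload, the following sketch pairs the sink with the `http` source so that a launch request can be posted by hand on a local deployment. The stream definition follows the `tasklauncher-sink` examples later in this section, while the JSON field names (`name`, `args`, and `deploymentProps`) are an assumption to verify against the sink's README for the version you use:

====
[source,bash]
----
# Create a stream whose source accepts HTTP posts and whose sink launches tasks
dataflow:>stream create --name launch-on-demand --definition "http --port=9000 | tasklauncher-sink" --deploy
# Post a task launch request for the previously defined timestamp-task
$ curl -X POST -H "Content-Type: application/json" \
    -d '{"name":"timestamp-task","args":[],"deploymentProps":{}}' \
    http://localhost:9000
----
====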
-The https://github.com/spring-cloud-stream-app-starters/core/blob/master/common/app-starters-task-launch-request-common/README.adoc[`app-starters-task-launch-request-common`] component, in conjunction with Spring Cloud Stream https://docs.spring.io/spring-cloud-stream/docs/current-snapshot/reference/htmlsingle/#_functional_composition[functional composition], can transform the output of any source or processor to a task launch request. +The https://github.com/spring-cloud/stream-applications/tree/main/functions/function/task-launch-request-function/README.adoc[`task-launch-request-function`] component, in conjunction with Spring Cloud Stream https://docs.spring.io/spring-cloud-stream/docs/current-snapshot/reference/htmlsingle/#_functional_composition[functional composition], can transform the output of any source or processor to a task launch request. -Adding a dependency to `app-starters-task-launch-request-common` auto-configures a `java.util.function.Function` implementation, registered through https://cloud.spring.io/spring-cloud-function/[Spring Cloud Function] as a `taskLaunchRequest`. +Adding a dependency to `task-launch-request-function` auto-configures a `java.util.function.Function` implementation, registered through https://cloud.spring.io/spring-cloud-function/[Spring Cloud Function] as a `taskLaunchRequest`. -For example, you can start with the https://github.com/spring-cloud-stream-app-starters/time/tree/master/spring-cloud-starter-stream-source-time[time] source, add the following dependency, build it, and register it as a custom source. We call it `time-tlr` in this example: +For example, you can start with the https://github.com/spring-cloud/stream-applications/tree/main/applications/source/time-source[time] source, add the following dependency, build it, and register it as a custom source. ==== [source,xml] ---- @@ -1079,14 +1109,16 @@ For example, you can start with the https://github.com/spring-cloud-stream-app-s ---- ==== -TIP: https://start-scs.cfapps.io/[Spring Cloud Stream Initializr] provides a great starting point for creating stream applications. +To build the application, follow the instructions https://github.com/spring-cloud/stream-applications#building-stream-applications[here]. + +This will create an `apps` directory that contains `time-source-rabbit` and `time-source-kafka` directories in the `/applications/source/time-source` directory. In each of these, you will see a `target` directory that contains a `time-source-<binder>-<version>.jar`. Now register the `time-source` jar (use the appropriate binder jar) with SCDF as a time source named `timestamp-tlr`. -Next, <> the `task-launcher-dataflow` sink and create a task (we use the provided timestamp task): +Next, register the `task-launcher-dataflow` sink with SCDF and create a task definition `timestamp-task`. Once this is complete, create the stream definition as shown below: ==== [source,bash] ---- -stream create --name task-every-minute --definition "time-tlr --trigger.fixed-delay=60 --spring.cloud.stream.function.definition=taskLaunchRequest --task.launch.request.task-name=timestamp-task | task-launcher-dataflow" --deploy +stream create --name task-every-minute --definition 'timestamp-tlr --fixed-delay=60000 --task.launch.request.task-name=timestamp-task --spring.cloud.function.definition=\"timeSupplier|taskLaunchRequestFunction\" | tasklauncher-sink' --deploy ---- ==== @@ -1097,7 +1129,7 @@ The following stream definition illustrates the use of command line arguments.
I ==== [source,bash] ---- -stream create --name task-every-second --definition "time-tlr --spring.cloud.stream.function.definition=taskLaunchRequest --task.launch.request.task-name=timestamp-task --task.launch.request.args=foo=bar --task.launch.request.arg-expressions=time=payload | task-launcher-dataflow" --deploy +stream create --name task-every-second --definition 'timestamp-tlr --task.launch.request.task-name=timestamp-task --spring.cloud.function.definition=\"timeSupplier|taskLaunchRequestFunction\" --task.launch.request.args=foo=bar --task.launch.request.arg-expressions=time=payload | tasklauncher-sink' --deploy ---- ==== @@ -1109,14 +1141,14 @@ You can then see the list of task executions by using the shell command `task ex [source,bash,options="nowrap"] ---- dataflow:>task execution list -╔════════════════════╤══╤════════════════════════════╤════════════════════════════╤═════════╗ -║ Task Name │ID│ Start Time │ End Time │Exit Code║ -╠════════════════════╪══╪════════════════════════════╪════════════════════════════╪═════════╣ -║timestamp-task_26176│4 │Tue May 02 12:13:49 EDT 2017│Tue May 02 12:13:49 EDT 2017│0 ║ -║timestamp-task_32996│3 │Tue May 02 12:12:49 EDT 2017│Tue May 02 12:12:49 EDT 2017│0 ║ -║timestamp-task_58971│2 │Tue May 02 12:11:50 EDT 2017│Tue May 02 12:11:50 EDT 2017│0 ║ -║timestamp-task_13467│1 │Tue May 02 12:10:50 EDT 2017│Tue May 02 12:10:50 EDT 2017│0 ║ -╚════════════════════╧══╧════════════════════════════╧════════════════════════════╧═════════╝ +╔══════════════╤═══╤════════════════════════════╤════════════════════════════╤═════════╗ +║ Task Name │ID │ Start Time │ End Time │Exit Code║ +╠══════════════╪═══╪════════════════════════════╪════════════════════════════╪═════════╣ +║timestamp-task│581│Thu Sep 08 11:38:33 EDT 2022│Thu Sep 08 11:38:33 EDT 2022│0 ║ +║timestamp-task│580│Thu Sep 08 11:38:31 EDT 2022│Thu Sep 08 11:38:31 EDT 2022│0 ║ +║timestamp-task│579│Thu Sep 08 11:38:29 EDT 2022│Thu Sep 08 11:38:29 EDT 2022│0 ║ +║timestamp-task│578│Thu Sep 08 11:38:26 EDT 2022│Thu Sep 08 11:38:26 EDT 2022│0 ║ +╚══════════════╧═══╧════════════════════════════╧════════════════════════════╧═════════╝ ---- ==== @@ -1128,43 +1160,30 @@ This pattern may be applied to any source to launch a task in response to any ev A composed task can be launched with the `task-launcher-dataflow` sink, as discussed <>. Since we use the `ComposedTaskRunner` directly, we need to set up the task definitions for the composed task runner itself, along with the composed tasks, prior to the creation of the composed task launching stream. Suppose we wanted to create the following composed task definition: `AAA && BBB`. -The first step would be to create the task definitions, as shown in the following example: +The first step would be to create the task definition, as shown in the following example: ==== [source] ---- -task create composed-task-runner --definition "composed-task-runner" -task create AAA --definition "timestamp" -task create BBB --definition "timestamp" +task create --name composed-task-sample --definition "AAA: timestamp && BBB: timestamp" ---- ==== -NOTE: Releases of `ComposedTaskRunner` can be found -https://github.com/spring-cloud-task-app-starters/composed-task-runner/releases[here]. +Now that the task definition we need for the composed task is ready, we need to create a stream that launches `composed-task-sample`.
+We create a stream with: - -Now that the task definitions we need for composed task definition are ready, we need to create a stream that launches `ComposedTaskRunner`. -So, in this case, we create a stream with: - -* The `time` source customized to emit task launch requests, as shown <>. -* The `task-launcher-dataflow` sink that launches the `ComposedTaskRunner` +* The `timestamp-tlr` source customized to emit task launch requests, as shown <>. +* The `tasklauncher-sink` that launches the `composed-task-sample` definition The stream should resemble the following: ==== [source] ---- -stream create ctr-stream --definition "time --fixed-delay=30 --task.launch.request.task-name=composed-task-launcher --task.launch.request.args=--graph=AAA&&BBB,--increment-instance-enabled=true | task-launcher-dataflow" +stream create --name ctr-stream --definition "timestamp-tlr --fixed-delay=30000 --spring.cloud.function.definition=\"timeSupplier|taskLaunchRequestFunction\" --task.launch.request.task-name=composed-task-sample | tasklauncher-sink" --deploy ---- ==== -For now, we focus on the configuration that is required to launch the `ComposedTaskRunner`: - -* `graph`: This is the graph that is to be executed by the `ComposedTaskRunner`. -In this case it is `AAA&&BBB`. -* `increment-instance-enabled`: This lets each execution of `ComposedTaskRunner` be unique. -`ComposedTaskRunner` is built by using https://projects.spring.io/spring-batch/[Spring Batch]. -Thus, we want a new Job Instance for each launch of the `ComposedTaskRunner`. -To do this, we set `increment-instance-enabled` to be `true`. [[sharing-spring-cloud-dataflows-datastore-with-tasks]] == Sharing Spring Cloud Data Flow's Datastore with Tasks @@ -1237,7 +1256,7 @@ When using Kubernetes, a https://kubernetes.io/docs/concepts/workloads/controlle NOTE: Scheduled tasks do not implement the continuous deployment feature. Any changes to application version or properties for a task definition in Spring Cloud Data Flow will not affect scheduled tasks. .Architectural Overview -image::{dataflow-asciidoc}/images/dataflow-scheduling-architecture.png[Scheduler Architecture Overview, scaledwidth="50%"] +image::{dataflow-asciidoc-images}/dataflow-scheduling-architecture.png[Scheduler Architecture Overview, scaledwidth="50%"] === Enabling Scheduling @@ -1327,7 +1346,7 @@ dataflow:>task schedule list ╔══════════════════════════╤════════════════════╤════════════════════════════════════════════════════╗ ║ Schedule Name │Task Definition Name│ Properties ║ ╠══════════════════════════╪════════════════════╪════════════════════════════════════════════════════╣ -║mytaskschedule │mytask │spring.cloud.scheduler.cron.expression = */1 * * * *║ +║mytaskschedule │mytask │spring.cloud.deployer.cron.expression = */1 * * * * ║ ╚══════════════════════════╧════════════════════╧════════════════════════════════════════════════════╝ ---- ====
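For reference, the schedule shown in the preceding listing could have been created with the `task schedule create` command demonstrated earlier in this chapter (the schedule and definition names here are taken from the listing):

====
[source,bash]
----
dataflow:>task schedule create --name mytaskschedule --definitionName mytask --expression '*/1 * * * *'
----
====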
-image::{dataflow-asciidoc}/images/dataflow-task-application-versions.png[Task Application Versions, scaledwidth="50%"] +image::{dataflow-asciidoc-images}/dataflow-task-application-versions.png[Task Application Versions, scaledwidth="50%"] Versions of an application are managed by registering multiple applications with the same name and coordinates, _except_ the version. For example, if you were to register an application with the following values, you would get one application registered with two versions (2.1.0.RELEASE and 2.1.1.RELEASE): @@ -1355,13 +1374,13 @@ Versions of an application are managed by registering multiple applications with Besides having multiple versions, Spring Cloud Data Flow needs to know which version to run on the next launch. This is indicated by setting a version to be the default version. Whatever version of a task application is configured as the default version is the one to be run on the next launch request. You can see which version is the default in the UI, as this image shows: -image::{dataflow-asciidoc}/images/dataflow-task-default-version.png[Task Application Default Version, scaledwidth="50%"] +image::{dataflow-asciidoc-images}/dataflow-task-default-version.png[Task Application Default Version, scaledwidth="50%"] === Task Launch Lifecycle In previous versions of Spring Cloud Data Flow, when the request to launch a task was received, Spring Cloud Data Flow would deploy the application (if needed) and run it. If the application was being run on a platform that did not need to have the application deployed every time (CloudFoundry, for example), the previously deployed application was used. This flow has changed in 2.3. The following image shows what happens when a task launch request comes in now: -image::{dataflow-asciidoc}/images/dataflow-task-launch-flow.png[Flow For Launching A Task, scaledwidth="50%"] +image::{dataflow-asciidoc-images}/dataflow-task-launch-flow.png[Flow For Launching A Task, scaledwidth="50%"] There are three main flows to consider in the preceding diagram. Launching the first time or launching with no changes is one. The other two are launching when there are changes but the appliction is not currently and launching when there are changes and the application is running. We look at the flow with no changes first. diff --git a/spring-cloud-dataflow-package/pom.xml b/spring-cloud-dataflow-package/pom.xml new file mode 100644 index 0000000000..252d52a267 --- /dev/null +++ b/spring-cloud-dataflow-package/pom.xml @@ -0,0 +1,52 @@ + + 4.0.0 + + org.springframework.cloud + spring-cloud-dataflow-parent + 3.0.0-SNAPSHOT + ../spring-cloud-dataflow-parent + + spring-cloud-dataflow-package + spring-cloud-dataflow-package + Data Flow Package + + pom + + ${project.version} + oss + 3.4.1 + + + + + org.apache.maven.plugins + maven-resources-plugin + 3.3.1 + + + org.apache.maven.plugins + maven-assembly-plugin + 3.5.0 + + spring-cloud-dataflow-${scdf.type.version}-install-${package.version} + + + + package + + single + + + false + + src/main/assembly/zip.xml + + + + + + + + diff --git a/spring-cloud-dataflow-package/set-package-version.sh b/spring-cloud-dataflow-package/set-package-version.sh new file mode 100755 index 0000000000..d09db0c98a --- /dev/null +++ b/spring-cloud-dataflow-package/set-package-version.sh @@ -0,0 +1,21 @@ +#!/usr/bin/env bash +if [ -z "$BASH_VERSION" ]; then + echo "This script requires Bash. Use: bash $0 $*" + exit 0 +fi +SCDIR=$(dirname "$(readlink -f "${BASH_SOURCE[0]}")") +ROOT=$(realpath $SCDIR/..) 
+if [ "$PACKAGE_VERSION" = "" ]; then + pushd $ROOT > /dev/null + ./mvnw help:evaluate -s .settings.xml -Dexpression=project.version > /dev/null + PACKAGE_VERSION=$(./mvnw help:evaluate -Dexpression=project.version -q -DforceStdout) + popd > /dev/null +fi +echo "PACKAGE_VERSION=$PACKAGE_VERSION" +if [[ "$PACKAGE_VERSION" != *"SNAPSHOT"* ]]; then + yq '.default.version="release"' -i "$ROOT/src/deploy/versions.yaml" + echo "Setting default.version=release, default.package-version=$PACKAGE_VERSION" + yq ".default.package-version=\"$PACKAGE_VERSION\"" -i "$ROOT/src/deploy/versions.yaml" + echo "Setting scdf-type.oss.release=$PACKAGE_VERSION" + yq ".scdf-type.oss.release=\"$PACKAGE_VERSION\"" -i "$ROOT/src/deploy/versions.yaml" +fi diff --git a/spring-cloud-dataflow-package/src/main/assembly/zip.xml b/spring-cloud-dataflow-package/src/main/assembly/zip.xml new file mode 100644 index 0000000000..4d85fa5f2f --- /dev/null +++ b/spring-cloud-dataflow-package/src/main/assembly/zip.xml @@ -0,0 +1,23 @@ + + distribution + false + + zip + + + + ${project.basedir}/../src/deploy + deploy + + **/*.adoc + **/*.jar + + + + ${project.basedir}/../src/kubernetes + deploy/kubernetes + + + \ No newline at end of file diff --git a/spring-cloud-dataflow-parent/pom.xml b/spring-cloud-dataflow-parent/pom.xml new file mode 100644 index 0000000000..63a2c17422 --- /dev/null +++ b/spring-cloud-dataflow-parent/pom.xml @@ -0,0 +1,560 @@ + + + 4.0.0 + org.springframework.cloud + spring-cloud-dataflow-parent + spring-cloud-dataflow-parent + Data Flow Parent + + 3.0.0-SNAPSHOT + pom + https://cloud.spring.io/spring-cloud-dataflow/ + + + 4.9.9 + 17 + -Xdoclint:none + 3.3.1 + 3.5.0 + + 3.3.7 + + 3.0.0-SNAPSHOT + + 6.1.14 + 3.4.3-SNAPSHOT + ${dataflow.version} + ${dataflow.version} + 3.0.0-SNAPSHOT + 3.1.3 + ${dataflow.version} + 0.8.12 + 3.0.2 + 2.2.0 + 1.5.5 + 0.5 + 1.5.4 + 9.39.3 + 1.1.10.7 + 1.26.2 + 2.11.1 + 3.0.2 + 2.12.7 + 1.12.513 + + 5.0.0 + 3.2.0 + 1.0.14 + 1.0.14 + 2.6.0 + 32.1.3-jre + + 21.9.0.0 + 11.5.9.0 + 3.4.0 + + 20240303 + 1.1.2 + + + + + + com.nimbusds + nimbus-jose-jwt + ${nimbus-jose-jwt.version} + + + + org.xerial.snappy + snappy-java + ${snappy-java.version} + + + + org.apache.commons + commons-compress + ${commons-compress.version} + + + + + + org.springframework.boot + spring-boot-dependencies + ${spring-boot.version} + pom + import + + + org.springframework.cloud + spring-cloud-dataflow-build-dependencies + ${dataflow.version} + pom + import + + + org.springframework.cloud + spring-cloud-dataflow-common-dependencies + ${dataflow.version} + pom + import + + + org.springframework.cloud + spring-cloud-common-security-config-core + ${dataflow.version} + + + org.springframework.cloud + spring-cloud-common-security-config-web + ${dataflow.version} + + + org.springframework.cloud + spring-cloud-starter-common-security-config-web + ${dataflow.version} + + + org.springframework.cloud + spring-cloud-task-dependencies + ${spring-cloud-task.version} + pom + import + + + org.springframework.cloud + spring-cloud-starter-single-step-batch-job + ${spring-cloud-task.version} + + + org.springframework.cloud + spring-cloud-dataflow-ui + ${spring-cloud-dataflow-ui.version} + + + org.springframework.cloud + spring-cloud-deployer-spi + ${spring-cloud-deployer.version} + + + org.springframework.cloud + spring-cloud-deployer-resource-support + ${spring-cloud-deployer.version} + + + org.springframework.cloud + spring-cloud-deployer-resource-maven + ${spring-cloud-deployer.version} + + + org.springframework.cloud + 
spring-cloud-deployer-resource-docker + ${spring-cloud-deployer.version} + + + org.springframework.cloud + spring-cloud-deployer-local + ${spring-cloud-deployer.version} + + + org.springframework.cloud + spring-cloud-deployer-cloudfoundry + ${spring-cloud-deployer.version} + + + org.springframework.cloud + spring-cloud-deployer-kubernetes + ${spring-cloud-deployer.version} + + + org.apache.directory.server + apacheds-protocol-ldap + ${apache-directory-server.version} + + + io.codearte.props2yaml + props2yaml + ${codearte-props2yml.version} + + + net.javacrumbs.json-unit + json-unit-assertj + ${json-unit.version} + + + com.google.code.findbugs + jsr305 + ${findbugs.version} + + + com.google.guava + guava + ${guava.version} + + + joda-time + joda-time + ${joda-time.version} + + + com.amazonaws + aws-java-sdk-ecr + ${aws-java-sdk-ecr.version} + + + org.codehaus.jettison + jettison + ${jettison.version} + + + org.springdoc + springdoc-openapi-starter-webmvc-ui + ${springdoc-openapi.version} + + + + com.wavefront + wavefront-spring-boot-bom + ${wavefront-spring-boot-bom.version} + pom + import + + + org.springframework.cloud.stream.app + stream-applications-micrometer-common + ${stream-applications.version} + + + org.springframework.cloud.stream.app + stream-applications-security-common + ${stream-applications.version} + + + org.springframework.cloud.stream.app + stream-applications-postprocessor-common + ${stream-applications.version} + + + org.springframework.cloud + spring-cloud-deployer-dependencies + ${spring-cloud-deployer.version} + pom + import + + + com.github.dasniko + testcontainers-keycloak + ${testcontainers-keycloak.version} + + + + + + jakarta.annotation + jakarta.annotation-api + + + + + + repo.spring.io + Spring Release Repository + https://repo.spring.io/libs-release-local + + + repo.spring.io + Spring Snapshot Repository + https://repo.spring.io/libs-snapshot-local + + + + Pivotal Software, Inc. 
+ https://www.spring.io + + + + Apache License, Version 2.0 + https://www.apache.org/licenses/LICENSE-2.0 + + + + https://github.com/spring-cloud/spring-cloud-dataflow-build + scm:git:git://github.com/spring-cloud/spring-cloud-dataflow-build.git + + + scm:git:ssh://git@github.com/spring-cloud/spring-cloud-dataflow-build.git + + HEAD + + + + scdf-team + Data Flow Team + https://github.com/spring-cloud/spring-cloud-dataflow/graphs/contributors + + + + + + + org.apache.maven.plugins + maven-checkstyle-plugin + 3.5.0 + + + org.springframework.boot + spring-boot-maven-plugin + ${spring-boot.version} + + CLASSIC + + + + org.sonarsource.scanner.maven + sonar-maven-plugin + ${sonar-maven-plugin.version} + + + org.jacoco + jacoco-maven-plugin + ${jacoco-maven-plugin.version} + + + org.apache.maven.plugins + maven-jar-plugin + 3.3.0 + + + org.apache.maven.plugins + maven-source-plugin + 3.0.1 + + + source + + jar + + package + + + + + io.github.git-commit-id + git-commit-id-maven-plugin + ${git-commit-id-plugin.version} + + + + revision + + + + + true + ${project.build.outputDirectory}/git.properties + full + + + + org.springframework.cloud + spring-cloud-dataflow-apps-docs-plugin + ${spring-cloud-dataflow-apps-docs-plugin.version} + + + generate-documentation + verify + + generate-documentation + + + + + + org.springframework.cloud + spring-cloud-dataflow-apps-metadata-plugin + ${spring-cloud-dataflow-apps-metadata-plugin.version} + + + org.apache.maven.plugins + maven-jxr-plugin + 3.1.1 + + + org.apache.maven.plugins + maven-surefire-plugin + 3.1.2 + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.13.0 + + true + ${java.version} + + + + org.apache.maven.plugins + maven-surefire-report-plugin + ${maven-surefire-report-plugin.version} + + + org.apache.maven.plugins + maven-surefire-plugin + + --add-opens java.base/java.util=ALL-UNNAMED + 1 + 1 + + **/Abstract*.java + + + + + org.jacoco + jacoco-maven-plugin + + + agent + + prepare-agent + + + + report + test + + report + + + + + + org.apache.maven.plugins + maven-checkstyle-plugin + 3.5.0 + + + + + + + + org.apache.maven.plugins + maven-jxr-plugin + 3.1.1 + + + + + + deploymentfiles + + + + org.apache.maven.plugins + maven-resources-plugin + ${maven-resources-plugin.version} + + + replace-deployment-files + process-resources + + copy-resources + + + true + ${basedir}/../src + + + ${basedir}/../src/templates + + **/* + + true + + + + + + + + + + + spring + + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + false + + + + + + maven-central + Maven Central + https://repo.maven.apache.org/maven2 + + false + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + false + + + + + + diff --git a/spring-cloud-dataflow-platform-cloudfoundry/pom.xml b/spring-cloud-dataflow-platform-cloudfoundry/pom.xml index 9f074c68fb..2743e835cc 100644 --- a/spring-cloud-dataflow-platform-cloudfoundry/pom.xml +++ b/spring-cloud-dataflow-platform-cloudfoundry/pom.xml @@ -4,10 +4,17 @@ org.springframework.cloud spring-cloud-dataflow-parent - 2.8.0-SNAPSHOT + 3.0.0-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-platform-cloudfoundry + spring-cloud-dataflow-platform-cloudfoundry + Data Platform Cloud Foundry jar + + true + 3.4.1 + 
io.pivotal.cfenv @@ -16,19 +23,22 @@ org.springframework.cloud spring-cloud-dataflow-server-core + ${project.version} org.springframework.cloud spring-cloud-deployer-cloudfoundry + + + javax.annotation + javax.annotation-api + + - org.springframework.boot - spring-boot-configuration-processor - true - - - org.springframework.cloud - spring-cloud-starter-config + org.springframework.boot + spring-boot-configuration-processor + true io.pivotal.cfenv @@ -37,11 +47,15 @@ io.pivotal.cfenv java-cfenv-boot-pivotal-sso - ${java-cfenv-boot.version} - io.pivotal.spring.cloud - spring-cloud-services-starter-config-client + jakarta.servlet + jakarta.servlet-api + + + org.assertj + assertj-core + test org.springframework.boot @@ -51,6 +65,48 @@ org.springframework.cloud spring-cloud-dataflow-core + ${project.version} + + + + org.apache.maven.plugins + maven-surefire-plugin + 3.1.2 + + 1 + 1 + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + diff --git a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryDataFlowServerConfiguration.java b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryDataFlowServerConfiguration.java index acc7921120..7d07183377 100644 --- a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryDataFlowServerConfiguration.java +++ b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryDataFlowServerConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,27 +16,28 @@ package org.springframework.cloud.dataflow.server.config.cloudfoundry; -import javax.annotation.PostConstruct; - import reactor.core.publisher.Hooks; +import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.condition.ConditionalOnCloudPlatform; import org.springframework.boot.cloud.CloudPlatform; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryConnectionProperties; import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryDeploymentProperties; import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; +import org.springframework.context.event.ContextRefreshedEvent; +import org.springframework.context.event.EventListener; /** * Configuration class for customizing Cloud Foundry deployer. 
* * @author Eric Bottard + * @author Corneil du Plessis */ +@AutoConfiguration @ConditionalOnCloudPlatform(CloudPlatform.CLOUD_FOUNDRY) -@Configuration public class CloudFoundryDataFlowServerConfiguration { - + private CloudFoundryServerConfigurationProperties cloudFoundryServerConfigurationProperties = new CloudFoundryServerConfigurationProperties(); @Bean @ConfigurationProperties(prefix = CloudFoundryConnectionProperties.CLOUDFOUNDRY_PROPERTIES + ".task") public CloudFoundryDeploymentProperties taskDeploymentProperties() { @@ -45,14 +46,16 @@ public CloudFoundryDeploymentProperties taskDeploymentProperties() { @Bean public CloudFoundryServerConfigurationProperties cloudFoundryServerConfigurationProperties() { - return new CloudFoundryServerConfigurationProperties(); + return cloudFoundryServerConfigurationProperties; } - @PostConstruct - public void afterPropertiesSet() { - if (cloudFoundryServerConfigurationProperties().isDebugReactor()) { + // Handling ContextRefreshedEvent instead of using @PostConstruct allows the + // resolution of beans to complete first and executes this code again when the + // configuration is updated. + @EventListener(ContextRefreshedEvent.class) + public void handleContextRefreshedEvent() { + if (this.cloudFoundryServerConfigurationProperties.isDebugReactor()) { Hooks.onOperatorDebug(); } } - } diff --git a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryPlatformClientProvider.java b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryPlatformClientProvider.java index 122d47c938..5933b2c2f3 100644 --- a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryPlatformClientProvider.java +++ b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryPlatformClientProvider.java @@ -20,11 +20,14 @@ import java.util.Map; import org.cloudfoundry.client.CloudFoundryClient; +import org.cloudfoundry.logcache.v1.LogCacheClient; import org.cloudfoundry.reactor.client.ReactorCloudFoundryClient; +import org.cloudfoundry.reactor.logcache.v1.ReactorLogCacheClient; /** * @author David Turanski - **/ + * @author Chris Bono + */ public class CloudFoundryPlatformClientProvider { private final CloudFoundryPlatformProperties platformProperties; @@ -35,6 +38,8 @@ public class CloudFoundryPlatformClientProvider { private final Map<String, CloudFoundryClient> cloudFoundryClients = new HashMap<>(); + private final Map<String, LogCacheClient> cloudFoundryLogClients = new HashMap<>(); + CloudFoundryPlatformClientProvider( CloudFoundryPlatformProperties platformProperties, CloudFoundryPlatformConnectionContextProvider connectionContextProvider, @@ -45,10 +50,16 @@ public class CloudFoundryPlatformClientProvider { } public CloudFoundryClient cloudFoundryClient(String account){ - cloudFoundryClients.putIfAbsent(account, ReactorCloudFoundryClient.builder() + return cloudFoundryClients.computeIfAbsent(account, (__) -> ReactorCloudFoundryClient.builder() + .connectionContext(connectionContextProvider.connectionContext(account)) + .tokenProvider(platformTokenProvider.tokenProvider(account)) + .build()); + } + + public LogCacheClient logCacheClient(String account) { + return cloudFoundryLogClients.computeIfAbsent(account, (__) -> ReactorLogCacheClient.builder() + .connectionContext(connectionContextProvider.connectionContext(account))
.tokenProvider(platformTokenProvider.tokenProvider(account)) .build()); - return cloudFoundryClients.get(account); } } diff --git a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryPlatformProperties.java b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryPlatformProperties.java index 470406d6bc..060a17d44d 100644 --- a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryPlatformProperties.java +++ b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryPlatformProperties.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2019 the original author or authors. + * Copyright 2018-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,7 +21,6 @@ import org.springframework.cloud.dataflow.server.config.cloudfoundry.CloudFoundryPlatformProperties.CloudFoundryProperties; import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryConnectionProperties; import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryDeploymentProperties; -import org.springframework.cloud.deployer.spi.scheduler.cloudfoundry.CloudFoundrySchedulerProperties; /** * @author Mark Pollack @@ -40,8 +39,6 @@ public static class CloudFoundryProperties { private CloudFoundryDeploymentProperties deployment; - private CloudFoundrySchedulerProperties scheduler; - public CloudFoundryConnectionProperties getConnection() { return connection; } @@ -57,13 +54,5 @@ public CloudFoundryDeploymentProperties getDeployment() { public void setDeployment(CloudFoundryDeploymentProperties deployment) { this.deployment = deployment; } - - public CloudFoundrySchedulerProperties getScheduler() { - return scheduler; - } - - public void setScheduler(CloudFoundrySchedulerProperties scheduler) { - this.scheduler = scheduler; - } } } diff --git a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundrySchedulerClientProvider.java b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundrySchedulerClientProvider.java index 8b6df12fca..151db68b68 100644 --- a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundrySchedulerClientProvider.java +++ b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundrySchedulerClientProvider.java @@ -1,5 +1,5 @@ /* - * Copyright 2019 the original author or authors. + * Copyright 2019-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
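In CloudFoundryPlatformClientProvider above, the putIfAbsent-then-get pair becomes a single computeIfAbsent, and a parallel cache is introduced for Log Cache clients. The difference matters: putIfAbsent evaluates its value argument eagerly, so the old code built a fresh ReactorCloudFoundryClient on every call even when the account was already cached, while computeIfAbsent invokes the factory only on a miss. A generic sketch of the idiom, with assumed names:

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

// Sketch of the per-account lazy cache idiom (names assumed; the class in
// this diff keeps a separate HashMap field per client type instead).
final class PerAccountCache<C> {

	private final Map<String, C> clients = new ConcurrentHashMap<>();

	private final Function<String, C> factory;

	PerAccountCache(Function<String, C> factory) {
		this.factory = factory;
	}

	C get(String account) {
		// The factory runs at most once per account and only on a cache miss.
		return this.clients.computeIfAbsent(account, this.factory);
	}
}
```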
@@ -16,13 +16,10 @@ package org.springframework.cloud.dataflow.server.config.cloudfoundry; -import java.util.Optional; - import io.pivotal.reactor.scheduler.ReactorSchedulerClient; import io.pivotal.scheduler.SchedulerClient; import reactor.core.publisher.Mono; -import org.springframework.cloud.deployer.spi.scheduler.cloudfoundry.CloudFoundrySchedulerProperties; /** * @author David Turanski @@ -33,29 +30,25 @@ public class CloudFoundrySchedulerClientProvider { private final CloudFoundryPlatformTokenProvider platformTokenProvider; - private final Optional<CloudFoundrySchedulerProperties> schedulerProperties; + private final CloudFoundryPlatformProperties platformProperties; public CloudFoundrySchedulerClientProvider( CloudFoundryPlatformConnectionContextProvider connectionContextProvider, CloudFoundryPlatformTokenProvider platformTokenProvider, - Optional<CloudFoundrySchedulerProperties> schedulerProperties) { + CloudFoundryPlatformProperties platformProperties) { this.connectionContextProvider = connectionContextProvider; this.platformTokenProvider = platformTokenProvider; - this.schedulerProperties = schedulerProperties; + this.platformProperties = platformProperties; } public SchedulerClient cloudFoundrySchedulerClient(String account) { return ReactorSchedulerClient.builder() .connectionContext(connectionContextProvider.connectionContext(account)) .tokenProvider(platformTokenProvider.tokenProvider(account)) - .root(Mono.just(schedulerProperties().getSchedulerUrl())) + .root(Mono.just(platformProperties.getAccounts().get(account).getDeployment().getSchedulerUrl())) .build(); } - public CloudFoundrySchedulerProperties schedulerProperties() { - return this.schedulerProperties.orElseGet(CloudFoundrySchedulerProperties::new); - } - } diff --git a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryTaskPlatformAutoConfiguration.java b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryTaskPlatformAutoConfiguration.java index dfcf14cdd1..2baa4ca45d 100644 --- a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryTaskPlatformAutoConfiguration.java +++ b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryTaskPlatformAutoConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2019 the original author or authors. + * Copyright 2018-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
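With the rewrite above, CloudFoundrySchedulerClientProvider no longer consults a shared Optional<CloudFoundrySchedulerProperties>; the scheduler endpoint is resolved per account from the deployment section of the platform properties. A hedged sketch of that lookup (accessor names are taken from the hunk; the null handling is illustrative, since the real code assumes the account and its deployment block exist):

```java
// Types here are this module's own (CloudFoundryPlatformProperties and its
// nested CloudFoundryProperties); only the defensive null checks are added.
class SchedulerUrlResolver {

	String schedulerUrl(CloudFoundryPlatformProperties platformProperties, String account) {
		CloudFoundryPlatformProperties.CloudFoundryProperties accountProperties =
				platformProperties.getAccounts().get(account);
		if (accountProperties == null || accountProperties.getDeployment() == null) {
			return null;
		}
		return accountProperties.getDeployment().getSchedulerUrl();
	}
}
```

The practical consequence is that each platform account can point at its own scheduler, configured under that account's deployment properties, rather than sharing one global scheduler URL.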
@@ -17,13 +17,12 @@ import java.util.Optional; +import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.cloud.dataflow.core.TaskPlatform; import org.springframework.cloud.dataflow.server.config.CloudProfileProvider; import org.springframework.cloud.dataflow.server.config.features.ConditionalOnTasksEnabled; -import org.springframework.cloud.deployer.spi.scheduler.cloudfoundry.CloudFoundrySchedulerProperties; import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; import org.springframework.core.env.Environment; /** @@ -31,7 +30,7 @@ * @author Mark Pollack * @author David Turanski */ -@Configuration +@AutoConfiguration @ConditionalOnTasksEnabled @EnableConfigurationProperties(CloudFoundryPlatformProperties.class) public class CloudFoundryTaskPlatformAutoConfiguration { @@ -72,9 +71,9 @@ public CloudFoundryPlatformClientProvider cloudFoundryClientProvider( public CloudFoundrySchedulerClientProvider schedulerClientProvider( CloudFoundryPlatformConnectionContextProvider connectionContextProvider, CloudFoundryPlatformTokenProvider platformTokenProvider, - Optional<CloudFoundrySchedulerProperties> schedulerProperties) { + CloudFoundryPlatformProperties cloudFoundryPlatformProperties) { return new CloudFoundrySchedulerClientProvider( - connectionContextProvider, platformTokenProvider, schedulerProperties); + connectionContextProvider, platformTokenProvider, cloudFoundryPlatformProperties); } @Bean diff --git a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryTaskPlatformFactory.java b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryTaskPlatformFactory.java index 94207e0de6..41da8276bf 100644 --- a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryTaskPlatformFactory.java +++ b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryTaskPlatformFactory.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2020 the original author or authors. + * Copyright 2019-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
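The same migration applies here as to the server configuration earlier: @Configuration becomes @AutoConfiguration, and, as the resource changes later in this diff show, registration moves from the EnableAutoConfiguration key in META-INF/spring.factories to one fully qualified class name per line in META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports. A skeleton of the new style (illustrative, not a class from this diff):

```java
import org.springframework.boot.autoconfigure.AutoConfiguration;

// @AutoConfiguration is itself meta-annotated with
// @Configuration(proxyBeanMethods = false), so bean methods must not rely on
// being invoked through the CGLIB proxy.
@AutoConfiguration
public class ExampleAutoConfiguration {

	// @Bean methods stay as before; only the registration mechanism changes:
	// list this class in
	// META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports
	// rather than in META-INF/spring.factories.
}
```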
@@ -35,6 +35,7 @@ import org.springframework.cloud.dataflow.core.AbstractTaskPlatformFactory; import org.springframework.cloud.dataflow.core.Launcher; import org.springframework.cloud.deployer.spi.app.AppDeployer; +import org.springframework.cloud.deployer.spi.cloudfoundry.ApplicationLogAccessor; import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryAppDeployer; import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryConnectionProperties; import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryDeploymentProperties; @@ -43,7 +44,6 @@ import org.springframework.cloud.deployer.spi.core.RuntimeEnvironmentInfo; import org.springframework.cloud.deployer.spi.scheduler.Scheduler; import org.springframework.cloud.deployer.spi.scheduler.cloudfoundry.CloudFoundryAppScheduler; -import org.springframework.cloud.deployer.spi.scheduler.cloudfoundry.CloudFoundrySchedulerProperties; import org.springframework.cloud.deployer.spi.util.RuntimeVersionUtils; /** @@ -78,15 +78,16 @@ private CloudFoundryTaskPlatformFactory(CloudFoundryPlatformProperties cloudFoun @Override public Launcher createLauncher(String account) { - ConnectionContext connectionContext = connectionContext(account); - TokenProvider tokenProvider = tokenProvider(account); + connectionContext(account); + tokenProvider(account); CloudFoundryClient cloudFoundryClient = cloudFoundryClient(account); CloudFoundryOperations cloudFoundryOperations = cloudFoundryOperations(cloudFoundryClient, account); CloudFoundryTaskLauncher taskLauncher = new CloudFoundryTaskLauncher( cloudFoundryClient, deploymentProperties(account), cloudFoundryOperations, - runtimeEnvironmentInfo(cloudFoundryClient, account)); + runtimeEnvironmentInfo(cloudFoundryClient, account), + new ApplicationLogAccessor(this.cloudFoundryClientProvider.logCacheClient(account))); Launcher launcher = new Launcher(account, CLOUDFOUNDRY_PLATFORM_TYPE, taskLauncher, scheduler(account, taskLauncher, cloudFoundryOperations)); CloudFoundryConnectionProperties connectionProperties = connectionProperties(account); @@ -99,10 +100,10 @@ public Launcher createLauncher(String account) { private Scheduler scheduler(String account, CloudFoundryTaskLauncher taskLauncher, CloudFoundryOperations cloudFoundryOperations) { Scheduler scheduler = null; - if (cloudFoundrySchedulerClientProvider.isPresent() && this.platformProperties.getAccounts().get(account).getScheduler() != null) { - Optional schedulerProperties = Optional.of(this.platformProperties.getAccounts().get(account).getScheduler()); + if (cloudFoundrySchedulerClientProvider.isPresent() && this.platformProperties.getAccounts().get(account).getDeployment().getSchedulerUrl() != null) { + CloudFoundryDeploymentProperties deploymentProperties = this.platformProperties.getAccounts().get(account).getDeployment(); CloudFoundrySchedulerClientProvider cloudFoundrySchedulerClientProviderLocal = new CloudFoundrySchedulerClientProvider( - connectionContextProvider, platformTokenProvider, schedulerProperties); + connectionContextProvider, platformTokenProvider, this.platformProperties); SchedulerClient schedulerClient = cloudFoundrySchedulerClientProviderLocal.cloudFoundrySchedulerClient(account); scheduler = new CloudFoundryAppScheduler( @@ -110,7 +111,7 @@ private Scheduler scheduler(String account, CloudFoundryTaskLauncher taskLaunche cloudFoundryOperations, connectionProperties(account), taskLauncher, - schedulerProperties.get()); + deploymentProperties); } return scheduler; } @@ -185,7 +186,7 @@ public static 
class Builder { private boolean schedulesEnabled; - private Optional<CloudFoundrySchedulerProperties> schedulerProperties = Optional.empty(); + private CloudFoundryDeploymentProperties deploymentProperties; private CloudFoundryPlatformTokenProvider platformTokenProvider; @@ -205,8 +206,8 @@ public Builder schedulesEnabled(boolean schedulesEnabled) { return this; } - public Builder schedulerProperties(Optional<CloudFoundrySchedulerProperties> schedulerProperties) { - this.schedulerProperties = schedulerProperties; + public Builder schedulerProperties(CloudFoundryDeploymentProperties deploymentProperties) { + this.deploymentProperties = deploymentProperties; return this; } diff --git a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/security/CloudFoundryOAuthSecurityConfiguration.java b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/security/CloudFoundryOAuthSecurityConfiguration.java index 61cf34e0d7..e69de29bb2 100644 --- a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/security/CloudFoundryOAuthSecurityConfiguration.java +++ b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/security/CloudFoundryOAuthSecurityConfiguration.java @@ -1,111 +0,0 @@ -/* - * Copyright 2017-2019 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ -package org.springframework.cloud.dataflow.server.config.cloudfoundry.security; - -import javax.annotation.PostConstruct; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.boot.autoconfigure.condition.ConditionalOnCloudPlatform; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.boot.cloud.CloudPlatform; -import org.springframework.cloud.common.security.OAuthSecurityConfiguration; -import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService; -import org.springframework.cloud.common.security.support.CustomAuthoritiesOpaqueTokenIntrospector; -import org.springframework.cloud.common.security.support.DefaultAuthoritiesMapper; -import org.springframework.cloud.common.security.support.OnOAuth2SecurityEnabled; -import org.springframework.cloud.dataflow.server.config.cloudfoundry.security.support.CloudFoundryDataflowAuthoritiesMapper; -import org.springframework.cloud.dataflow.server.config.cloudfoundry.security.support.CloudFoundrySecurityService; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Conditional; -import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Import; -import org.springframework.web.client.RestTemplate; - -/** - * When running inside Cloud Foundry, this {@link Configuration} class will reconfigure - * Spring Cloud Data Flow's security setup in {@link OAuthSecurityConfiguration}, so that - * only users with the CF_SPACE_DEVELOPER_ROLE} can access the REST APIs. - *

- * Therefore, this configuration will ensure that only Cloud Foundry - * {@code Space Developers} have access to the underlying REST API's. - *

- * For this to happen, a REST call will be made to the Cloud Foundry Permissions API via - * CloudFoundrySecurityService inside the {@link DefaultAuthoritiesMapper}. - *

- * If the user has the respective permissions, the CF_SPACE_DEVELOPER_ROLE will be - * assigned to the user. - *

- * See also: - * https://apidocs.cloudfoundry.org/258/apps/retrieving_permissions_on_a_app.html - * - * @author Gunnar Hillert - * @author Ilayaperumal Gopinathan - */ -@Configuration -@ConditionalOnCloudPlatform(CloudPlatform.CLOUD_FOUNDRY) -@Conditional(OnOAuth2SecurityEnabled.class) -@Import(CloudFoundryOAuthSecurityConfiguration.CloudFoundryUAAConfiguration.class) -public class CloudFoundryOAuthSecurityConfiguration { - - private static final Logger logger = LoggerFactory.getLogger(CloudFoundryOAuthSecurityConfiguration.class); - - @Autowired - private CustomAuthoritiesOpaqueTokenIntrospector customAuthoritiesOpaqueTokenIntrospector; - - @Autowired(required = false) - private CloudFoundryDataflowAuthoritiesMapper cloudFoundryDataflowAuthoritiesExtractor; - - @PostConstruct - public void init() { - if (this.cloudFoundryDataflowAuthoritiesExtractor != null) { - logger.info("Setting up Cloud Foundry AuthoritiesExtractor for UAA."); - this.customAuthoritiesOpaqueTokenIntrospector.setAuthorityMapper(this.cloudFoundryDataflowAuthoritiesExtractor); - } - } - - @Configuration - @ConditionalOnProperty(name = "spring.cloud.dataflow.security.cf-use-uaa", havingValue = "true") - public class CloudFoundryUAAConfiguration { - - @Value("${vcap.application.cf_api}") - private String cloudControllerUrl; - - @Value("${vcap.application.application_id}") - private String applicationId; - - @Bean - public CloudFoundryDataflowAuthoritiesMapper authoritiesExtractor( - CloudFoundrySecurityService cloudFoundrySecurityService - ) { - return new CloudFoundryDataflowAuthoritiesMapper(cloudFoundrySecurityService); - } - - @Bean - public CloudFoundrySecurityService cloudFoundrySecurityService( - OAuth2TokenUtilsService oauth2TokenUtilsService, - RestTemplate restTemplate) { - return new CloudFoundrySecurityService(oauth2TokenUtilsService, restTemplate, - this.cloudControllerUrl, - this.applicationId); - } - - } - -} diff --git a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/security/support/AccessLevel.java b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/security/support/AccessLevel.java deleted file mode 100644 index 41160e4621..0000000000 --- a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/security/support/AccessLevel.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright 2017 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.cloud.dataflow.server.config.cloudfoundry.security.support; - -import java.util.Arrays; -import java.util.List; - -import javax.servlet.http.HttpServletRequest; - -/** - * The specific access level granted to the Cloud Foundry user that's calling the - * endpoints. 
- * - * @author Madhura Bhave - * @author Gunnar Hillert - */ -public enum AccessLevel { - - /** - * Restricted access to a limited set of endpoints. - */ - RESTRICTED("", "/health", "/info"), - - /** - * No access. - */ - NONE, - - /** - * Full access to all endpoints. - */ - FULL; - - private static final String REQUEST_ATTRIBUTE = "cloudFoundryAccessLevel"; - - private final List endpointPaths; - - AccessLevel(String... endpointPaths) { - this.endpointPaths = Arrays.asList(endpointPaths); - } - - public static AccessLevel get(HttpServletRequest request) { - return (AccessLevel) request.getAttribute(REQUEST_ATTRIBUTE); - } - - /** - * Returns if the access level should allow access to the specified endpoint path. - * @param endpointPath the endpoint path - * @return {@code true} if access is allowed - */ - public boolean isAccessAllowed(String endpointPath) { - return this.endpointPaths.isEmpty() || this.endpointPaths.contains(endpointPath); - } - - public void put(HttpServletRequest request) { - request.setAttribute(REQUEST_ATTRIBUTE, this); - } - -} diff --git a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/security/support/CloudFoundryAuthorizationException.java b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/security/support/CloudFoundryAuthorizationException.java deleted file mode 100644 index 33cc327a58..0000000000 --- a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/security/support/CloudFoundryAuthorizationException.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright 2017 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.cloud.dataflow.server.config.cloudfoundry.security.support; - -import org.springframework.http.HttpStatus; - -/** - * Authorization exceptions thrown to limit access to the endpoints. - * - * @author Madhura Bhave - */ -public class CloudFoundryAuthorizationException extends RuntimeException { - - private static final long serialVersionUID = 1L; - - private final Reason reason; - - CloudFoundryAuthorizationException(Reason reason, String message) { - this(reason, message, null); - } - - CloudFoundryAuthorizationException(Reason reason, String message, Throwable cause) { - super(message); - this.reason = reason; - } - - /** - * Return the status code that should be returned to the client. - * @return the HTTP status code - */ - public HttpStatus getStatusCode() { - return getReason().getStatus(); - } - - /** - * Return the reason why the authorization exception was thrown. - * @return the reason - */ - public Reason getReason() { - return this.reason; - } - - /** - * Reasons why the exception can be thrown. 
- */ - enum Reason { - - ACCESS_DENIED(HttpStatus.FORBIDDEN), - - INVALID_AUDIENCE(HttpStatus.UNAUTHORIZED), - - INVALID_ISSUER(HttpStatus.UNAUTHORIZED), - - INVALID_KEY_ID(HttpStatus.UNAUTHORIZED), - - INVALID_SIGNATURE(HttpStatus.UNAUTHORIZED), - - INVALID_TOKEN(HttpStatus.UNAUTHORIZED), - - MISSING_AUTHORIZATION(HttpStatus.UNAUTHORIZED), - - TOKEN_EXPIRED(HttpStatus.UNAUTHORIZED), - - UNSUPPORTED_TOKEN_SIGNING_ALGORITHM(HttpStatus.UNAUTHORIZED), - - SERVICE_UNAVAILABLE(HttpStatus.SERVICE_UNAVAILABLE); - - private final HttpStatus status; - - Reason(HttpStatus status) { - this.status = status; - } - - public HttpStatus getStatus() { - return this.status; - } - } -} diff --git a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/security/support/CloudFoundryDataflowAuthoritiesMapper.java b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/security/support/CloudFoundryDataflowAuthoritiesMapper.java deleted file mode 100644 index d0631503b6..0000000000 --- a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/security/support/CloudFoundryDataflowAuthoritiesMapper.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright 2017-2019 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.cloud.dataflow.server.config.cloudfoundry.security.support; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import org.springframework.cloud.common.security.support.AuthoritiesMapper; -import org.springframework.cloud.common.security.support.CoreSecurityRoles; -import org.springframework.cloud.common.security.support.SecurityConfigUtils; -import org.springframework.security.config.core.GrantedAuthorityDefaults; -import org.springframework.security.core.GrantedAuthority; -import org.springframework.security.core.authority.SimpleGrantedAuthority; -import org.springframework.util.StringUtils; - -/** - * This Spring Cloud Data Flow {@link AuthoritiesMapper} will assign all - * {@link CoreSecurityRoles} to the authenticated OAuth2 user IF the user is a "Space - * Developer" in Cloud Foundry. 
- * - * @author Gunnar Hillert - * - */ -public class CloudFoundryDataflowAuthoritiesMapper implements AuthoritiesMapper { - - private static final Logger logger = LoggerFactory - .getLogger(CloudFoundryDataflowAuthoritiesMapper.class); - - private final CloudFoundrySecurityService cloudFoundrySecurityService; - - public CloudFoundryDataflowAuthoritiesMapper(CloudFoundrySecurityService cloudFoundrySecurityService) { - this.cloudFoundrySecurityService = cloudFoundrySecurityService; - } - - /** - * The returned {@link List} of {@link GrantedAuthority}s contains all roles from - * {@link CoreSecurityRoles}. The roles are prefixed with the value specified in - * {@link GrantedAuthorityDefaults}. - * - * @param providerId Not used - * @param scopes Not used - * @param token Must not be null or empty. - */ - @Override - public Set mapScopesToAuthorities(String providerId, Set scopes, String token) { - if (cloudFoundrySecurityService.isSpaceDeveloper(token)) { - final List rolesAsStrings = new ArrayList<>(); - final Set grantedAuthorities = Stream.of(CoreSecurityRoles.values()) - .map(roleEnum -> { - final String roleName = SecurityConfigUtils.ROLE_PREFIX + roleEnum.getKey(); - rolesAsStrings.add(roleName); - return new SimpleGrantedAuthority(roleName); - }) - .collect(Collectors.toSet()); - logger.info("Adding ALL roles {} to Cloud Foundry Space Developer user.", - StringUtils.collectionToCommaDelimitedString(rolesAsStrings)); - return grantedAuthorities; - } - else { - return Collections.emptySet(); - } - } -} diff --git a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/security/support/CloudFoundrySecurityService.java b/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/security/support/CloudFoundrySecurityService.java deleted file mode 100644 index 8ffb6a60a0..0000000000 --- a/spring-cloud-dataflow-platform-cloudfoundry/src/main/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/security/support/CloudFoundrySecurityService.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright 2017-2019 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.cloud.dataflow.server.config.cloudfoundry.security.support; - -import java.net.URI; -import java.net.URISyntaxException; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService; -import org.springframework.http.HttpStatus; -import org.springframework.http.RequestEntity; -import org.springframework.util.Assert; -import org.springframework.web.client.HttpClientErrorException; -import org.springframework.web.client.HttpServerErrorException; -import org.springframework.web.client.RestTemplate; - -/** - * Cloud Foundry security service to handle REST calls to the cloud controller and UAA. 
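This deletion, together with AccessLevel, CloudFoundryAuthorizationException, and CloudFoundryDataflowAuthoritiesMapper above, removes the whole Cloud Foundry space-developer authorization chain. Its core was a single REST call against the cloud controller permissions endpoint; condensed here for reference, with error handling and the Reason mapping elided:

```java
import java.net.URI;
import java.util.Map;

import org.springframework.http.RequestEntity;
import org.springframework.web.client.RestTemplate;

// Condensed form of the check being removed: read_sensitive_data == true on
// the app's permissions endpoint mapped to AccessLevel.FULL, which the old
// code treated as "is a Space Developer".
class SpaceDeveloperCheck {

	boolean isSpaceDeveloper(RestTemplate restTemplate, String cloudControllerUrl,
			String applicationId, String token) {
		URI permissionsUri = URI.create(cloudControllerUrl + "/v2/apps/" + applicationId + "/permissions");
		RequestEntity<Void> request = RequestEntity.get(permissionsUri)
				.header("Authorization", "bearer " + token)
				.build();
		Map<?, ?> body = restTemplate.exchange(request, Map.class).getBody();
		return body != null && Boolean.TRUE.equals(body.get("read_sensitive_data"));
	}
}
```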
- * - * @author Madhura Bhave - * @author Gunnar Hillert - * @author Ilayaperumal Gopinathan - * - */ -public class CloudFoundrySecurityService { - - private static final Logger logger = LoggerFactory.getLogger(CloudFoundrySecurityService.class); - - private final OAuth2TokenUtilsService oauth2TokenUtilsService; - private final RestTemplate restTemplate; - - private final String cloudControllerUrl; - - private final String applicationId; - - public CloudFoundrySecurityService(OAuth2TokenUtilsService oauth2TokenUtilsService, - RestTemplate restTemplate, String cloudControllerUrl, - String applicationId) { - Assert.notNull(oauth2TokenUtilsService, "oauth2TokenUtilsService must not be null."); - Assert.notNull(restTemplate, "restTemplate must not be null."); - Assert.notNull(cloudControllerUrl, "CloudControllerUrl must not be null."); - Assert.notNull(applicationId, "ApplicationId must not be null."); - this.oauth2TokenUtilsService = oauth2TokenUtilsService; - this.cloudControllerUrl = cloudControllerUrl; - this.applicationId = applicationId; - this.restTemplate = restTemplate; - } - - /** - * Returns {@code true} if the user (using the access-token from the authenticated user) - * has full {@link AccessLevel#FULL} for the provided - * {@code applicationId} - * - * @return true of the user is a space developer in Cloud Foundry - */ - public boolean isSpaceDeveloper() { - final String accessToken = this.oauth2TokenUtilsService.getAccessTokenOfAuthenticatedUser(); - return isSpaceDeveloper(accessToken); - } - - public boolean isSpaceDeveloper(String accessToken) { - Assert.hasText(accessToken, "The accessToken must not be null or empty."); - final AccessLevel accessLevel = getAccessLevel( - accessToken, applicationId); - - if (AccessLevel.FULL.equals(accessLevel)) { - return true; - } - else { - return false; - } - } - - /** - * Return the access level that should be granted to the given token. - * @param token the token - * @param applicationId the cloud foundry application ID - * @return the access level that should be granted - * @throws CloudFoundryAuthorizationException if the token is not authorized - */ - public AccessLevel getAccessLevel(String token, String applicationId) - throws CloudFoundryAuthorizationException { - try { - final URI permissionsUri = getPermissionsUri(applicationId); - logger.info("Using PermissionsUri: " + permissionsUri); - RequestEntity request = RequestEntity.get(permissionsUri) - .header("Authorization", "bearer " + token).build(); - Map body = this.restTemplate.exchange(request, Map.class).getBody(); - if (Boolean.TRUE.equals(body.get("read_sensitive_data"))) { - return AccessLevel.FULL; - } - else { - return AccessLevel.RESTRICTED; - } - } - catch (HttpClientErrorException ex) { - if (ex.getStatusCode().equals(HttpStatus.FORBIDDEN)) { - return AccessLevel.NONE; - } - // TODO GH-2627 - a class of the same name is in boot actuator 2.1. 
check for differnces - throw new CloudFoundryAuthorizationException(CloudFoundryAuthorizationException.Reason.INVALID_TOKEN, - "Invalid token", ex); - } - catch (HttpServerErrorException ex) { - throw new CloudFoundryAuthorizationException(CloudFoundryAuthorizationException.Reason.SERVICE_UNAVAILABLE, - "Cloud controller not reachable"); - } - } - - private URI getPermissionsUri(String applicationId) { - try { - return new URI(this.cloudControllerUrl + "/v2/apps/" + applicationId - + "/permissions"); - } - catch (URISyntaxException ex) { - throw new IllegalStateException(ex); - } - } -} diff --git a/spring-cloud-dataflow-platform-cloudfoundry/src/main/resources/META-INF/spring.factories b/spring-cloud-dataflow-platform-cloudfoundry/src/main/resources/META-INF/spring.factories deleted file mode 100644 index 42018a33bb..0000000000 --- a/spring-cloud-dataflow-platform-cloudfoundry/src/main/resources/META-INF/spring.factories +++ /dev/null @@ -1,3 +0,0 @@ -org.springframework.boot.autoconfigure.EnableAutoConfiguration=\ - org.springframework.cloud.dataflow.server.config.cloudfoundry.CloudFoundryDataFlowServerConfiguration,\ - org.springframework.cloud.dataflow.server.config.cloudfoundry.CloudFoundryTaskPlatformAutoConfiguration diff --git a/spring-cloud-dataflow-platform-cloudfoundry/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports b/spring-cloud-dataflow-platform-cloudfoundry/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports new file mode 100644 index 0000000000..25cd7a0eaf --- /dev/null +++ b/spring-cloud-dataflow-platform-cloudfoundry/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports @@ -0,0 +1,2 @@ +org.springframework.cloud.dataflow.server.config.cloudfoundry.CloudFoundryDataFlowServerConfiguration +org.springframework.cloud.dataflow.server.config.cloudfoundry.CloudFoundryTaskPlatformAutoConfiguration \ No newline at end of file diff --git a/spring-cloud-dataflow-platform-cloudfoundry/src/test/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryPlatformPropertiesTests.java b/spring-cloud-dataflow-platform-cloudfoundry/src/test/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryPlatformPropertiesTests.java index 2e98ba4c3d..01e62312ac 100644 --- a/spring-cloud-dataflow-platform-cloudfoundry/src/test/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryPlatformPropertiesTests.java +++ b/spring-cloud-dataflow-platform-cloudfoundry/src/test/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryPlatformPropertiesTests.java @@ -17,46 +17,44 @@ import java.util.Map; -import org.assertj.core.api.Assertions; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.context.annotation.Configuration; import org.springframework.test.context.ActiveProfiles; -import org.springframework.test.context.junit4.SpringRunner; import static org.assertj.core.api.Assertions.assertThat; /** * @author Donovan Muller * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ -@RunWith(SpringRunner.class) @SpringBootTest(classes = CloudFoundryPlatformPropertiesTests.TestConfig.class) 
@ActiveProfiles("cloudfoundry-platform-properties") -public class CloudFoundryPlatformPropertiesTests { +class CloudFoundryPlatformPropertiesTests { @Autowired private CloudFoundryPlatformProperties cloudFoundryPlatformProperties; @Test - public void deserializationTest() { + void deserializationTest() { Map cfAccounts = this.cloudFoundryPlatformProperties .getAccounts(); - Assertions.assertThat(cfAccounts).hasSize(2); - Assertions.assertThat(cfAccounts).containsKeys("dev", "qa"); + assertThat(cfAccounts) + .hasSize(2) + .containsKeys("dev", "qa"); assertThat(cfAccounts.get("dev").getConnection().getOrg()).isEqualTo("myOrg"); assertThat(cfAccounts.get("dev").getConnection().getClientId()).isEqualTo("id1"); assertThat(cfAccounts.get("dev").getConnection().getClientSecret()).isEqualTo("secret1"); assertThat(cfAccounts.get("dev").getDeployment().getMemory()).isEqualTo("512m"); assertThat(cfAccounts.get("dev").getDeployment().getDisk()).isEqualTo("2048m"); assertThat(cfAccounts.get("dev").getDeployment().getInstances()).isEqualTo(4); - assertThat(cfAccounts.get("dev").getDeployment().getAppNamePrefix().equals("dev1")); + assertThat(cfAccounts.get("dev").getDeployment().getAppNamePrefix()).isEqualTo("dev1"); assertThat(cfAccounts.get("dev").getDeployment().getServices()) - .containsExactly("rabbit", "mysql"); + .containsExactlyInAnyOrder("rabbit", "mysql"); assertThat(cfAccounts.get("qa").getConnection().getOrg()).isEqualTo("myOrgQA"); assertThat(cfAccounts.get("qa").getConnection().getClientId()).isEqualTo("id2"); @@ -64,9 +62,9 @@ public void deserializationTest() { assertThat(cfAccounts.get("qa").getDeployment().getMemory()).isEqualTo("756m"); assertThat(cfAccounts.get("qa").getDeployment().getDisk()).isEqualTo("724m"); assertThat(cfAccounts.get("qa").getDeployment().getInstances()).isEqualTo(2); - assertThat(cfAccounts.get("qa").getDeployment().getAppNamePrefix().equals("qa1")); + assertThat(cfAccounts.get("qa").getDeployment().getAppNamePrefix()).isEqualTo("qa1"); assertThat(cfAccounts.get("qa").getDeployment().getServices()) - .containsExactly("rabbitQA", "mysqlQA"); + .containsExactlyInAnyOrder("rabbitQA", "mysqlQA"); } @Configuration diff --git a/spring-cloud-dataflow-platform-cloudfoundry/src/test/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryTaskPlatformFactoryTests.java b/spring-cloud-dataflow-platform-cloudfoundry/src/test/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryTaskPlatformFactoryTests.java index 007668966d..0fc3684b33 100644 --- a/spring-cloud-dataflow-platform-cloudfoundry/src/test/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryTaskPlatformFactoryTests.java +++ b/spring-cloud-dataflow-platform-cloudfoundry/src/test/java/org/springframework/cloud/dataflow/server/config/cloudfoundry/CloudFoundryTaskPlatformFactoryTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2020 the original author or authors. + * Copyright 2019-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
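The test changes across these files follow one pattern: JUnit 4's @RunWith(SpringRunner.class) is dropped (the JUnit 5 @SpringBootTest already carries the Spring extension), test classes and methods become package-private, chained AssertJ calls replace repeated assertThat invocations, and the java.net.URL(String) constructor, deprecated in recent JDKs, gives way to URI.create(...).toURL(). One fix deserves emphasis: the old appNamePrefix assertions wrapped a boolean and never actually ran a check. A small illustration:

```java
import static org.assertj.core.api.Assertions.assertThat;

// Why the appNamePrefix assertions were rewritten: the old form builds an
// assertion object around a boolean but never invokes a verification method,
// so it can never fail.
class AssertionSketch {

	void check(String appNamePrefix) {
		// old: assertThat(appNamePrefix.equals("dev1"));  // no-op, always passes
		assertThat(appNamePrefix).isEqualTo("dev1");        // reports both values on mismatch
	}
}
```

The services assertions also relax containsExactly to containsExactlyInAnyOrder, presumably because the order in which the bound list is populated is not significant.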
@@ -16,7 +16,7 @@ package org.springframework.cloud.dataflow.server.config.cloudfoundry; -import java.net.URL; +import java.net.URI; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -32,9 +32,10 @@ import org.cloudfoundry.client.v2.spaces.ListSpacesResponse; import org.cloudfoundry.client.v2.spaces.SpaceResource; import org.cloudfoundry.client.v2.spaces.Spaces; +import org.cloudfoundry.logcache.v1.LogCacheClient; import org.cloudfoundry.reactor.TokenProvider; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import reactor.core.publisher.Mono; import org.springframework.cloud.dataflow.core.Launcher; @@ -44,7 +45,6 @@ import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryConnectionProperties; import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryDeploymentProperties; import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryTaskLauncher; -import org.springframework.cloud.deployer.spi.scheduler.cloudfoundry.CloudFoundrySchedulerProperties; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.ArgumentMatchers.any; @@ -55,20 +55,21 @@ /** * @author David Turanski * @author Glenn Renfro + * @author Corneil du Plessis **/ -public class CloudFoundryTaskPlatformFactoryTests { +class CloudFoundryTaskPlatformFactoryTests { private CloudFoundryPlatformTokenProvider platformTokenProvider; private CloudFoundryPlatformConnectionContextProvider connectionContextProvider; - private CloudFoundryPlatformClientProvider cloudFoundryClientProvider = mock( - CloudFoundryPlatformClientProvider.class); + private CloudFoundryPlatformClientProvider cloudFoundryClientProvider; - private CloudFoundrySchedulerClientProvider cloudFoundrySchedulerClientProvider = mock( - CloudFoundrySchedulerClientProvider.class); + private CloudFoundrySchedulerClientProvider cloudFoundrySchedulerClientProvider; - private CloudFoundryClient cloudFoundryClient = mock(CloudFoundryClient.class); + private CloudFoundryClient cloudFoundryClient; + + private LogCacheClient logCacheClient; private CloudFoundryPlatformProperties cloudFoundryPlatformProperties; @@ -78,8 +79,13 @@ public class CloudFoundryTaskPlatformFactoryTests { private CloudFoundryDeploymentProperties deploymentProperties; - @Before - public void setUp() throws Exception { + @BeforeEach + void setUp() throws Exception { + cloudFoundryClientProvider = mock(CloudFoundryPlatformClientProvider.class); + cloudFoundrySchedulerClientProvider = mock(CloudFoundrySchedulerClientProvider.class); + cloudFoundryClient = mock(CloudFoundryClient.class); + logCacheClient = mock(LogCacheClient.class); + when(this.cloudFoundryClient.info()) .thenReturn(getInfoRequest -> Mono.just(GetInfoResponse.builder().apiVersion("0.0.0").build())); when(this.cloudFoundryClient.organizations()).thenReturn(mock(Organizations.class)); @@ -87,19 +93,21 @@ public void setUp() throws Exception { when(this.cloudFoundryClient.organizations().list(any())).thenReturn(listOrganizationsResponse()); when(this.cloudFoundryClient.spaces().list(any())).thenReturn(listSpacesResponse()); when(this.cloudFoundryClientProvider.cloudFoundryClient(anyString())).thenReturn(this.cloudFoundryClient); + when(this.cloudFoundryClientProvider.logCacheClient(anyString())).thenReturn(this.logCacheClient); + this.cloudFoundryPlatformProperties = new CloudFoundryPlatformProperties(); this.defaultConnectionProperties = new 
CloudFoundryConnectionProperties(); this.defaultConnectionProperties.setOrg("org"); this.defaultConnectionProperties.setSpace("space"); - this.defaultConnectionProperties.setUrl(new URL("https://localhost:9999/")); + this.defaultConnectionProperties.setUrl(URI.create("https://localhost:9999/").toURL()); this.deploymentProperties = new CloudFoundryDeploymentProperties(); this.deploymentProperties.setApiTimeout(1L); } @Test - public void cloudFoundryTaskPlatformNoScheduler() { + void cloudFoundryTaskPlatformNoScheduler() { setupSinglePlatform(); TaskPlatformFactory taskPlatformFactory = CloudFoundryTaskPlatformFactory .builder() @@ -120,15 +128,15 @@ public void cloudFoundryTaskPlatformNoScheduler() { } @Test - public void cloudFoundryTaskPlatformWithScheduler() { + void cloudFoundryTaskPlatformWithScheduler() { setupSinglePlatform(); when(this.cloudFoundrySchedulerClientProvider.cloudFoundrySchedulerClient(anyString())).thenReturn( mock(SchedulerClient.class)); CloudFoundryProperties cloudFoundryProperties = this.cloudFoundryPlatformProperties.getAccounts().get("default"); - CloudFoundrySchedulerProperties cloudFoundrySchedulerProperties = new CloudFoundrySchedulerProperties(); - cloudFoundrySchedulerProperties.setSchedulerUrl("https://localhost:9999/"); - cloudFoundryProperties.setScheduler(cloudFoundrySchedulerProperties); + CloudFoundryDeploymentProperties cloudFoundryDeploymentProperties = new CloudFoundryDeploymentProperties(); + cloudFoundryDeploymentProperties.setSchedulerUrl("https://localhost:9999/"); + cloudFoundryProperties.setDeployment(cloudFoundryDeploymentProperties); TaskPlatform taskPlatform = getSchedulePlatform("default"); assertThat(taskPlatform.getLaunchers()).hasSize(1); @@ -140,7 +148,7 @@ public void cloudFoundryTaskPlatformWithScheduler() { } @Test - public void cloudFoundryTaskMultiPlatformWithScheduler() throws Exception{ + void cloudFoundryTaskMultiPlatformWithScheduler() throws Exception{ setupMultiPlatform(); when(this.cloudFoundrySchedulerClientProvider.cloudFoundrySchedulerClient(anyString())).thenReturn( mock(SchedulerClient.class)); @@ -155,7 +163,7 @@ public void cloudFoundryTaskMultiPlatformWithScheduler() throws Exception{ launcher = taskPlatform.getLaunchers().get(1); validateBasicLauncherInfo(launcher, "anotherOrgSpace"); - assertThat(launcher.getScheduler()).isNotNull(); + assertThat(launcher.getScheduler()).isNull(); assertThat(launcher.getDescription()).isEqualTo( "org = [another-org], space = [another-space], url = [https://localhost:9999]"); @@ -181,7 +189,9 @@ private void setupSinglePlatform() { private TaskPlatform getSchedulePlatform(String platformName) { CloudFoundryProperties cloudFoundryProperties = this.cloudFoundryPlatformProperties.getAccounts().get(platformName); - + CloudFoundryDeploymentProperties cloudFoundryDeploymentProperties = new CloudFoundryDeploymentProperties(); + cloudFoundryDeploymentProperties.setSchedulerUrl("https://localhost:9999/"); + cloudFoundryProperties.setDeployment(cloudFoundryDeploymentProperties); TaskPlatformFactory taskPlatformFactory = CloudFoundryTaskPlatformFactory .builder() .platformProperties(this.cloudFoundryPlatformProperties) @@ -190,7 +200,7 @@ private TaskPlatform getSchedulePlatform(String platformName) { .cloudFoundryClientProvider(this.cloudFoundryClientProvider) .cloudFoundrySchedulerClientProvider(Optional.of(this.cloudFoundrySchedulerClientProvider)) .schedulesEnabled(true) - .schedulerProperties(Optional.of(cloudFoundryProperties.getScheduler())) 
+ .schedulerProperties(this.cloudFoundryPlatformProperties.getAccounts().get(platformName).getDeployment()) .build(); TaskPlatform taskPlatform = taskPlatformFactory.createTaskPlatform(); @@ -202,13 +212,12 @@ private void setupMultiPlatform() throws Exception{ this.anotherOrgSpaceConnectionProperties = new CloudFoundryConnectionProperties(); this.anotherOrgSpaceConnectionProperties.setOrg("another-org"); this.anotherOrgSpaceConnectionProperties.setSpace("another-space"); - this.anotherOrgSpaceConnectionProperties.setUrl(new URL("https://localhost:9999/")); + this.anotherOrgSpaceConnectionProperties.setUrl(URI.create("https://localhost:9999/").toURL()); CloudFoundryProperties cloudFoundryProperties = new CloudFoundryProperties(); - CloudFoundrySchedulerProperties cloudFoundrySchedulerProperties = new CloudFoundrySchedulerProperties(); - cloudFoundrySchedulerProperties.setSchedulerUrl("https://localhost:9999/"); - cloudFoundryProperties.setScheduler(cloudFoundrySchedulerProperties); + CloudFoundryDeploymentProperties cloudFoundryDeploymentProperties = new CloudFoundryDeploymentProperties(); + cloudFoundryDeploymentProperties.setSchedulerUrl("https://localhost:9999/"); cloudFoundryProperties.setDeployment(new CloudFoundryDeploymentProperties()); cloudFoundryProperties.setConnection(this.defaultConnectionProperties); Map platformMap = new HashMap<>(); @@ -216,7 +225,6 @@ private void setupMultiPlatform() throws Exception{ cloudFoundryProperties = new CloudFoundryProperties(); cloudFoundryProperties.setDeployment(new CloudFoundryDeploymentProperties()); cloudFoundryProperties.setConnection(this.anotherOrgSpaceConnectionProperties); - cloudFoundryProperties.setScheduler(cloudFoundrySchedulerProperties); platformMap.put("anotherOrgSpace", cloudFoundryProperties); @@ -230,7 +238,7 @@ private void setupMultiPlatform() throws Exception{ private Mono listOrganizationsResponse() { ListOrganizationsResponse response = ListOrganizationsResponse.builder() - .addAllResources(Collections.singletonList( + .addAllResources(Collections.singletonList( OrganizationResource.builder() .metadata(Metadata.builder().id("123").build()).build()) ).build(); @@ -239,7 +247,7 @@ private Mono listOrganizationsResponse() { private Mono listSpacesResponse() { ListSpacesResponse response = ListSpacesResponse.builder() - .addAllResources(Collections.singletonList( + .addAllResources(Collections.singletonList( SpaceResource.builder() .metadata(Metadata.builder().id("123").build()).build()) ).build(); diff --git a/spring-cloud-dataflow-platform-kubernetes/pom.xml b/spring-cloud-dataflow-platform-kubernetes/pom.xml index abe6e4564f..a7eb939d92 100644 --- a/spring-cloud-dataflow-platform-kubernetes/pom.xml +++ b/spring-cloud-dataflow-platform-kubernetes/pom.xml @@ -4,10 +4,19 @@ org.springframework.cloud spring-cloud-dataflow-parent - 2.8.0-SNAPSHOT + 3.0.0-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-platform-kubernetes + spring-cloud-dataflow-platform-kubernetes + Data Platform Kubernetes + jar + + true + 5.12.4 + 3.4.1 + org.springframework.cloud @@ -16,6 +25,7 @@ org.springframework.cloud spring-cloud-dataflow-server-core + ${project.version} org.springframework @@ -24,10 +34,17 @@ org.springframework.cloud spring-cloud-starter-kubernetes-fabric8-config + + + javax.annotation + javax.annotation-api + + io.fabric8 kubernetes-client + ${kubernetes-fabric8-client.version} org.springframework.boot @@ -40,4 +57,45 @@ test + + + + org.apache.maven.plugins + 
maven-compiler-plugin + 3.13.0 + + true + ${java.version} + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + diff --git a/spring-cloud-dataflow-platform-kubernetes/src/main/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesPlatformTaskLauncherProperties.java b/spring-cloud-dataflow-platform-kubernetes/src/main/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesPlatformTaskLauncherProperties.java new file mode 100644 index 0000000000..7a43088dde --- /dev/null +++ b/spring-cloud-dataflow-platform-kubernetes/src/main/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesPlatformTaskLauncherProperties.java @@ -0,0 +1,28 @@ +/* + * Copyright 2017-2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.config.kubernetes; + +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.cloud.dataflow.core.AbstractPlatformProperties; +import org.springframework.cloud.deployer.spi.kubernetes.KubernetesTaskLauncherProperties; + +/** + * @author Ilayaperumal Gopinathan + */ +@ConfigurationProperties("spring.cloud.dataflow.task.platform.kubernetes") +public class KubernetesPlatformTaskLauncherProperties extends AbstractPlatformProperties { +} diff --git a/spring-cloud-dataflow-platform-kubernetes/src/main/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesSchedulerAutoConfiguration.java b/spring-cloud-dataflow-platform-kubernetes/src/main/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesSchedulerAutoConfiguration.java index 03bad5cfea..4c0a0fc37d 100644 --- a/spring-cloud-dataflow-platform-kubernetes/src/main/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesSchedulerAutoConfiguration.java +++ b/spring-cloud-dataflow-platform-kubernetes/src/main/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesSchedulerAutoConfiguration.java @@ -17,12 +17,12 @@ package org.springframework.cloud.dataflow.server.config.kubernetes; +import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.cloud.dataflow.server.config.features.SchedulerConfiguration; import org.springframework.cloud.deployer.spi.kubernetes.KubernetesSchedulerProperties; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Conditional; -import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Primary; import org.springframework.context.annotation.Profile; @@ -32,8 +32,8 @@ * * @author Chris Schaefer */ -@Configuration -@Conditional({ 
SchedulerConfiguration.SchedulerConfigurationPropertyChecker.class }) +@AutoConfiguration +@Conditional({SchedulerConfiguration.SchedulerConfigurationPropertyChecker.class}) @Profile("kubernetes") public class KubernetesSchedulerAutoConfiguration { diff --git a/spring-cloud-dataflow-platform-kubernetes/src/main/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesTaskPlatformAutoConfiguration.java b/spring-cloud-dataflow-platform-kubernetes/src/main/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesTaskPlatformAutoConfiguration.java index be4acf0a0e..097c662de8 100644 --- a/spring-cloud-dataflow-platform-kubernetes/src/main/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesTaskPlatformAutoConfiguration.java +++ b/spring-cloud-dataflow-platform-kubernetes/src/main/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesTaskPlatformAutoConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2019 the original author or authors. + * Copyright 2018-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,12 +16,12 @@ package org.springframework.cloud.dataflow.server.config.kubernetes; import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.cloud.dataflow.core.TaskPlatform; import org.springframework.cloud.dataflow.server.config.CloudProfileProvider; import org.springframework.cloud.dataflow.server.config.features.ConditionalOnTasksEnabled; import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; import org.springframework.core.env.Environment; /** @@ -29,16 +29,17 @@ * @author Mark Pollack * @author David Turanski */ -@Configuration -@EnableConfigurationProperties(KubernetesPlatformProperties.class) +@AutoConfiguration +@EnableConfigurationProperties({KubernetesPlatformProperties.class, KubernetesPlatformTaskLauncherProperties.class}) @ConditionalOnTasksEnabled public class KubernetesTaskPlatformAutoConfiguration { @Bean public KubernetesTaskPlatformFactory kubernetesTaskPlatformFactory( KubernetesPlatformProperties platformProperties, - @Value("${spring.cloud.dataflow.features.schedules-enabled:false}") boolean schedulesEnabled) { - return new KubernetesTaskPlatformFactory(platformProperties, schedulesEnabled); + @Value("${spring.cloud.dataflow.features.schedules-enabled:false}") boolean schedulesEnabled, + KubernetesPlatformTaskLauncherProperties platformTaskLauncherProperties) { + return new KubernetesTaskPlatformFactory(platformProperties, schedulesEnabled, platformTaskLauncherProperties); } @Bean diff --git a/spring-cloud-dataflow-platform-kubernetes/src/main/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesTaskPlatformFactory.java b/spring-cloud-dataflow-platform-kubernetes/src/main/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesTaskPlatformFactory.java index c9a084ad2a..0814b3466f 100644 --- a/spring-cloud-dataflow-platform-kubernetes/src/main/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesTaskPlatformFactory.java +++ 
b/spring-cloud-dataflow-platform-kubernetes/src/main/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesTaskPlatformFactory.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2020 the original author or authors. + * Copyright 2019-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,9 +16,10 @@ package org.springframework.cloud.dataflow.server.config.kubernetes; +import java.util.List; + import io.fabric8.kubernetes.client.KubernetesClient; -import org.springframework.beans.BeanUtils; import org.springframework.cloud.dataflow.core.AbstractTaskPlatformFactory; import org.springframework.cloud.dataflow.core.Launcher; import org.springframework.cloud.deployer.spi.kubernetes.ContainerFactory; @@ -26,39 +27,43 @@ import org.springframework.cloud.deployer.spi.kubernetes.KubernetesClientFactory; import org.springframework.cloud.deployer.spi.kubernetes.KubernetesDeployerProperties; import org.springframework.cloud.deployer.spi.kubernetes.KubernetesScheduler; -import org.springframework.cloud.deployer.spi.kubernetes.KubernetesSchedulerProperties; import org.springframework.cloud.deployer.spi.kubernetes.KubernetesTaskLauncher; +import org.springframework.cloud.deployer.spi.kubernetes.KubernetesTaskLauncherProperties; import org.springframework.cloud.deployer.spi.scheduler.Scheduler; /** * @author David Turanski * @author Glenn Renfro + * @author Ilayaperumal Gopinathan **/ public class KubernetesTaskPlatformFactory extends AbstractTaskPlatformFactory { + private final KubernetesPlatformTaskLauncherProperties platformTaskLauncherProperties; + private final boolean schedulesEnabled; public KubernetesTaskPlatformFactory( KubernetesPlatformProperties platformProperties, - boolean schedulesEnabled) { + boolean schedulesEnabled, + KubernetesPlatformTaskLauncherProperties kubernetesPlatformTaskLauncherProperties) { super(platformProperties, KUBERNETES_PLATFORM_TYPE); this.schedulesEnabled = schedulesEnabled; + this.platformTaskLauncherProperties = kubernetesPlatformTaskLauncherProperties; } @Override public Launcher createLauncher(String account) { - KubernetesDeployerProperties kubernetesProperties = this.platformProperties.accountProperties(account); - ContainerFactory containerFactory = new DefaultContainerFactory( - this.platformProperties.accountProperties(account)); - KubernetesClient kubernetesClient = - KubernetesClientFactory.getKubernetesClient(this.platformProperties.accountProperties(account)); + KubernetesDeployerProperties kubernetesProperties = this.platformProperties.accountExists(account) ? + this.platformProperties.accountProperties(account) : new KubernetesDeployerProperties(); + KubernetesTaskLauncherProperties taskLauncherProperties = (this.platformTaskLauncherProperties.accountExists(account)) ? 
+ this.platformTaskLauncherProperties.accountProperties(account) : new KubernetesTaskLauncherProperties(); + ContainerFactory containerFactory = new DefaultContainerFactory(kubernetesProperties); + KubernetesClient kubernetesClient = KubernetesClientFactory.getKubernetesClient(kubernetesProperties); KubernetesTaskLauncher kubernetesTaskLauncher = new KubernetesTaskLauncher( - kubernetesProperties, kubernetesClient, containerFactory); + kubernetesProperties, taskLauncherProperties, kubernetesClient, containerFactory); - KubernetesSchedulerProperties kubernetesSchedulerProperties = new KubernetesSchedulerProperties(); - BeanUtils.copyProperties(kubernetesProperties, kubernetesSchedulerProperties); - Scheduler scheduler = getScheduler(kubernetesSchedulerProperties, kubernetesClient); + Scheduler scheduler = getScheduler(kubernetesProperties, kubernetesClient); Launcher launcher = new Launcher(account, KUBERNETES_PLATFORM_TYPE, kubernetesTaskLauncher, scheduler); @@ -70,12 +75,30 @@ public Launcher createLauncher(String account) { return launcher; } - private Scheduler getScheduler(KubernetesSchedulerProperties kubernetesSchedulerProperties, + @Override + protected List createLaunchers() { + List launchers = super.createLaunchers(); + for (String account : this.platformTaskLauncherProperties.getAccounts().keySet()) { + try { + if (!this.platformProperties.accountExists(account)) { + launchers.add(createLauncher(account)); + } + } + catch (Exception e) { + logger.error("{} platform account [{}] could not be registered: {}", + this.platformType, account, e); + throw new IllegalStateException(e.getMessage(), e); + } + } + return launchers; + } + + private Scheduler getScheduler(KubernetesDeployerProperties kubernetesDeployerProperties, KubernetesClient kubernetesClient) { Scheduler scheduler = null; if (schedulesEnabled) { - scheduler = new KubernetesScheduler(kubernetesClient, kubernetesSchedulerProperties); + scheduler = new KubernetesScheduler(kubernetesClient, kubernetesDeployerProperties); } return scheduler; diff --git a/spring-cloud-dataflow-platform-kubernetes/src/main/resources/META-INF/spring.factories b/spring-cloud-dataflow-platform-kubernetes/src/main/resources/META-INF/spring.factories deleted file mode 100644 index 6c89f0336f..0000000000 --- a/spring-cloud-dataflow-platform-kubernetes/src/main/resources/META-INF/spring.factories +++ /dev/null @@ -1,4 +0,0 @@ -org.springframework.boot.autoconfigure.EnableAutoConfiguration=\ - org.springframework.cloud.dataflow.server.config.kubernetes.KubernetesTaskPlatformAutoConfiguration,\ - org.springframework.cloud.dataflow.server.config.kubernetes.KubernetesSchedulerAutoConfiguration - diff --git a/spring-cloud-dataflow-platform-kubernetes/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports b/spring-cloud-dataflow-platform-kubernetes/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports new file mode 100644 index 0000000000..167abd392f --- /dev/null +++ b/spring-cloud-dataflow-platform-kubernetes/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports @@ -0,0 +1,2 @@ +org.springframework.cloud.dataflow.server.config.kubernetes.KubernetesSchedulerAutoConfiguration +org.springframework.cloud.dataflow.server.config.kubernetes.KubernetesTaskPlatformAutoConfiguration \ No newline at end of file diff --git 
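Two behavioral changes in KubernetesTaskPlatformFactory above are easy to miss. First, createLauncher(account) now falls back to freshly constructed defaults when an account is absent from either property tree, instead of failing the lookup. Second, the new createLaunchers() override unions the accounts of both trees, so an account defined only under the task-launcher properties still produces a Launcher. A distilled sketch using the field names from the hunks; the try/catch and error logging of the real override are elided:

    // Fallback inside createLauncher(account): unknown accounts get defaults.
    KubernetesDeployerProperties deployerProps = this.platformProperties.accountExists(account)
            ? this.platformProperties.accountProperties(account)
            : new KubernetesDeployerProperties();
    KubernetesTaskLauncherProperties launcherProps = this.platformTaskLauncherProperties.accountExists(account)
            ? this.platformTaskLauncherProperties.accountProperties(account)
            : new KubernetesTaskLauncherProperties();

    // Union inside createLaunchers(): super.createLaunchers() covers the
    // deployer-properties accounts; task-launcher-only accounts are added here.
    @Override
    protected List<Launcher> createLaunchers() {
        List<Launcher> launchers = super.createLaunchers();
        for (String account : this.platformTaskLauncherProperties.getAccounts().keySet()) {
            if (!this.platformProperties.accountExists(account)) {
                launchers.add(createLauncher(account));
            }
        }
        return launchers;
    }

The scheduler construction is simplified in the same spirit: rather than BeanUtils-copying deployer settings into a KubernetesSchedulerProperties instance, KubernetesScheduler is now built directly from the shared KubernetesDeployerProperties.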
a/spring-cloud-dataflow-platform-kubernetes/src/test/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesPlatformPropertiesTests.java b/spring-cloud-dataflow-platform-kubernetes/src/test/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesPlatformPropertiesTests.java index 4f17da5e0e..cb517f85d0 100644 --- a/spring-cloud-dataflow-platform-kubernetes/src/test/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesPlatformPropertiesTests.java +++ b/spring-cloud-dataflow-platform-kubernetes/src/test/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesPlatformPropertiesTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,8 +18,7 @@ import java.util.Map; import io.fabric8.kubernetes.client.KubernetesClient; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.context.properties.EnableConfigurationProperties; @@ -30,31 +29,34 @@ import org.springframework.cloud.deployer.spi.kubernetes.KubernetesDeployerProperties; import org.springframework.context.annotation.Configuration; import org.springframework.test.context.ActiveProfiles; -import org.springframework.test.context.junit4.SpringRunner; import static org.assertj.core.api.Assertions.assertThat; /** * @author Donovan Muller + * @author Chris Bono + * @author Corneil du Plessis */ -@RunWith(SpringRunner.class) -@SpringBootTest(classes = KubernetesPlatformPropertiesTests.TestConfig.class) + +@SpringBootTest(classes = KubernetesPlatformPropertiesTests.TestConfig.class, + properties = {"spring.cloud.kubernetes.client.namespace=default"}) @ActiveProfiles("kubernetes-platform-properties") -public class KubernetesPlatformPropertiesTests { +class KubernetesPlatformPropertiesTests { @Autowired private KubernetesPlatformProperties kubernetesPlatformProperties; @Test - public void deserializationTest() { + void deserializationTest() { Map k8sAccounts = this.kubernetesPlatformProperties.getAccounts(); KubernetesClient devK8sClient = KubernetesClientFactory.getKubernetesClient(k8sAccounts.get("dev")); KubernetesClient qaK8sClient = KubernetesClientFactory.getKubernetesClient(k8sAccounts.get("qa")); - assertThat(k8sAccounts).hasSize(2); - assertThat(k8sAccounts).containsKeys("dev", "qa"); + assertThat(k8sAccounts) + .hasSize(2) + .containsKeys("dev", "qa"); assertThat(devK8sClient.getNamespace()).isEqualTo("dev1"); - assertThat(devK8sClient.getMasterUrl().toString()).isEqualTo("https://192.168.0.1:8443/"); - assertThat(qaK8sClient.getMasterUrl().toString()).isEqualTo("https://192.168.0.2:8443/"); + assertThat(devK8sClient.getMasterUrl()).hasToString("https://192.168.0.1:8443/"); + assertThat(qaK8sClient.getMasterUrl()).hasToString("https://192.168.0.2:8443/"); assertThat(k8sAccounts.get("dev").getImagePullPolicy()).isEqualTo(ImagePullPolicy.Always); assertThat(k8sAccounts.get("dev").getEntryPointStyle()).isEqualTo(EntryPointStyle.exec); assertThat(k8sAccounts.get("dev").getLimits().getCpu()).isEqualTo("4"); diff --git a/spring-cloud-dataflow-platform-kubernetes/src/test/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesTaskPlatformFactoryTests.java 
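This test class follows the module-wide JUnit 4 to JUnit Jupiter migration: the runner goes away because Boot's @SpringBootTest already carries @ExtendWith(SpringExtension.class) on Jupiter, and classes and test methods drop to package-private visibility, which Jupiter permits. The shape of the change, with an illustrative class name:

    // before (JUnit 4)
    // @RunWith(SpringRunner.class)
    // @SpringBootTest(classes = TestConfig.class)
    // public class SampleTests { @org.junit.Test public void works() { } }

    // after (JUnit Jupiter)
    @SpringBootTest(classes = TestConfig.class)
    class SampleTests {
        @org.junit.jupiter.api.Test
        void works() { }
    }

The added spring.cloud.kubernetes.client.namespace=default property presumably pins the fabric8 client namespace so the test no longer depends on in-cluster namespace detection.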
b/spring-cloud-dataflow-platform-kubernetes/src/test/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesTaskPlatformFactoryTests.java index afa07b7ef0..733503a32b 100644 --- a/spring-cloud-dataflow-platform-kubernetes/src/test/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesTaskPlatformFactoryTests.java +++ b/spring-cloud-dataflow-platform-kubernetes/src/test/java/org/springframework/cloud/dataflow/server/config/kubernetes/KubernetesTaskPlatformFactoryTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2019 the original author or authors. + * Copyright 2019-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,30 +18,34 @@ import java.util.Collections; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.cloud.dataflow.core.Launcher; import org.springframework.cloud.dataflow.core.TaskPlatform; import org.springframework.cloud.deployer.spi.kubernetes.KubernetesDeployerProperties; -import org.springframework.cloud.deployer.spi.kubernetes.KubernetesSchedulerProperties; import org.springframework.cloud.deployer.spi.kubernetes.KubernetesTaskLauncher; +import org.springframework.cloud.deployer.spi.kubernetes.KubernetesTaskLauncherProperties; +import org.springframework.cloud.deployer.spi.kubernetes.RestartPolicy; import org.springframework.test.util.ReflectionTestUtils; import static org.assertj.core.api.Assertions.assertThat; /** * @author David Turanski + * @author Ilayaperumal Gopinathan **/ -public class KubernetesTaskPlatformFactoryTests { +class KubernetesTaskPlatformFactoryTests { @Test - public void kubernetesTaskPlatformNoScheduler() { + void kubernetesTaskPlatformNoScheduler() { KubernetesPlatformProperties platformProperties = new KubernetesPlatformProperties(); KubernetesDeployerProperties deployerProperties = new KubernetesDeployerProperties(); + KubernetesTaskLauncherProperties taskLauncherProperties = new KubernetesTaskLauncherProperties(); platformProperties.setAccounts(Collections.singletonMap("k8s", deployerProperties)); - + KubernetesPlatformTaskLauncherProperties platformTaskLauncherProperties = new KubernetesPlatformTaskLauncherProperties(); + platformTaskLauncherProperties.setAccounts(Collections.singletonMap("k8s", taskLauncherProperties)); KubernetesTaskPlatformFactory kubernetesTaskPlatformFactory = new KubernetesTaskPlatformFactory( - platformProperties, false); + platformProperties, false, platformTaskLauncherProperties); TaskPlatform taskPlatform = kubernetesTaskPlatformFactory.createTaskPlatform(); assertThat(taskPlatform.getName()).isEqualTo("Kubernetes"); @@ -56,20 +60,23 @@ public void kubernetesTaskPlatformNoScheduler() { } @Test - public void kubernetesTaskPlatformWithScheduler() { + void kubernetesTaskPlatformWithScheduler() { KubernetesPlatformProperties platformProperties = new KubernetesPlatformProperties(); KubernetesDeployerProperties deployerProperties = new KubernetesDeployerProperties(); deployerProperties.getLimits().setMemory("5555Mi"); + KubernetesTaskLauncherProperties taskLauncherProperties = new KubernetesTaskLauncherProperties(); + taskLauncherProperties.setBackoffLimit(5); + taskLauncherProperties.setRestartPolicy(RestartPolicy.Never); platformProperties.setAccounts(Collections.singletonMap("k8s", deployerProperties)); - + KubernetesPlatformTaskLauncherProperties platformTaskLauncherProperties = new KubernetesPlatformTaskLauncherProperties(); 
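// (Aside on the two properties set just above: KubernetesTaskLauncherProperties
// is the task-launch configuration newly split out of the deployer properties.
// backoffLimit presumably maps to the Kubernetes Job's spec.backoffLimit, the
// retry budget before the Job is marked failed, and restartPolicy to the pod's
// restartPolicy, where Never is the usual choice for batch-style tasks so a
// failed container is not restarted in place. Illustrative standalone setup:
//
//     KubernetesTaskLauncherProperties props = new KubernetesTaskLauncherProperties();
//     props.setBackoffLimit(5);
//     props.setRestartPolicy(RestartPolicy.Never);
// )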
KubernetesTaskPlatformFactory kubernetesTaskPlatformFactory = new KubernetesTaskPlatformFactory( - platformProperties, true); + platformProperties, true, platformTaskLauncherProperties); TaskPlatform taskPlatform = kubernetesTaskPlatformFactory.createTaskPlatform(); assertThat(taskPlatform.getName()).isEqualTo("Kubernetes"); assertThat(taskPlatform.getLaunchers()).hasSize(1); Launcher taskLauncher = taskPlatform.getLaunchers().get(0); - KubernetesSchedulerProperties properties = (KubernetesSchedulerProperties) ReflectionTestUtils.getField(taskLauncher.getScheduler(), "properties"); + KubernetesDeployerProperties properties = (KubernetesDeployerProperties) ReflectionTestUtils.getField(taskLauncher.getScheduler(), "properties"); assertThat(properties.getLimits().getMemory()).isEqualTo("5555Mi"); assertThat(taskLauncher.getScheduler()).isNotNull(); @@ -78,8 +85,48 @@ public void kubernetesTaskPlatformWithScheduler() { assertThat(taskLauncher.getType()).isEqualTo("Kubernetes"); assertThat(taskLauncher.getDescription()).matches("^master url = \\[.+\\], namespace = " - + "\\[.+\\], api version = \\[.+\\]$"); - + + "\\[.+\\], api version = \\[.+\\]$"); + KubernetesTaskLauncherProperties taskLauncherProps = (KubernetesTaskLauncherProperties) ReflectionTestUtils.getField(taskLauncher.getTaskLauncher(), "taskLauncherProperties"); } + @Test + void kubernetesTaskPlatformWithMultipleAccounts() { + KubernetesPlatformProperties platformProperties = new KubernetesPlatformProperties(); + KubernetesDeployerProperties deployerProperties = new KubernetesDeployerProperties(); + deployerProperties.getLimits().setMemory("5555Mi"); + KubernetesTaskLauncherProperties taskLauncherProperties = new KubernetesTaskLauncherProperties(); + taskLauncherProperties.setBackoffLimit(5); + taskLauncherProperties.setRestartPolicy(RestartPolicy.Never); + platformProperties.setAccounts(Collections.singletonMap("k8s", deployerProperties)); + KubernetesPlatformTaskLauncherProperties platformTaskLauncherProperties = new KubernetesPlatformTaskLauncherProperties(); + platformTaskLauncherProperties.setAccounts(Collections.singletonMap("test", taskLauncherProperties)); + KubernetesTaskPlatformFactory kubernetesTaskPlatformFactory = new KubernetesTaskPlatformFactory( + platformProperties, true, platformTaskLauncherProperties); + + TaskPlatform taskPlatform = kubernetesTaskPlatformFactory.createTaskPlatform(); + assertThat(taskPlatform.getName()).isEqualTo("Kubernetes"); + assertThat(taskPlatform.getLaunchers()).hasSize(2); + for (Launcher taskLauncher: taskPlatform.getLaunchers()) { + assertThat(taskLauncher.getName().equals("k8s") || taskLauncher.getName().equals("test")).isTrue(); + if (taskLauncher.getName().equals("k8s")) { + KubernetesDeployerProperties properties = (KubernetesDeployerProperties) ReflectionTestUtils.getField(taskLauncher.getScheduler(), "properties"); + assertThat(properties.getLimits().getMemory()).isEqualTo("5555Mi"); + + assertThat(taskLauncher.getScheduler()).isNotNull(); + assertThat(taskLauncher.getTaskLauncher()).isInstanceOf(KubernetesTaskLauncher.class); + assertThat(taskLauncher.getName()).isEqualTo("k8s"); + assertThat(taskLauncher.getType()).isEqualTo("Kubernetes"); + assertThat(taskLauncher.getDescription()).matches("^master url = \\[.+\\], namespace = " + + + "\\[.+\\], api version = \\[.+\\]$"); + } + else if (taskLauncher.getName().equals("test")) { + KubernetesTaskLauncherProperties taskLauncherProps = (KubernetesTaskLauncherProperties) ReflectionTestUtils.getField(taskLauncher.getTaskLauncher(), 
"taskLauncherProperties"); + + assertThat(taskLauncherProps.getBackoffLimit()).isEqualTo(5); + assertThat(taskLauncherProps.getRestartPolicy()).isEqualTo(RestartPolicy.Never); + } + } + + } } diff --git a/spring-cloud-dataflow-platform-kubernetes/src/test/resources/application-kubernetes-platform-properties.yml b/spring-cloud-dataflow-platform-kubernetes/src/test/resources/application-kubernetes-platform-properties.yml index 5094fee784..ea3fc7b20f 100644 --- a/spring-cloud-dataflow-platform-kubernetes/src/test/resources/application-kubernetes-platform-properties.yml +++ b/spring-cloud-dataflow-platform-kubernetes/src/test/resources/application-kubernetes-platform-properties.yml @@ -8,7 +8,7 @@ spring: dev: fabric8: masterUrl: https://192.168.0.1:8443 - namespace: dev1 + namespace: dev1 imagePullPolicy: Always entryPointStyle: exec limits: diff --git a/spring-cloud-dataflow-registry/pom.xml b/spring-cloud-dataflow-registry/pom.xml index 59c6831088..f2f8afbd2b 100644 --- a/spring-cloud-dataflow-registry/pom.xml +++ b/spring-cloud-dataflow-registry/pom.xml @@ -4,11 +4,17 @@ org.springframework.cloud spring-cloud-dataflow-parent - 2.8.0-SNAPSHOT + 3.0.0-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-registry spring-cloud-dataflow-registry + Data Flow Registry jar + + true + 3.4.1 + com.fasterxml.jackson.core @@ -17,6 +23,7 @@ org.springframework.cloud spring-cloud-dataflow-core + ${project.version} org.springframework.cloud @@ -48,6 +55,10 @@ com.zaxxer HikariCP + + jakarta.persistence + jakarta.persistence-api + org.springframework.boot spring-boot-starter-test @@ -56,7 +67,49 @@ org.springframework.cloud spring-cloud-dataflow-audit + ${project.version} compile + + + + org.apache.maven.plugins + maven-surefire-plugin + 3.1.2 + + 1 + 1 + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + diff --git a/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/repository/AppRegistrationRepositoryImpl.java b/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/repository/AppRegistrationRepositoryImpl.java index 4eaef2ccc6..d1d5d35614 100644 --- a/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/repository/AppRegistrationRepositoryImpl.java +++ b/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/repository/AppRegistrationRepositoryImpl.java @@ -19,15 +19,18 @@ import java.util.ArrayList; import java.util.HashSet; import java.util.List; +import java.util.Locale; import java.util.stream.Collectors; -import javax.persistence.EntityManager; -import javax.persistence.TypedQuery; -import javax.persistence.criteria.CriteriaBuilder; -import javax.persistence.criteria.CriteriaQuery; -import javax.persistence.criteria.Predicate; -import javax.persistence.criteria.Root; +import jakarta.persistence.EntityManager; +import jakarta.persistence.TypedQuery; +import jakarta.persistence.criteria.CriteriaBuilder; +import jakarta.persistence.criteria.CriteriaQuery; +import jakarta.persistence.criteria.Predicate; +import jakarta.persistence.criteria.Root; +import org.hibernate.query.sqm.tree.select.SqmSelectStatement; +import org.springframework.beans.factory.ObjectProvider; import org.springframework.beans.factory.annotation.Autowired; import 
org.springframework.cloud.dataflow.core.AppRegistration; import org.springframework.cloud.dataflow.core.ApplicationType; @@ -49,7 +52,7 @@ public class AppRegistrationRepositoryImpl implements AppRegistrationRepositoryC private final EntityManager entityManager; @Autowired - private AppRegistrationRepository appRegistrationRepository; + private ObjectProvider appRegistrationRepository; public AppRegistrationRepositoryImpl(EntityManager entityManager) { Assert.notNull(entityManager, "Entity manager cannot be null"); @@ -68,10 +71,10 @@ public Page findAllByTypeAndNameIsLikeAndVersionAndDefaultVersi predicates.add(cb.equal(appRegistrationRoot.get("type"), type)); } if (StringUtils.hasText(name)) { - predicates.add(cb.like(cb.lower(appRegistrationRoot.get("name")), "%" + name.toLowerCase() + "%")); + predicates.add(cb.like(cb.lower(appRegistrationRoot.get("name")), "%" + name.toLowerCase(Locale.ROOT) + "%")); } if (StringUtils.hasText(version)) { - predicates.add(cb.equal(cb.lower(appRegistrationRoot.get("version")), version.toLowerCase())); + predicates.add(cb.equal(cb.lower(appRegistrationRoot.get("version")), version.toLowerCase(Locale.ROOT))); } if (defaultVersion) { predicates.add(cb.isTrue(appRegistrationRoot.get("defaultVersion"))); @@ -84,22 +87,17 @@ public Page findAllByTypeAndNameIsLikeAndVersionAndDefaultVersi final List resultList = query.getResultList(); if (defaultVersion) { resultList.forEach(appRegistration -> { - HashSet versions = appRegistrationRepository.findAllByName(appRegistration.getName()).stream() + HashSet versions = appRegistrationRepository.getIfAvailable().findAllByName(appRegistration.getName()).stream() + .filter(ar -> ar.getType() == appRegistration.getType()) .map(AppRegistration::getVersion).collect(Collectors.toCollection(HashSet::new)); appRegistration.setVersions(versions); }); } - return new PageImpl<>(resultList, pageable, getTotalCount(cb, predicates.toArray(new Predicate[0]))); + return new PageImpl<>(resultList, pageable, getTotalCount(cq)); } - private Long getTotalCount(CriteriaBuilder criteriaBuilder, Predicate[] predicateArray) { - CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(Long.class); - Root root = criteriaQuery.from(AppRegistration.class); - - criteriaQuery.select(criteriaBuilder.count(root)); - criteriaQuery.where(predicateArray); - - return entityManager.createQuery(criteriaQuery).getSingleResult(); + private Long getTotalCount(CriteriaQuery criteriaQuery) { + return (Long) entityManager.createQuery(((SqmSelectStatement)criteriaQuery).createCountQuery()).getSingleResult(); } } diff --git a/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryService.java b/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryService.java index ae21112caa..0c96696a6e 100644 --- a/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryService.java +++ b/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryService.java @@ -20,12 +20,11 @@ import java.io.IOException; import java.io.InputStreamReader; import java.net.URI; -import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Properties; -import java.util.function.BiFunction; import java.util.function.Predicate; import java.util.stream.Collectors; import 
java.util.stream.Stream;
@@ -71,12 +70,11 @@
  * @author Oleg Zhurakousky
  * @author Christian Tzolov
  * @author Chris Schaefer
+ * @author Corneil du Plessis
  */
 @Transactional
 public class DefaultAppRegistryService implements AppRegistryService {
 
-    public static final String METADATA_KEY_SUFFIX = "metadata";
-
     protected static final Logger logger = LoggerFactory.getLogger(DefaultAppRegistryService.class);
 
     private final AppRegistrationRepository appRegistrationRepository;
@@ -105,7 +103,9 @@ public AppRegistration find(String name, ApplicationType type) {
 
     @Override
     public AppRegistration find(String name, ApplicationType type, String version) {
-        return this.appRegistrationRepository.findAppRegistrationByNameAndTypeAndVersion(name, type, version);
+        AppRegistration registration = this.appRegistrationRepository.findAppRegistrationByNameAndTypeAndVersion(name, type, version);
+        logger.debug("find:{}:{}:{}={}", name, type, version, registration);
+        return registration;
     }
 
     @Override
@@ -363,24 +363,17 @@ else if (!StringUtils.hasText(uri.getSchemeSpecificPart())) {
 
     @Override
     public List<AppRegistration> importAll(boolean overwrite, Resource... resources) {
-        List<AppRegistration> registrations = new ArrayList<>();
-        Stream.of(resources)
-                // parallel takes effect if multiple resources
-                .parallel()
+        List<String[]> lines = Stream.of(resources)
                 // take lines
                 .flatMap(this::resourceAsLines)
                 // take valid splitted lines
-                .flatMap(this::splitValidLines)
-                // reduce to AppRegistration map key'd by <type><name><version>
-                .reduce(new HashMap<String, AppRegistration>(), reduceToAppRegistrations(), (left, right) -> {
-                    // combiner is used if multiple resources caused parallel stream,
-                    // then just let last processed resource to override.
-                    left.putAll(right);
-                    return left;
-                })
-                // don't care about keys anymore
-                .values()
-                // back to stream
+                .flatMap(this::splitValidLines).collect(Collectors.toList());
+        Map<String, AppRegistration> registrations = new HashMap<>();
+        AppRegistration previous = null;
+        for(String [] line : lines) {
+            previous = createAppRegistrations(registrations, line, previous);
+        }
+        List<AppRegistration> result = registrations.values()
                 .stream()
                 // drop registration if it doesn't have main uri as user only had metadata
                 .filter(ar -> ar.getUri() != null)
@@ -388,54 +381,61 @@ public List<AppRegistration> importAll(boolean overwrite, Resource... resources)
                 .filter(ar -> isOverwrite(ar, overwrite))
                 .map(ar -> {
                     save(ar);
-                    registrations.add(ar);
                     return ar;
                 }).collect(Collectors.toList());
-        return registrations;
+        return result;
     }
 
-    private BiFunction<HashMap<String, AppRegistration>, ? super String[], HashMap<String, AppRegistration>> reduceToAppRegistrations() {
-        return (map, lineSplit) -> {
-            String[] typeName = lineSplit[0].split("\\.");
-            if (typeName.length < 2 || typeName.length > 3) {
-                throw new IllegalArgumentException("Invalid format for app key '" + lineSplit[0]
-                        + "' in file. Must be <type>.<name> or <type>.<name>.metadata");
-            }
-            String type = typeName[0].trim();
-            String name = typeName[1].trim();
-            String version = getResourceVersion(lineSplit[1]);
-            // This is now versioned key
-            String key = type + name + version;
-            if (!map.containsKey(key) && map.containsKey(type + name + "latest")) {
-                key = type + name + "latest";
+    private AppRegistration createAppRegistrations(Map<String, AppRegistration> registrations, String[] lineSplit, AppRegistration previous) {
+        String[] typeName = lineSplit[0].split("\\.");
+        if (typeName.length < 2 || typeName.length > 3) {
+            throw new IllegalArgumentException("Invalid format for app key '" + lineSplit[0]
+                    + "' in file. Must be <type>.<name> or <type>.<name>.metadata or <type>.<name>.bootVersion");
+        }
+        String type = typeName[0].trim();
+        String name = typeName[1].trim();
+        String extra = typeName.length == 3 ? typeName[2] : null;
+        String version = "bootVersion".equals(extra) ? null : getResourceVersion(lineSplit[1]);
+        // This is now versioned key
+        String key = type + name + version;
+        if (!registrations.containsKey(key) && registrations.containsKey(type + name + "latest")) {
+            key = type + name + "latest";
+        }
+        // Allow bootVersion in descriptor file (already in 5.0.x stream app descriptor)
+        if("bootVersion".equals(extra)) {
+            if (previous == null) {
+                throw new IllegalArgumentException("Expected uri for bootVersion:" + lineSplit[0]);
             }
-            AppRegistration ar = map.getOrDefault(key, new AppRegistration());
-            ar.setName(name);
-            ar.setType(ApplicationType.valueOf(type));
-            ar.setVersion(version);
-            if (typeName.length == 2) {
-                // normal app uri
-                try {
-                    ar.setUri(new URI(lineSplit[1]));
-                    warnOnMalformedURI(lineSplit[0], ar.getUri());
-                }
-                catch (Exception e) {
-                    throw new IllegalArgumentException(e);
-                }
+            ApplicationType appType = ApplicationType.valueOf(type);
+            Assert.isTrue(appType == previous.getType() && name.equals(previous.getName()), "Expected previous to be same type and name for:" + lineSplit[0]);
+            // Do nothing with bootVersion though
+            return previous;
+        }
+        AppRegistration ar = registrations.getOrDefault(key, new AppRegistration());
+        ar.setName(name);
+        ar.setType(ApplicationType.valueOf(type));
+        ar.setVersion(version);
+        if (typeName.length == 2) {
+            // normal app uri
+            try {
+                ar.setUri(new URI(lineSplit[1]));
+                warnOnMalformedURI(lineSplit[0], ar.getUri());
+            } catch (Exception e) {
+                throw new IllegalArgumentException(e);
            }
-            else if (typeName.length == 3) {
+        } else if (typeName.length == 3) {
+            if (extra.equals("metadata")) {
                 // metadata app uri
                 try {
                     ar.setMetadataUri(new URI(lineSplit[1]));
                     warnOnMalformedURI(lineSplit[0], ar.getMetadataUri());
-                }
-                catch (Exception e) {
+                } catch (Exception e) {
                     throw new IllegalArgumentException(e);
                 }
             }
-            map.put(key, ar);
-            return map;
-        };
+        }
+        registrations.put(key, ar);
+        return ar;
     }
 
     private Stream<String> resourceAsLines(Resource resource) {
diff --git a/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/support/AppResourceCommon.java b/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/support/AppResourceCommon.java
index ab890df719..075ddb49f2 100644
--- a/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/support/AppResourceCommon.java
+++ b/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/support/AppResourceCommon.java
@@ -20,6 +20,7 @@
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.util.Locale;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -66,16 +67,14 @@ public AppResourceCommon(MavenProperties mavenProperties, ResourceLoader resourc
     */
    public String getResourceVersion(Resource resource) {
        Assert.notNull(resource, "resource must not be null");
-       if (resource instanceof MavenResource) {
-           MavenResource mavenResource = (MavenResource) resource;
+       if (resource instanceof MavenResource mavenResource) {
            return mavenResource.getVersion();
        }
-       else if (resource instanceof DockerResource) {
-           DockerResource dockerResource = (DockerResource) resource;
+       else if (resource instanceof DockerResource dockerResource) {
            return getDockerImageTag(dockerResource);
        }
-       else if (resource instanceof UrlResource) {
-           return getUrlResourceVersion((UrlResource) resource);
+       else if (resource instanceof UrlResource
urlResource) { + return getUrlResourceVersion(urlResource); } else { throw new IllegalArgumentException("Do not support extracting resource from Resource of type " @@ -101,9 +100,9 @@ private String getDockerImageTag(DockerResource dockerResource) { /** * Parse the version number from a {@link UrlResource}. It can match a simple * {@code -.jar} formatted name. For example, a resource ending in - * {@code file-sink-rabbit-1.2.0.RELEASE.jar} will return {@code 1.2.0.RELEASE}. Snapshot - * builds of the form {@code file-sink-rabbit-1.2.0.BUILD-SNAPSHOT.jar} and - * {@code file-sink-rabbit-1.2.0-SNAPSHOT.jar} and {@code file-sink-rabbit-1.2.0-SNAPSHOT-metadata.jar} are also supported + * {@code file-sink-rabbit-5.0.0.jar} will return {@code 5.0.0}. Snapshot + * builds of the form {@code file-sink-rabbit-5.0.1-SNAPSHOT.jar} and + * {@code file-sink-rabbit-5.0.1-SNAPSHOT-metadata.jar} are also supported * @param urlResource * @return */ @@ -158,7 +157,7 @@ public Resource getResource(String resourceUri) { throw new IllegalArgumentException("Invalid URI schema for resource: " + resourceUri + " Expected URI schema prefix like file://, http:// or classpath:// but got none"); } - scheme = scheme.toLowerCase(); + scheme = scheme.toLowerCase(Locale.ROOT); Assert.notNull(scheme, "a scheme (prefix) is required"); switch (scheme) { @@ -191,8 +190,7 @@ public Resource getResource(String resourceUri) { */ public String getResourceWithoutVersion(Resource resource) { Assert.notNull(resource, "resource must not be null"); - if (resource instanceof MavenResource) { - MavenResource mavenResource = (MavenResource) resource; + if (resource instanceof MavenResource mavenResource) { StringBuilder mavenResourceStringBuilder = new StringBuilder(); mavenResourceStringBuilder.append(String.format("maven://%s:%s", mavenResource.getGroupId(), @@ -208,12 +206,11 @@ public String getResourceWithoutVersion(Resource resource) { } return mavenResourceStringBuilder.toString(); } - else if (resource instanceof DockerResource) { - DockerResource dockerResource = (DockerResource) resource; + else if (resource instanceof DockerResource dockerResource) { return getDockerImageWithoutVersion(dockerResource); } - else if (resource instanceof UrlResource) { - return getUrlResourceWithoutVersion((UrlResource) resource); + else if (resource instanceof UrlResource urlResource) { + return getUrlResourceWithoutVersion(urlResource); } else { throw new IllegalArgumentException("Do not support extracting resource from Resource of type " diff --git a/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryServiceTests.java b/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryServiceTests.java index f2bd076d42..387836139b 100644 --- a/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryServiceTests.java +++ b/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryServiceTests.java @@ -22,8 +22,9 @@ import java.util.Collections; import java.util.List; -import org.hamcrest.Matchers; -import org.junit.Test; +import org.assertj.core.api.Condition; +import org.assertj.core.condition.AllOf; +import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; import org.springframework.cloud.dataflow.audit.service.DefaultAuditRecordService; @@ -39,17 +40,9 @@ import org.springframework.data.domain.Page; 
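// (Looking back at the registry hunks above: the module now targets Boot 3, so
// javax.persistence.* imports become jakarta.persistence.*, lower-casing for
// case-insensitive matching uses toLowerCase(Locale.ROOT) to avoid
// locale-dependent case mapping (the Turkish dotless-i problem), and the page
// total is now derived from the content query itself through Hibernate 6's SQM
// API, so the count predicates can never drift from the content predicates.
// The same idiom with its generics restored, as a sketch:
//
//     private Long getTotalCount(CriteriaQuery<AppRegistration> criteriaQuery) {
//         return entityManager
//                 .createQuery(((SqmSelectStatement<AppRegistration>) criteriaQuery).createCountQuery())
//                 .getSingleResult();
//     }
// )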
import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.PageRequest; +import org.springframework.lang.Nullable; -import static org.hamcrest.Matchers.allOf; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasProperty; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.reset; @@ -64,35 +57,36 @@ * @author Chris Schaefer * @author Ilayaperumal Gopinathan * @author David Turanski + * @author Corneil du Plessis */ -public class DefaultAppRegistryServiceTests { +class DefaultAppRegistryServiceTests { - private AppRegistrationRepository appRegistrationRepository = mock(AppRegistrationRepository.class); + private final AppRegistrationRepository appRegistrationRepository = mock(AppRegistrationRepository.class); - private ResourceLoader resourceLoader = new DefaultResourceLoader(); + private final ResourceLoader resourceLoader = new DefaultResourceLoader(); - private AppRegistryService appRegistryService = new DefaultAppRegistryService(appRegistrationRepository, + private final AppRegistryService appRegistryService = new DefaultAppRegistryService(appRegistrationRepository, new AppResourceCommon(new MavenProperties(), resourceLoader), mock(DefaultAuditRecordService.class)); @Test - public void testNotFound() { + void notFound() { AppRegistration registration = appRegistryService.find("foo", ApplicationType.source); - assertThat(registration, Matchers.nullValue()); + assertThat(registration).isNull(); } @Test - public void testFound() { + void found() { AppRegistration registration = appRegistration(); when(appRegistrationRepository.findAppRegistrationByNameAndTypeAndDefaultVersionIsTrue( eq(registration.getName()), eq(registration.getType()))).thenReturn(registration); AppRegistration registration2 = appRegistryService.find("foo", ApplicationType.source); - assertThat(registration2.getName(), is("foo")); - assertThat(registration2.getType(), is(ApplicationType.source)); + assertThat(registration2.getName()).isEqualTo("foo"); + assertThat(registration2.getType()).isEqualTo(ApplicationType.source); } @Test - public void testMetadataResourceResolvesWhenAvailable() { + void metadataResourceResolvesWhenAvailable() { AppRegistration registration = appRegistration(); when(appRegistrationRepository.findAppRegistrationByNameAndTypeAndDefaultVersionIsTrue( eq(registration.getName()), eq(registration.getType()))).thenReturn(registration); @@ -100,11 +94,11 @@ public void testMetadataResourceResolvesWhenAvailable() { AppRegistration registration2 = appRegistryService.find("foo", ApplicationType.source); Resource appMetadataResource = appRegistryService.getAppMetadataResource(registration2); - assertThat(appMetadataResource.getFilename(), is("foo-source-metadata")); + assertThat(appMetadataResource.getFilename()).isEqualTo("foo-source-metadata"); } @Test - public void testMetadataResourceNotAvailableResolvesToMainResource() { + void metadataResourceNotAvailableResolvesToMainResource() { AppRegistration registration = appRegistration(); registration.setMetadataUri(null); 
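// (Aside on the assertion style used in the rewritten tests below: Hamcrest's
// containsInAnyOrder(allOf(hasProperty(...))) chains become AssertJ Conditions.
// A Condition pairs a predicate with a description, and haveAtLeast(n, condition)
// asserts that at least n elements satisfy it. The same(...) factory referenced
// by the new assertions is assumed to be defined elsewhere in this class; only
// appRegistrationWith(...) is visible in these hunks. Illustrative shape:
//
//     static Condition<AppRegistration> same(String name, ApplicationType type,
//             URI uri, @Nullable URI metadataUri) {
//         return new Condition<>(ar -> name.equals(ar.getName())
//                 && type == ar.getType()
//                 && uri.equals(ar.getUri())
//                 && java.util.Objects.equals(metadataUri, ar.getMetadataUri()), "same");
//     }
//
//     assertThat(registrations).haveAtLeast(1,
//             same("foo", ApplicationType.source, fooUri, fooMetadataUri));
// )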
when(appRegistrationRepository.findAppRegistrationByNameAndTypeAndDefaultVersionIsTrue( @@ -113,11 +107,11 @@ public void testMetadataResourceNotAvailableResolvesToMainResource() { AppRegistration registration2 = appRegistryService.find("foo", ApplicationType.source); Resource appMetadataResource = appRegistryService.getAppMetadataResource(registration2); - assertThat(appMetadataResource.getFilename(), is("foo-source")); + assertThat(appMetadataResource.getFilename()).isEqualTo("foo-source"); } @Test - public void testFindAll() { + void findAll() { AppRegistration fooSource = appRegistration("foo", ApplicationType.source, true); AppRegistration fooSink = appRegistration("foo", ApplicationType.sink, false); AppRegistration barSource = appRegistration("bar", ApplicationType.source, true); @@ -125,26 +119,19 @@ public void testFindAll() { List registrations = appRegistryService.findAll(); - assertThat(registrations, containsInAnyOrder( - allOf( - hasProperty("name", is("foo")), - hasProperty("uri", is(URI.create("classpath:/foo-source"))), - hasProperty("metadataUri", is(URI.create("classpath:/foo-source-metadata"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("bar")), - hasProperty("uri", is(URI.create("classpath:/bar-source"))), - hasProperty("metadataUri", is(URI.create("classpath:/bar-source-metadata"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("foo")), - hasProperty("uri", is(URI.create("classpath:/foo-sink"))), - hasProperty("metadataUri", nullValue()), - hasProperty("type", is(ApplicationType.sink))))); + assertThat(registrations) + .haveAtLeast(1, same("foo", ApplicationType.source, URI.create("classpath:/foo-source"), URI.create("classpath:/foo-source-metadata"))) + .haveAtLeast(1, same("bar", ApplicationType.source, URI.create("classpath:/bar-source"), URI.create("classpath:/bar-source-metadata"))) + .haveAtLeast(1, same("foo", ApplicationType.sink, URI.create("classpath:/foo-sink"), null)); } + static Condition appRegistrationWith(String name, URI uri, URI metadata, ApplicationType type) { + return metadata != null ? 
+ new Condition<>(item -> name.equals(item.getName()) && uri.equals(item.getUri()) && metadata.equals(item.getMetadataUri()) && type.equals(item.getType()), "AppRegistrationWith") : + new Condition<>(item -> name.equals(item.getName()) && uri.equals(item.getUri()) && item.getMetadataUri() == null && type.equals(item.getType()), "AppRegistrationWith"); + } @Test - public void testFindAllPageable() { + void findAllPageable() { AppRegistration fooSource = appRegistration("foo", ApplicationType.source, true); AppRegistration fooSink = appRegistration("foo", ApplicationType.sink, false); AppRegistration barSource = appRegistration("bar", ApplicationType.source, true); @@ -154,38 +141,38 @@ public void testFindAllPageable() { .thenReturn(new PageImpl(Arrays.asList(fooSink, barSource), pageRequest1, 3)); Page registrations1 = appRegistryService.findAll(pageRequest1); - assertEquals(3, registrations1.getTotalElements()); - assertEquals(2, registrations1.getContent().size()); - assertEquals("foo", registrations1.getContent().get(0).getName()); - assertEquals("bar", registrations1.getContent().get(1).getName()); + assertThat(registrations1.getTotalElements()).isEqualTo(3); + assertThat(registrations1.getContent()).hasSize(2); + assertThat(registrations1.getContent().get(0).getName()).isEqualTo("foo"); + assertThat(registrations1.getContent().get(1).getName()).isEqualTo("bar"); PageRequest pageRequest2 = PageRequest.of(1, 2); when(appRegistrationRepository.findAll(eq(pageRequest2))) - .thenReturn(new PageImpl(Arrays.asList(fooSource), pageRequest2, 3)); + .thenReturn(new PageImpl(Collections.singletonList(fooSource), pageRequest2, 3)); Page registrations2 = appRegistryService.findAll(pageRequest2); - assertEquals(3, registrations2.getTotalElements()); - assertEquals(1, registrations2.getContent().size()); - assertEquals("foo", registrations2.getContent().get(0).getName()); + assertThat(registrations2.getTotalElements()).isEqualTo(3); + assertThat(registrations2.getContent()).hasSize(1); + assertThat(registrations2.getContent().get(0).getName()).isEqualTo("foo"); } @Test - public void testSaveNonDefaultApp() { + void saveNonDefaultApp() { AppRegistration fooSource = appRegistration("foo", ApplicationType.source, true); - assertFalse(fooSource.isDefaultVersion()); + assertThat(fooSource.isDefaultVersion()).isFalse(); AppRegistration saved = appRegistryService.save(fooSource); verify(appRegistrationRepository, times(1)).findAppRegistrationByNameAndTypeAndVersion( eq(fooSource.getName()), eq(fooSource.getType()), eq(fooSource.getVersion())); ArgumentCaptor appRegistrationCaptor = ArgumentCaptor.forClass(AppRegistration.class); verify(appRegistrationRepository, times(1)).save(appRegistrationCaptor.capture()); - assertTrue(appRegistrationCaptor.getValue().isDefaultVersion()); + assertThat(appRegistrationCaptor.getValue().isDefaultVersion()).isTrue(); } @Test - public void testSaveDefault() { + void saveDefault() { AppRegistration fooSource = appRegistration("foo", ApplicationType.source, true); - assertFalse(fooSource.isDefaultVersion()); + assertThat(fooSource.isDefaultVersion()).isFalse(); when(appRegistrationRepository.findAppRegistrationByNameAndTypeAndDefaultVersionIsTrue( eq(fooSource.getName()), eq(fooSource.getType()))).thenReturn(fooSource); @@ -195,11 +182,11 @@ public void testSaveDefault() { eq(fooSource.getName()), eq(fooSource.getType()), eq(fooSource.getVersion())); ArgumentCaptor appRegistrationCaptor = ArgumentCaptor.forClass(AppRegistration.class); verify(appRegistrationRepository, 
times(1)).save(appRegistrationCaptor.capture()); - assertFalse(appRegistrationCaptor.getValue().isDefaultVersion()); + assertThat(appRegistrationCaptor.getValue().isDefaultVersion()).isFalse(); } @Test - public void testSaveExistingApp() { + void saveExistingApp() { AppRegistration fooSource = appRegistration("foo", ApplicationType.source, true); AppRegistration fooSource2 = appRegistration("foo", ApplicationType.source, true); fooSource2.setUri(null); @@ -207,7 +194,7 @@ public void testSaveExistingApp() { when(appRegistrationRepository.findAppRegistrationByNameAndTypeAndVersion( eq(fooSource2.getName()), eq(fooSource2.getType()), eq(fooSource2.getVersion()))) - .thenReturn(fooSource2); + .thenReturn(fooSource2); appRegistryService.save(fooSource); @@ -216,38 +203,36 @@ public void testSaveExistingApp() { ArgumentCaptor appRegistrationCaptor = ArgumentCaptor.forClass(AppRegistration.class); verify(appRegistrationRepository, times(1)).save(appRegistrationCaptor.capture()); - assertEquals(fooSource.getUri(), fooSource2.getUri()); - assertEquals(fooSource.getMetadataUri(), fooSource2.getMetadataUri()); + assertThat(fooSource2.getUri()).isEqualTo(fooSource.getUri()); + assertThat(fooSource2.getMetadataUri()).isEqualTo(fooSource.getMetadataUri()); } @Test - public void testImportAllOverwrite() { + void importAllOverwrite() { when(appRegistrationRepository.findAppRegistrationByNameAndTypeAndVersion( eq("foo"), eq(ApplicationType.source), eq("1.0"))).thenReturn(appRegistration()); when(appRegistrationRepository.findAppRegistrationByNameAndTypeAndVersion( eq("bar"), eq(ApplicationType.sink), eq("1.0"))).thenReturn(appRegistration()); assertThat(appRegistryService.importAll(false, - new ClassPathResource("AppRegistryTests-importAllOverwrite.properties", getClass())).size(), equalTo(0)); + new ClassPathResource("AppRegistryTests-importAllOverwrite.properties", getClass()))).isEmpty(); } @Test - public void testImportRealWorldJarsWithMetadata() { + void importRealWorldJarsWithMetadata() { appRegistryService.importAll(true, new ClassPathResource("AppRegistryTests-import-with-metadata.properties", getClass())); ArgumentCaptor appRegistrationCaptor = ArgumentCaptor.forClass(AppRegistration.class); verify(appRegistrationRepository, times(1)).save(appRegistrationCaptor.capture()); List registrations = appRegistrationCaptor.getAllValues(); AppRegistration appRegistration = registrations.get(0); - assertThat(appRegistration, hasProperty("name", is("cassandra"))); - assertThat(appRegistration, hasProperty("uri", - is(URI.create("http://repo.spring.io/release/org/springframework/cloud/stream/app/cassandra-sink-rabbit/2.1.0.RELEASE/cassandra-sink-rabbit-2.1.0.RELEASE.jar")))); - assertThat(appRegistration, hasProperty("metadataUri", - is(URI.create("http://repo.spring.io/release/org/springframework/cloud/stream/app/cassandra-sink-rabbit/2.1.0.RELEASE/cassandra-sink-rabbit-2.1.0.RELEASE-metadata.jar")))); - assertThat(appRegistration, hasProperty("type", is(ApplicationType.sink))); + assertThat(appRegistration.getName()).isEqualTo("cassandra"); + assertThat(appRegistration.getUri()).isEqualTo(URI.create("http://repo.spring.io/release/org/springframework/cloud/stream/app/cassandra-sink-rabbit/2.1.0.RELEASE/cassandra-sink-rabbit-2.1.0.RELEASE.jar")); + assertThat(appRegistration.getMetadataUri()).isEqualTo(URI.create("http://repo.spring.io/release/org/springframework/cloud/stream/app/cassandra-sink-rabbit/2.1.0.RELEASE/cassandra-sink-rabbit-2.1.0.RELEASE-metadata.jar")); + 
assertThat(appRegistration.getType()).isEqualTo(ApplicationType.sink); } @Test - public void testImportAll() { + void importAll() { final boolean overwrite = true; @@ -262,18 +247,10 @@ public void testImportAll() { List registrations = appRegistrationCaptor.getAllValues(); - assertThat(registrations, - containsInAnyOrder( - allOf( - hasProperty("name", is("bar")), - hasProperty("uri", is(URI.create("http:/bar-source-1.0.0"))), - hasProperty("metadataUri", is(URI.create("http:/bar-source-metadata-1.0.0"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("foo")), - hasProperty("uri", is(URI.create("http:/foo-sink-1.0.0"))), - hasProperty("metadataUri", nullValue()), - hasProperty("type", is(ApplicationType.sink))))); + assertThat(registrations).contains( + new AppRegistration("bar", ApplicationType.source, URI.create("http:/bar-source-1.0.0"), URI.create("http:/bar-source-metadata-1.0.0")), + new AppRegistration("foo", ApplicationType.sink, URI.create("http:/foo-sink-1.0.0"), null) + ); // // Now import with overwrite = true // @@ -287,28 +264,15 @@ public void testImportAll() { registrations = appRegistrationCaptor.getAllValues(); - assertThat(registrations, - containsInAnyOrder( - allOf( - hasProperty("name", is("foo")), - hasProperty("uri", is(URI.create("http:/foo-source-1.0.0"))), - hasProperty("metadataUri", is(URI.create("http:/foo-source-metadata-1.0.0"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("bar")), - hasProperty("uri", is(URI.create("http:/bar-source-1.0.0"))), - hasProperty("metadataUri", is(URI.create("http:/bar-source-metadata-1.0.0"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("foo")), - hasProperty("uri", is(URI.create("http:/foo-sink-1.0.0"))), - hasProperty("metadataUri", nullValue()), - hasProperty("type", is(ApplicationType.sink))))); + assertThat(registrations) + .haveAtLeast(1, same("foo", ApplicationType.source, URI.create("http:/foo-source-1.0.0"),URI.create("http:/foo-source-metadata-1.0.0"))) + .haveAtLeast(1, same("bar", ApplicationType.source, URI.create("http:/bar-source-1.0.0"), URI.create("http:/bar-source-metadata-1.0.0"))) + .haveAtLeast(1, same("foo", ApplicationType.sink, URI.create("http:/foo-sink-1.0.0"), null)); } @Test @SuppressWarnings("unchecked") - public void testImportMixedVersions() { + void importMixedVersions() { final boolean overwrite = true; @@ -323,33 +287,16 @@ public void testImportMixedVersions() { List registrations = appRegistrationCaptor.getAllValues(); - assertThat(registrations, - containsInAnyOrder( - allOf( - hasProperty("name", is("time")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1.RELEASE"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("time")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.0.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.0.RELEASE"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("log")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.2.RELEASE"))), - hasProperty("metadataUri", 
is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.2.RELEASE"))), - hasProperty("type", is(ApplicationType.sink))), - allOf( - hasProperty("name", is("log")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.1.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.1.RELEASE"))), - hasProperty("type", is(ApplicationType.sink))))); + assertThat(registrations) + .haveAtLeast(1, same("time", ApplicationType.source, URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1.RELEASE"))) + .haveAtLeast(1, same("time", ApplicationType.source, URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.0.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.0.RELEASE"))) + .haveAtLeast(1, same("log", ApplicationType.sink, URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.2.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.2.RELEASE"))) + .haveAtLeast(1, same("log", ApplicationType.sink, URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.1.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.1.RELEASE"))); } @Test @SuppressWarnings("unchecked") - public void testImportMixedVersionsMultiFile() { + void importMixedVersionsMultiFile() { final boolean overwrite = true; @@ -365,33 +312,17 @@ public void testImportMixedVersionsMultiFile() { List registrations = appRegistrationCaptor.getAllValues(); - assertThat(registrations, - containsInAnyOrder( - allOf( - hasProperty("name", is("time")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1.RELEASE"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("time")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.0.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.0.RELEASE"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("log")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.2.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.2.RELEASE"))), - hasProperty("type", is(ApplicationType.sink))), - allOf( - hasProperty("name", is("log")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.1.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.1.RELEASE"))), - hasProperty("type", is(ApplicationType.sink))))); + assertThat(registrations) + .haveAtLeast(1, same("time", ApplicationType.source, URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1.RELEASE"), 
URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1.RELEASE"))) + .haveAtLeast(1, same("time", ApplicationType.source, URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.0.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.0.RELEASE"))) + .haveAtLeast(1, same("log", ApplicationType.sink, URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.2.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.2.RELEASE"))) + .haveAtLeast(1, same("log", ApplicationType.sink, URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.1.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.1.RELEASE"))); + } @Test @SuppressWarnings("unchecked") - public void testImportMixedVersionsWithSpaceAndComments() { + void importMixedVersionsWithSpaceAndComments() { final boolean overwrite = true; @@ -406,33 +337,17 @@ public void testImportMixedVersionsWithSpaceAndComments() { List registrations = appRegistrationCaptor.getAllValues(); - assertThat(registrations, - containsInAnyOrder( - allOf( - hasProperty("name", is("time")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1.RELEASE"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("time")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.0.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.0.RELEASE"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("log")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.2.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.2.RELEASE"))), - hasProperty("type", is(ApplicationType.sink))), - allOf( - hasProperty("name", is("log")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.1.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.1.RELEASE"))), - hasProperty("type", is(ApplicationType.sink))))); + assertThat(registrations) + .haveAtLeast(1, same("time", ApplicationType.source, URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1.RELEASE"))) + .haveAtLeast(1, same("time", ApplicationType.source, URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.0.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.0.RELEASE"))) + .haveAtLeast(1, same("log", ApplicationType.sink, URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.2.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.2.RELEASE"))) + .haveAtLeast(1, same("log", ApplicationType.sink, 
URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.1.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.1.RELEASE"))); + } @Test @SuppressWarnings("unchecked") - public void testImportMixedVersionsWithMixedOrder() { + void importMixedVersionsWithMixedOrder() { final boolean overwrite = true; @@ -447,33 +362,17 @@ public void testImportMixedVersionsWithMixedOrder() { List registrations = appRegistrationCaptor.getAllValues(); - assertThat(registrations, - containsInAnyOrder( - allOf( - hasProperty("name", is("time")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1.RELEASE"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("time")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.0.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.0.RELEASE"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("log")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.2.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.2.RELEASE"))), - hasProperty("type", is(ApplicationType.sink))), - allOf( - hasProperty("name", is("log")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.1.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.1.RELEASE"))), - hasProperty("type", is(ApplicationType.sink))))); + assertThat(registrations) + .haveAtLeast(1, same("time", ApplicationType.source, URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1.RELEASE"))) + .haveAtLeast(1, same("time", ApplicationType.source, URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.0.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.0.RELEASE"))) + .haveAtLeast(1, same("log", ApplicationType.sink, URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.2.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.2.RELEASE"))) + .haveAtLeast(1, same("log", ApplicationType.sink, URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.1.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.1.RELEASE"))); + } @Test @SuppressWarnings("unchecked") - public void testImportMixedVersionsWithMissingAndOnlyMetadata() { + void importMixedVersionsWithMissingAndOnlyMetadata() { final boolean overwrite = true; @@ -487,28 +386,15 @@ public void testImportMixedVersionsWithMissingAndOnlyMetadata() { verify(appRegistrationRepository, times(3)).save(appRegistrationCaptor.capture()); List registrations = appRegistrationCaptor.getAllValues(); + assertThat(registrations) + .haveAtLeast(1, same("time", ApplicationType.source, 
URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1.RELEASE"))) + .haveAtLeast(1, same("time", ApplicationType.source, URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.0.RELEASE"),null)) + .haveAtLeast(1, same("log", ApplicationType.sink, URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.2.RELEASE"), URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.2.RELEASE"))); - assertThat(registrations, - containsInAnyOrder( - allOf( - hasProperty("name", is("time")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1.RELEASE"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("time")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.0.RELEASE"))), - hasProperty("metadataUri", nullValue()), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("log")), - hasProperty("uri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:2.0.2.RELEASE"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:log-sink-rabbit:jar:metadata:2.0.2.RELEASE"))), - hasProperty("type", is(ApplicationType.sink))))); } @Test - public void testImportAllDockerLatest() { + void importAllDockerLatest() { appRegistryService.importAll(false, new ClassPathResource("AppRegistryTests-importAll-docker-latest.properties", getClass())); @@ -518,22 +404,14 @@ public void testImportAllDockerLatest() { List registrations = appRegistrationCaptor.getAllValues(); - assertThat(registrations, - containsInAnyOrder( - allOf( - hasProperty("name", is("foo")), - hasProperty("uri", is(URI.create("docker:springcloudstream/foo-source-kafka:latest"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:foo-source-kafka:jar:metadata:2.1.2.BUILD-SNAPSHOT"))), - hasProperty("type", is(ApplicationType.source))), - allOf( - hasProperty("name", is("foo")), - hasProperty("uri", is(URI.create("docker:springcloudstream/foo-sink-kafka:latest"))), - hasProperty("metadataUri", is(URI.create("maven://org.springframework.cloud.stream.app:foo-sink-kafka:jar:metadata:2.1.2.BUILD-SNAPSHOT"))), - hasProperty("type", is(ApplicationType.sink))))); + assertThat(registrations) + .haveAtLeast(1, same("foo", ApplicationType.source, URI.create("docker:springcloudstream/foo-source-kafka:latest"), URI.create("maven://org.springframework.cloud.stream.app:foo-source-kafka:jar:metadata:2.1.2.BUILD-SNAPSHOT"))) + .haveAtLeast(1, same("foo", ApplicationType.sink, URI.create("docker:springcloudstream/foo-sink-kafka:latest"), URI.create("maven://org.springframework.cloud.stream.app:foo-sink-kafka:jar:metadata:2.1.2.BUILD-SNAPSHOT"))); + } @Test - public void testDelete() throws URISyntaxException { + void delete() throws URISyntaxException { AppRegistration fooSource = appRegistration("foo", ApplicationType.source, true); appRegistryService.delete(fooSource.getName(), fooSource.getType(), fooSource.getVersion()); verify(appRegistrationRepository, times(1)) @@ -542,7 +420,7 @@ public void testDelete() throws URISyntaxException { } @Test - public 
void testDeleteAll() throws URISyntaxException { + void deleteAll() throws URISyntaxException { List<AppRegistration> appsToDelete = Collections.emptyList(); appRegistryService.deleteAll(appsToDelete); verify(appRegistrationRepository, times(1)).deleteAll(appsToDelete); @@ -563,4 +441,12 @@ private AppRegistration appRegistration(String name, ApplicationType type, boole registration.setVersion("6.6.6"); return registration; } + static Condition<AppRegistration> same(String name, ApplicationType applicationType, URI uri, @Nullable URI metadataUri) { + return AllOf.allOf( + new Condition<>(r -> (name != null && r.getName().equals(name)) || (name == null && r.getName() == null), "AppRegistration.name:" + name), + new Condition<>(r -> (applicationType != null && applicationType.equals(r.getType())) || (applicationType == null && r.getType() == null), "AppRegistration.type:" + applicationType), + new Condition<>(r -> (uri != null && uri.equals(r.getUri())) || (uri == null && r.getUri() == null), "AppRegistration.uri:" + uri), + new Condition<>(r -> (metadataUri != null && metadataUri.equals(r.getMetadataUri())) || (metadataUri == null && r.getMetadataUri() == null), "AppRegistration.metadataUri:" + metadataUri) + ); + } } diff --git a/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/support/AppResourceCommonTests.java b/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/support/AppResourceCommonTests.java index 70d1c136b7..921f1f7153 100644 --- a/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/support/AppResourceCommonTests.java +++ b/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/support/AppResourceCommonTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2019 the original author or authors. + * Copyright 2017-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
@@ -18,7 +18,7 @@ import java.net.MalformedURLException; import java.net.URI; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.cloud.deployer.resource.docker.DockerResource; import org.springframework.cloud.deployer.resource.maven.MavenProperties; @@ -29,8 +29,8 @@ import org.springframework.core.io.UrlResource; import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.fail; -import static org.junit.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -39,183 +39,165 @@ * @author Mark Pollack * @author Ilayaperumal Gopinathan * @author Christian Tzolov + * @author Corneil du Plessis */ -public class AppResourceCommonTests { +class AppResourceCommonTests { private ResourceLoader resourceLoader = mock(ResourceLoader.class); private AppResourceCommon appResourceCommon = new AppResourceCommon(new MavenProperties(), resourceLoader); - @Test(expected = IllegalArgumentException.class) - public void testBadNamedJars() throws Exception { - UrlResource urlResource = new UrlResource("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/1.2.0.RELEASE/file-sink-rabbit.jar"); - appResourceCommon.getUrlResourceVersion(urlResource); + @Test + void badNamedJars() throws Exception { + UrlResource urlResource = new UrlResource("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/5.0.0/file-sink-rabbit.jar"); + assertThatIllegalArgumentException().isThrownBy( () -> appResourceCommon.getUrlResourceVersion(urlResource)); } @Test - public void testInvalidUrlResourceWithoutVersion() throws Exception { + void invalidUrlResourceWithoutVersion() throws Exception { assertThat(appResourceCommon.getUrlResourceWithoutVersion( - new UrlResource("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/1.2.0.RELEASE/file-sink-rabbit-1.2.0.RELEASE.jar"))) - .isEqualTo("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/1.2.0.RELEASE/file-sink-rabbit"); + new UrlResource("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/5.0.0/file-sink-rabbit-5.0.0.jar"))) + .isEqualTo("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/5.0.0/file-sink-rabbit"); } @Test - public void testInvalidURIPath() throws Exception { + void invalidURIPath() throws Exception { UrlResource urlResource = new UrlResource("https://com.com-0.0.2-snapshot/"); - try { - appResourceCommon.getUrlResourceVersion(urlResource); - fail("Excepted IllegalArgumentException for an invalid URI path"); - } - catch (Exception e) { - assertThat(e.getMessage().equals("URI path doesn't exist")); - } - } - - @Test - public void testInvalidUriSchema() { - try { - appResourceCommon.getResource("springcloud/polyglot-python-processor:0.1"); - fail("Excepted IllegalArgumentException for an invalid URI schema prefix"); - } - catch (IllegalArgumentException iae) { - assertThat(iae.getMessage().equals("Invalid URI schema for resource: " + + assertThatThrownBy(() -> appResourceCommon.getUrlResourceVersion(urlResource)) + .hasMessage("URI path doesn't exist"); + } + + 
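The recurring change in this test class replaces JUnit 4's try/fail/catch blocks and @Test(expected = ...) annotations with AssertJ's fluent exception assertions, which verify the exception type and message in one chained call and fail on their own when nothing is thrown. Below is a minimal, self-contained sketch of the two variants used in this diff; the parseVersion helper is hypothetical, invented purely for illustration, and only the assertion style mirrors the migrated tests:

    import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
    import static org.assertj.core.api.Assertions.assertThatThrownBy;

    import org.junit.jupiter.api.Test;

    class ExceptionAssertionSketch {

        // Hypothetical method under test: extracts "5.0.0" from "name-5.0.0.jar".
        static String parseVersion(String fileName) {
            if (!fileName.endsWith(".jar")) {
                throw new IllegalArgumentException("URI file name extension doesn't exist");
            }
            String base = fileName.substring(0, fileName.length() - 4);
            int dash = base.lastIndexOf('-');
            if (dash < 0) {
                throw new IllegalArgumentException("Could not parse version from " + fileName);
            }
            return base.substring(dash + 1);
        }

        @Test
        void rejectsResourceWithoutJarExtension() {
            // Typed variant: asserts IllegalArgumentException and its exact message in one chain.
            assertThatIllegalArgumentException()
                    .isThrownBy(() -> parseVersion("test.zip"))
                    .withMessage("URI file name extension doesn't exist");
        }

        @Test
        void rejectsJarWithoutVersionSuffix() {
            // Generic variant: any Throwable, narrowed by type and matched on a message prefix.
            assertThatThrownBy(() -> parseVersion("file.jar"))
                    .isInstanceOf(IllegalArgumentException.class)
                    .hasMessageStartingWith("Could not parse version from");
        }
    }

Unlike the removed catch-and-assert blocks, a test body that completes normally now fails outright, so a forgotten fail(...) call can no longer mask a missing exception.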
@Test + void invalidUriSchema() { + assertThatIllegalArgumentException().isThrownBy(() -> + appResourceCommon.getResource("springcloud/polyglot-python-processor:0.1")) + .withMessage("Invalid URI schema for resource: " + "springcloud/polyglot-python-processor:0.1 Expected URI schema prefix like file://, " + - "http:// or classpath:// but got none")); - } + "http:// or classpath:// but got none"); } @Test - public void testDefaultResource() { + void defaultResource() { String classpathUri = "classpath:AppRegistryTests-importAll.properties"; Resource resource = appResourceCommon.getResource(classpathUri); - assertTrue(resource instanceof ClassPathResource); + assertThat(resource instanceof ClassPathResource).isTrue(); } @Test - public void testDockerUriString() throws Exception { - String dockerUri = "docker:springcloudstream/log-sink-rabbit:1.2.0.RELEASE"; + void dockerUriString() throws Exception { + String dockerUri = "docker:springcloudstream/log-sink-rabbit:5.0.0"; Resource resource = appResourceCommon.getResource(dockerUri); - assertTrue(resource instanceof DockerResource); + assertThat(resource instanceof DockerResource).isTrue(); assertThat(resource.getURI().toString()).isEqualTo(dockerUri); } @Test - public void testJarMetadataUriDockerApp() throws Exception { - String appUri = "docker:springcloudstream/log-sink-rabbit:1.2.0.RELEASE"; - String metadataUri = "https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/1.2.0.RELEASE/file-sink-rabbit-1.2.0.RELEASE.jar"; - Resource metadataResource = appResourceCommon.getMetadataResource(new URI(appUri), new URI(metadataUri)); + void jarMetadataUriDockerApp() throws Exception { + String appUri = "docker:springcloudstream/log-sink-rabbit:5.0.0"; + String metadataUri = "https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/5.0.0/file-sink-rabbit-5.0.0.jar"; + appResourceCommon.getMetadataResource(new URI(appUri), new URI(metadataUri)); verify(resourceLoader).getResource(eq(metadataUri)); } @Test - public void testMetadataUriHttpApp() throws Exception { - String appUri = "https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/1.2.0.RELEASE/file-sink-rabbit-1.2.0.RELEASE.jar"; + void metadataUriHttpApp() throws Exception { + String appUri = "https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/5.0.0/file-sink-rabbit-5.0.0.jar"; Resource metadataResource = appResourceCommon.getMetadataResource(new URI(appUri), null); - assertTrue(metadataResource instanceof UrlResource); + assertThat(metadataResource instanceof UrlResource).isTrue(); assertThat(metadataResource.getURI().toString()).isEqualTo(appUri); } @Test - public void testMetadataUriDockerApp() throws Exception { - String appUri = "docker:springcloudstream/log-sink-rabbit:1.2.0.RELEASE"; + void metadataUriDockerApp() throws Exception { + String appUri = "docker:springcloudstream/log-sink-rabbit:5.0.0"; Resource metadataResource = appResourceCommon.getMetadataResource(new URI(appUri), null); assertThat(metadataResource).isNotNull(); - assertTrue(metadataResource instanceof DockerResource); + assertThat(metadataResource instanceof DockerResource).isTrue(); } @Test - public void testResourceURIWithMissingFileNameExtension() throws Exception { + void resourceURIWithMissingFileNameExtension() throws Exception { UrlResource urlResource = new UrlResource("https://com.com-0.0.2-snapshot/test"); - try { - 
appResourceCommon.getUrlResourceVersion(urlResource); - fail("Excepted IllegalArgumentException for an invalid URI path"); - } - catch (Exception e) { - assertThat(e.getMessage().equals("URI file name extension doesn't exist")); - } + assertThatThrownBy(() -> appResourceCommon.getUrlResourceVersion(urlResource)) + .hasMessage("URI file name extension doesn't exist"); } @Test - public void testInvalidUrlResourceURI() throws Exception { + void invalidUrlResourceURI() throws Exception { UrlResource urlResource = new UrlResource("https://com.com-0.0.2-snapshot/test.zip"); - try { - appResourceCommon.getUrlResourceVersion(urlResource); - fail("Excepted IllegalArgumentException for an invalid URL resource URI"); - } - catch (Exception e) { - assertThat(e.getMessage().equals("Could not parse version from https://com.com-0.0.2-SNAPSHOT/test.zip, expected format is -.jar")); - } + assertThatThrownBy(() -> appResourceCommon.getUrlResourceVersion(urlResource)) + .hasMessageStartingWith("Could not parse version from https://com.com-0.0.2-SNAPSHOT/test.zip, expected format is -.jar"); } @Test - public void testJars() throws MalformedURLException { + void jars() throws MalformedURLException { //Dashes in artifact name - UrlResource urlResource = new UrlResource("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit-1.2.0.RELEASE.jar"); + UrlResource urlResource = new UrlResource("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit-5.0.0.jar"); String version = appResourceCommon.getUrlResourceVersion(urlResource); - assertThat(version).isEqualTo("1.2.0.RELEASE"); + assertThat(version).isEqualTo("5.0.0"); String theRest = appResourceCommon.getResourceWithoutVersion(urlResource); - assertThat(theRest).isEqualTo("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit"); + assertThat(theRest).isEqualTo("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit"); //No dashes in artifact name - BUILD-SNAPSHOT - urlResource = new UrlResource("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file/file-1.2.0.BUILD-SNAPSHOT.jar"); + urlResource = new UrlResource("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file/file-5.0.1-SNAPSHOT.jar"); version = appResourceCommon.getUrlResourceVersion(urlResource); - assertThat(version).isEqualTo("1.2.0.BUILD-SNAPSHOT"); + assertThat(version).isEqualTo("5.0.1-SNAPSHOT"); theRest = appResourceCommon.getResourceWithoutVersion(urlResource); - assertThat(theRest).isEqualTo("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file/file"); + assertThat(theRest).isEqualTo("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file/file"); //No dashes in artifact name - RELEASE - urlResource = new UrlResource("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file/file-1.2.0.RELEASE.jar"); + urlResource = new UrlResource("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file/file-5.0.0.jar"); version = appResourceCommon.getUrlResourceVersion(urlResource); - assertThat(version).isEqualTo("1.2.0.RELEASE"); + assertThat(version).isEqualTo("5.0.0"); theRest = appResourceCommon.getResourceWithoutVersion(urlResource); - 
assertThat(theRest).isEqualTo("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file/file"); + assertThat(theRest).isEqualTo("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file/file"); //Spring style snapshots naming scheme - urlResource = new UrlResource("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit-1.2.0.BUILD-SNAPSHOT.jar"); + urlResource = new UrlResource("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit-5.0.1-SNAPSHOT.jar"); version = appResourceCommon.getUrlResourceVersion(urlResource); - assertThat(version).isEqualTo("1.2.0.BUILD-SNAPSHOT"); + assertThat(version).isEqualTo("5.0.1-SNAPSHOT"); theRest = appResourceCommon.getResourceWithoutVersion(urlResource); - assertThat(theRest).isEqualTo("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit"); + assertThat(theRest).isEqualTo("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit"); //Standard maven style naming scheme - urlResource = new UrlResource("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit-1.2.0-SNAPSHOT.jar"); + urlResource = new UrlResource("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit-5.0.1-SNAPSHOT.jar"); version = appResourceCommon.getUrlResourceVersion(urlResource); - assertThat(version).isEqualTo("1.2.0-SNAPSHOT"); + assertThat(version).isEqualTo("5.0.1-SNAPSHOT"); theRest = appResourceCommon.getResourceWithoutVersion(urlResource); - assertThat(theRest).isEqualTo("https://repo.spring.io/libs-release/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit"); + assertThat(theRest).isEqualTo("https://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/file-sink-rabbit/file-sink-rabbit"); } @Test - public void testGetResourceWithoutVersion() { + void getResourceWithoutVersion() { assertThat(appResourceCommon.getResourceWithoutVersion( - MavenResource.parse("org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:war:exec:1.3.0.RELEASE"))) + MavenResource.parse("org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:war:exec:5.0.0"))) .isEqualTo("maven://org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:war:exec"); assertThat(appResourceCommon.getResourceWithoutVersion( - MavenResource.parse("org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit::exec:1.3.0.RELEASE"))) + MavenResource.parse("org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit::exec:5.0.0"))) .isEqualTo("maven://org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:jar:exec"); assertThat(appResourceCommon.getResourceWithoutVersion( - MavenResource.parse("org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:1.3.0.RELEASE"))) + MavenResource.parse("org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:5.0.0"))) .isEqualTo("maven://org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:jar"); } @Test - public void testGetResource() { - String mavenUri = "maven://org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:1.3.0.RELEASE"; + void getResource() { + String mavenUri = 
"maven://org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:5.0.0"; Resource resource = appResourceCommon.getResource(mavenUri); assertThat(resource).isInstanceOf(MavenResource.class); } @Test - public void testGetResourceVersion() { - String mavenUri = "maven://org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:1.3.0.RELEASE"; + void getResourceVersion() { + String mavenUri = "maven://org.springframework.cloud.stream.app:aggregate-counter-sink-rabbit:5.0.0"; String version = appResourceCommon.getResourceVersion(appResourceCommon.getResource(mavenUri)); - assertThat(version).isEqualTo("1.3.0.RELEASE"); + assertThat(version).isEqualTo("5.0.0"); } @Test - public void testGetMetadataResourceVersion() { - String httpUri = "http://repo.spring.io/release/org/springframework/cloud/stream/app/cassandra-sink-rabbit/2.1.0.BUILD-SNAPSHOT/cassandra-sink-rabbit-2.1.0.BUILD-SNAPSHOT-metadata.jar"; + void getMetadataResourceVersion() { + String httpUri = "http://repo.maven.apache.org/maven2/org/springframework/cloud/stream/app/cassandra-sink-rabbit/5.0.1-SNAPSHOT/cassandra-sink-rabbit-5.0.1-SNAPSHOT-metadata.jar"; String version = appResourceCommon.getResourceVersion(appResourceCommon.getResource(httpUri)); - assertThat(version).isEqualTo("2.1.0.BUILD-SNAPSHOT"); + assertThat(version).isEqualTo("5.0.1-SNAPSHOT"); } } diff --git a/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/support/DockerImageTests.java b/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/support/DockerImageTests.java index 5b12270c61..9b255e40b9 100644 --- a/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/support/DockerImageTests.java +++ b/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/support/DockerImageTests.java @@ -15,120 +15,76 @@ */ package org.springframework.cloud.dataflow.registry.support; -/** - * @author Mark Pollack - */ - import java.util.ArrayList; import java.util.List; +import java.util.stream.Stream; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.params.provider.Arguments.arguments; /** * Test for DockerImage parsing methods Code from https://github.com/vmware/admiral + * + * @author Mark Pollack + * @author Corneil du Plessis */ -@RunWith(Parameterized.class) -public class DockerImageTests { - private final String description; - - private final String fullImageName; - - private final String expectedHost; +class DockerImageTests { + + static class DockerImageNames implements ArgumentsProvider { + + @Override + public Stream provideArguments(ExtensionContext extensionContext) throws Exception { + List data = new ArrayList<>(); + data.add(arguments("all sections", "myhost:300/namespace/repo:tag", "myhost:300", "namespace", "repo", + "namespace/repo", "tag")); + data.add(arguments("repo and tag", "repo:tag", null, null, "repo", "library/repo", "tag")); + data.add(arguments("implicit registry, repo and tag", "library/repo:tag", null, "library", 
"repo", + "library/repo", "tag")); + data.add(arguments("repo without tag", "repo", null, null, "repo", "library/repo", "latest")); + data.add(arguments("namespace and repo", "namespace/repo", null, "namespace", "repo", "namespace/repo", + "latest")); + data.add( + arguments("host with dot and repo", "host.name/repo", "host.name", null, "repo", "repo", "latest")); + data.add(arguments("host with colon and repo", "host:3000/repo", "host:3000", null, "repo", "repo", + "latest")); + data.add(arguments("host with colon, repo and tag", "host:3000/repo:tag", "host:3000", null, "repo", "repo", + "tag")); + data.add(arguments("official repo with default namespace", "registry.hub.docker.com/library/repo:tag", + "registry.hub.docker.com", "library", "repo", "library/repo", "tag")); + data.add(arguments("official repo with custom namespace", "registry.hub.docker.com/user/repo:tag", + "registry.hub.docker.com", "user", "repo", "user/repo", "tag")); + data.add(arguments("official repo with default namespace", "docker.io/library/repo:tag", "docker.io", + "library", "repo", "library/repo", "tag")); + data.add(arguments("official repo with custom namespace", "docker.io/user/repo:tag", "docker.io", "user", + "repo", "user/repo", "tag")); + data.add(arguments("host and three path components of repo", "host/namespace/category/repo", "host", + "namespace/category", "repo", "namespace/category/repo", "latest")); + data.add(arguments("host, port, three path components of repo and tag", + "host:5000/namespace/category/repo:tag", "host:5000", "namespace/category", "repo", + "namespace/category/repo", "tag")); + + return data.stream(); + } - private final String expectedNamespace; - - private final String expectedRepo; - - private final String expectedNamespaceAndRepo; - - private final String expectedTag; - - /** - * @param expectedHost - * @param expectedNamespace - * @param expectedRepo - */ - public DockerImageTests(String description, String fullImageName, String expectedHost, - String expectedNamespace, - String expectedRepo, - String expectedNamespaceAndRepo, - String expectedTag) { - - this.description = description; - this.fullImageName = fullImageName; - this.expectedHost = expectedHost; - this.expectedNamespace = expectedNamespace; - this.expectedRepo = expectedRepo; - this.expectedNamespaceAndRepo = expectedNamespaceAndRepo; - this.expectedTag = expectedTag; } - @Parameterized.Parameters - public static List data() { - List data = new ArrayList<>(); - data.add(new String[] { "all sections", "myhost:300/namespace/repo:tag", "myhost:300", - "namespace", "repo", "namespace/repo", "tag" }); - - data.add(new String[] { "repo and tag", "repo:tag", null, null, "repo", "library/repo", - "tag" }); - - data.add(new String[] { "implicit registry, repo and tag", "library/repo:tag", null, - "library", "repo", "library/repo", "tag" }); - - data.add(new String[] { "repo without tag", "repo", null, null, "repo", "library/repo", - "latest" }); - - data.add(new String[] { "namespace and repo", "namespace/repo", null, "namespace", "repo", - "namespace/repo", "latest" }); - - data.add(new String[] { "host with dot and repo", "host.name/repo", "host.name", null, - "repo", "repo", "latest" }); - - data.add(new String[] { "host with colon and repo", "host:3000/repo", "host:3000", null, - "repo", "repo", "latest" }); - - data.add(new String[] { "host with colon, repo and tag", "host:3000/repo:tag", "host:3000", - null, "repo", "repo", "tag" }); - - data.add(new String[] { "official repo with default namespace", - 
"registry.hub.docker.com/library/repo:tag", "registry.hub.docker.com", "library", - "repo", "library/repo", "tag" }); - - data.add(new String[] { "official repo with custom namespace", - "registry.hub.docker.com/user/repo:tag", "registry.hub.docker.com", "user", "repo", - "user/repo", "tag" }); - - data.add(new String[] { "official repo with default namespace", - "docker.io/library/repo:tag", "docker.io", "library", "repo", "library/repo", - "tag" }); - - data.add(new String[] { "official repo with custom namespace", - "docker.io/user/repo:tag", "docker.io", "user", "repo", "user/repo", "tag" }); - - data.add(new String[] { "host and three path components of repo", - "host/namespace/category/repo", "host", "namespace/category", "repo", - "namespace/category/repo", "latest" }); - - data.add(new String[] { "host, port, three path components of repo and tag", - "host:5000/namespace/category/repo:tag", "host:5000", "namespace/category", "repo", - "namespace/category/repo", "tag" }); - - return data; - } - - @Test - public void testDockerImageParsing() { - + @ParameterizedTest + @ArgumentsSource(DockerImageNames.class) + void dockerImageParsing(String description, String fullImageName, String expectedHost, + String expectedNamespace, String expectedRepo, String expectedNamespaceAndRepo, String expectedTag) { DockerImage dockerImage = DockerImage.fromImageName(fullImageName); - assertEquals(description + ": host", expectedHost, dockerImage.getHost()); - assertEquals(description + ": namespace", expectedNamespace, dockerImage.getNamespace()); - assertEquals(description + ": repository", expectedRepo, dockerImage.getRepository()); - assertEquals(description + ": namespace and repo", expectedNamespaceAndRepo, - dockerImage.getNamespaceAndRepo()); - assertEquals(description + ": tag", expectedTag, dockerImage.getTag()); + assertThat(dockerImage.getHost()).as(description + ": host").isEqualTo(expectedHost); + assertThat(dockerImage.getNamespace()).as(description + ": namespace").isEqualTo(expectedNamespace); + assertThat(dockerImage.getRepository()).as(description + ": repository").isEqualTo(expectedRepo); + assertThat(dockerImage.getNamespaceAndRepo()).as(description + ": namespace and repo") + .isEqualTo(expectedNamespaceAndRepo); + assertThat(dockerImage.getTag()).as(description + ": tag").isEqualTo(expectedTag); } + } diff --git a/spring-cloud-dataflow-registry/src/test/resources/org/springframework/cloud/dataflow/registry/service/AppRegistryTests-importBootVersionsMissingURI.properties b/spring-cloud-dataflow-registry/src/test/resources/org/springframework/cloud/dataflow/registry/service/AppRegistryTests-importBootVersionsMissingURI.properties new file mode 100644 index 0000000000..7cbe53c976 --- /dev/null +++ b/spring-cloud-dataflow-registry/src/test/resources/org/springframework/cloud/dataflow/registry/service/AppRegistryTests-importBootVersionsMissingURI.properties @@ -0,0 +1,4 @@ +source.time=maven://org.springframework.cloud.stream.app:time-source-rabbit:3.0.0 +source.time.metadata=maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:3.0.0 +source.time=maven://org.springframework.cloud.stream.app:time-source-rabbit:2.0.1 +source.time.metadata=maven://org.springframework.cloud.stream.app:time-source-rabbit:jar:metadata:2.0.1 diff --git a/spring-cloud-dataflow-rest-client/pom.xml b/spring-cloud-dataflow-rest-client/pom.xml index 8fb4fabdd4..56ea93250f 100644 --- a/spring-cloud-dataflow-rest-client/pom.xml +++ b/spring-cloud-dataflow-rest-client/pom.xml @@ -4,11 +4,23 
@@ org.springframework.cloud spring-cloud-dataflow-parent - 2.8.0-SNAPSHOT + 3.0.0-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-rest-client + spring-cloud-dataflow-rest-client + Data Flow Rest Client + jar + + true + 3.4.1 + + + org.springframework.security + spring-security-oauth2-client + com.fasterxml.jackson.datatype jackson-datatype-jdk8 @@ -30,18 +42,15 @@ spring-boot-configuration-processor true - - org.springframework.boot - spring-boot-starter-test - test - org.springframework.cloud spring-cloud-skipper + ${project.version} org.springframework.cloud spring-cloud-dataflow-rest-resource + ${project.version} org.springframework.cloud @@ -58,5 +67,50 @@ jsr305 provided + + org.springframework.boot + spring-boot-starter-test + test + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.13.0 + + true + ${java.version} + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + + + source + + jar + + package + + 3.3.0 + + + diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryOperations.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryOperations.java index b89878e760..7c47ab83bd 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryOperations.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryOperations.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2019 the original author or authors. + * Copyright 2015-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -32,6 +32,7 @@ * @author Patrick Peralta * @author Mark Fisher * @author Chris Schaefer + * @author Chris Bono */ public interface AppRegistryOperations { @@ -82,8 +83,10 @@ public interface AppRegistryOperations { * @param force if {@code true}, overwrites a pre-existing registration * @return the new app registration */ + @Deprecated AppRegistrationResource register(String name, ApplicationType type, String uri, String metadataUri, boolean force); + /** * Register an application name, type and version with its Maven coordinates. * @@ -95,6 +98,7 @@ public interface AppRegistryOperations { * @param force if {@code true}, overwrites a pre-existing registration * @return the new app registration */ + @Deprecated AppRegistrationResource register(String name, ApplicationType type, String version, String uri, String metadataUri, boolean force); diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryTemplate.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryTemplate.java index 96ce270cd5..338fa6ff41 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryTemplate.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryTemplate.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2019 the original author or authors. + * Copyright 2015-2023 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -40,6 +40,8 @@ * @author Patrick Peralta * @author Christian Tzolov * @author Chris Schaefer + * @author Chris Bono + * @author Corneil du Plessis */ public class AppRegistryTemplate implements AppRegistryOperations { /** @@ -60,7 +62,7 @@ public class AppRegistryTemplate implements AppRegistryOperations { /** * Construct a {@code AppRegistryTemplate} object. * - * @param restTemplate template for HTTP/rest commands + * @param restTemplate template for HTTP/rest commands * @param resourceSupport HATEOAS link support */ public AppRegistryTemplate(RestTemplate restTemplate, RepresentationModel resourceSupport) { @@ -112,31 +114,44 @@ public DetailedAppRegistrationResource info(String name, ApplicationType type, S } @Override - public AppRegistrationResource register(String name, ApplicationType type, String uri, String metadataUri, - boolean force) { - MultiValueMap values = new LinkedMultiValueMap(); - values.add("uri", uri); - if (metadataUri != null) { - values.add("metadata-uri", metadataUri); - } - values.add("force", Boolean.toString(force)); - + public AppRegistrationResource register( + String name, + ApplicationType type, + String uri, + String metadataUri, + boolean force + ) { + MultiValueMap values = valuesForRegisterPost(uri, metadataUri, force); return restTemplate.postForObject(appsLink.getHref() + "/{type}/{name}", values, AppRegistrationResource.class, type, name); } @Override - public AppRegistrationResource register(String name, ApplicationType type, String version, String uri, - String metadataUri, boolean force) { + public AppRegistrationResource register( + String name, + ApplicationType type, + String version, + String uri, + String metadataUri, + boolean force + ) { + MultiValueMap values = valuesForRegisterPost(uri, metadataUri, force); + return restTemplate.postForObject(appsLink.getHref() + "/{type}/{name}/{version}", values, + AppRegistrationResource.class, type, name, version); + } + + private MultiValueMap valuesForRegisterPost( + String uri, + String metadataUri, + boolean force + ) { MultiValueMap values = new LinkedMultiValueMap<>(); values.add("uri", uri); if (metadataUri != null) { values.add("metadata-uri", metadataUri); } values.add("force", Boolean.toString(force)); - - return restTemplate.postForObject(appsLink.getHref() + "/{type}/{name}/{version}", values, - AppRegistrationResource.class, type, name, version); + return values; } @Override diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/DataFlowTemplate.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/DataFlowTemplate.java index 14262ccb29..34f49958e2 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/DataFlowTemplate.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/DataFlowTemplate.java @@ -21,26 +21,21 @@ import java.util.Map; import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.datatype.jdk8.Jdk8Module; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; -import org.springframework.batch.core.ExitStatus; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobInstance; -import org.springframework.batch.core.JobParameter; -import 
org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.item.ExecutionContext; import org.springframework.cloud.dataflow.rest.Version; -import org.springframework.cloud.dataflow.rest.client.support.ExitStatusJacksonMixIn; -import org.springframework.cloud.dataflow.rest.client.support.JobExecutionJacksonMixIn; -import org.springframework.cloud.dataflow.rest.client.support.JobInstanceJacksonMixIn; -import org.springframework.cloud.dataflow.rest.client.support.JobParameterJacksonMixIn; -import org.springframework.cloud.dataflow.rest.client.support.JobParametersJacksonMixIn; -import org.springframework.cloud.dataflow.rest.client.support.StepExecutionHistoryJacksonMixIn; -import org.springframework.cloud.dataflow.rest.client.support.StepExecutionJacksonMixIn; -import org.springframework.cloud.dataflow.rest.job.StepExecutionHistory; import org.springframework.cloud.dataflow.rest.resource.RootResource; import org.springframework.cloud.dataflow.rest.resource.about.AboutResource; import org.springframework.cloud.dataflow.rest.support.jackson.ExecutionContextJacksonMixIn; +import org.springframework.cloud.dataflow.rest.support.jackson.ExitStatusJacksonMixIn; +import org.springframework.cloud.dataflow.rest.support.jackson.Jackson2DataflowModule; +import org.springframework.cloud.dataflow.rest.support.jackson.JobExecutionJacksonMixIn; +import org.springframework.cloud.dataflow.rest.support.jackson.JobInstanceJacksonMixIn; +import org.springframework.cloud.dataflow.rest.support.jackson.JobParameterJacksonMixIn; +import org.springframework.cloud.dataflow.rest.support.jackson.JobParametersJacksonMixIn; +import org.springframework.cloud.dataflow.rest.support.jackson.StepExecutionHistoryJacksonMixIn; +import org.springframework.cloud.dataflow.rest.support.jackson.StepExecutionJacksonMixIn; import org.springframework.hateoas.Link; import org.springframework.hateoas.RepresentationModel; import org.springframework.hateoas.UriTemplate; @@ -72,7 +67,7 @@ public class DataFlowTemplate implements DataFlowOperations { /** * Holds discovered URLs of the API. */ - protected final Map resources = new HashMap(); + protected final Map resources = new HashMap<>(); /** * REST client for stream operations. @@ -133,8 +128,8 @@ public class DataFlowTemplate implements DataFlowOperations { * * @param baseURI Must not be null */ - public DataFlowTemplate(URI baseURI) { - this(baseURI, getDefaultDataflowRestTemplate()); + public DataFlowTemplate(URI baseURI, ObjectMapper mapper) { + this(baseURI, getDefaultDataflowRestTemplate(), mapper); } /** @@ -142,10 +137,10 @@ public DataFlowTemplate(URI baseURI) { * missing Mixins for Jackson will be added implicitly. For more information, please * see {@link #prepareRestTemplate(RestTemplate)}. 
* - * @param baseURI Must not be null + * @param baseURI Must not be null * @param restTemplate Must not be null */ - public DataFlowTemplate(URI baseURI, RestTemplate restTemplate) { + public DataFlowTemplate(URI baseURI, RestTemplate restTemplate, ObjectMapper mapper) { Assert.notNull(baseURI, "The provided baseURI must not be null."); Assert.notNull(restTemplate, "The provided restTemplate must not be null."); @@ -177,31 +172,37 @@ public DataFlowTemplate(URI baseURI, RestTemplate restTemplate) { if (resourceSupport.hasLink(StreamTemplate.DEFINITIONS_REL)) { this.streamOperations = new StreamTemplate(restTemplate, resourceSupport, getVersion()); this.runtimeOperations = new RuntimeTemplate(restTemplate, resourceSupport); - } - else { + } else { this.streamOperations = null; this.runtimeOperations = null; } if (resourceSupport.hasLink(TaskTemplate.DEFINITIONS_RELATION)) { + if (mapper == null) { + mapper = new ObjectMapper(); + mapper.registerModule(new Jdk8Module()); + mapper.registerModule(new Jackson2HalModule()); + mapper.registerModule(new JavaTimeModule()); + mapper.registerModule(new Jackson2DataflowModule()); + } this.taskOperations = new TaskTemplate(restTemplate, resourceSupport, getVersion()); this.jobOperations = new JobTemplate(restTemplate, resourceSupport); - if(resourceSupport.hasLink(SchedulerTemplate.SCHEDULES_RELATION)) { + if (resourceSupport.hasLink(SchedulerTemplate.SCHEDULES_RELATION)) { this.schedulerOperations = new SchedulerTemplate(restTemplate, resourceSupport); - } - else { + } else { schedulerOperations = null; } - } - else { + } else { this.taskOperations = null; this.jobOperations = null; this.schedulerOperations = null; } this.appRegistryOperations = new AppRegistryTemplate(restTemplate, resourceSupport); - this.completionOperations = new CompletionTemplate(restTemplate, - resourceSupport.getLink("completions/stream").get(), resourceSupport.getLink("completions/task").get()); - } - else { + this.completionOperations = new CompletionTemplate( + restTemplate, + resourceSupport.getLink("completions/stream").get(), + resourceSupport.getLink("completions/task").get() + ); + } else { this.aboutOperations = null; this.streamOperations = null; this.runtimeOperations = null; @@ -216,7 +217,7 @@ public DataFlowTemplate(URI baseURI, RestTemplate restTemplate) { private String getVersion() { String version = ""; AboutResource aboutResource = this.aboutOperations.get(); - if(aboutResource != null) { + if (aboutResource != null) { version = aboutResource.getVersionInfo().getCore().getVersion(); } return version; @@ -235,7 +236,7 @@ private String getVersion() { *

 * <li>{@link ExecutionContextJacksonMixIn}</li>
 * <li>{@link StepExecutionHistoryJacksonMixIn}</li> * </ul> - * + * <p>

    * Furthermore, this method will also register the {@link Jackson2HalModule} * * @param restTemplate Can be null. Instantiates a new {@link RestTemplate} if null @@ -251,9 +252,8 @@ public static RestTemplate prepareRestTemplate(RestTemplate restTemplate) { boolean containsMappingJackson2HttpMessageConverter = false; for (HttpMessageConverter converter : restTemplate.getMessageConverters()) { - if (converter instanceof MappingJackson2HttpMessageConverter) { + if (converter instanceof MappingJackson2HttpMessageConverter jacksonConverter) { containsMappingJackson2HttpMessageConverter = true; - final MappingJackson2HttpMessageConverter jacksonConverter = (MappingJackson2HttpMessageConverter) converter; DataFlowTemplate.prepareObjectMapper(jacksonConverter.getObjectMapper()); } } @@ -275,15 +275,7 @@ public static RestTemplate prepareRestTemplate(RestTemplate restTemplate) { public static ObjectMapper prepareObjectMapper(ObjectMapper objectMapper) { Assert.notNull(objectMapper, "The objectMapper must not be null."); return objectMapper - .registerModule(new Jackson2HalModule()) - .addMixIn(JobExecution.class, JobExecutionJacksonMixIn.class) - .addMixIn(JobParameters.class, JobParametersJacksonMixIn.class) - .addMixIn(JobParameter.class, JobParameterJacksonMixIn.class) - .addMixIn(JobInstance.class, JobInstanceJacksonMixIn.class) - .addMixIn(ExitStatus.class, ExitStatusJacksonMixIn.class) - .addMixIn(StepExecution.class, StepExecutionJacksonMixIn.class) - .addMixIn(ExecutionContext.class, ExecutionContextJacksonMixIn.class) - .addMixIn(StepExecutionHistory.class, StepExecutionHistoryJacksonMixIn.class); + .registerModules(new Jackson2HalModule(), new Jackson2DataflowModule()); } /** @@ -296,12 +288,9 @@ public static RestTemplate getDefaultDataflowRestTemplate() { } public Link getLink(RepresentationModel resourceSupport, String rel) { - Link link = resourceSupport.getLink(rel).get(); - if (link == null) { - throw new DataFlowServerException( - "Server did not return a link for '" + rel + "', links: '" + resourceSupport + "'"); - } - return link; + return resourceSupport.getLink(rel).orElseThrow(() -> + new DataFlowServerException("Server did not return a link for '" + rel + "', links: '" + resourceSupport + "'") + ); } @Override diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobOperations.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobOperations.java index ed575f0cd8..8bbd8dc325 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobOperations.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobOperations.java @@ -38,10 +38,20 @@ public interface JobOperations { /** * Restarts a job by id * - * @param id job execution id + * @param id job execution id */ void executionRestart(long id); + /** + * Restarts a job by id + * + * @param id job execution id + * @param useJsonJobParameters if true {@link org.springframework.batch.core.JobParameters} will be serialized to JSON. + * Default is {@code Null} which will serialize the {@link org.springframework.batch.core.JobParameters} + * to the default specified in SCDF's configuration. + */ + void executionRestart(long id, Boolean useJsonJobParameters); + /** * @return the list job executions without step executions known to the system. 
*/ @@ -69,11 +79,10 @@ public interface JobOperations { */ PagedModel executionListByJobName(String jobName); - /** * Return the {@link JobExecutionResource} for the id specified. * - * @param id identifier of the job execution + * @param id identifier of the job execution * @return {@link JobExecutionResource} */ JobExecutionResource jobExecution(long id); @@ -81,7 +90,7 @@ public interface JobOperations { /** * Return the {@link JobInstanceResource} for the id specified. * - * @param id identifier of the job instasnce + * @param id identifier of the job instance * @return {@link JobInstanceResource} */ JobInstanceResource jobInstance(long id); @@ -98,7 +107,7 @@ public interface JobOperations { * Return StepExecutionProgressInfoResource for a specific job execution id and step * execution Id. * - * @param jobExecutionId the id of the job execution for the step to be returned. + * @param jobExecutionId the id of the job execution for the step to be returned. * @param stepExecutionId the id step execution to be returned. * @return the step execution progress info */ diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobTemplate.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobTemplate.java index a88f635562..1254d5e2f0 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobTemplate.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobTemplate.java @@ -26,6 +26,7 @@ import org.springframework.hateoas.RepresentationModel; import org.springframework.util.Assert; import org.springframework.web.client.RestTemplate; +import org.springframework.web.util.UriComponentsBuilder; /** * Implementation for {@link JobOperations}. 
@@ -34,11 +35,11 @@ */ public class JobTemplate implements JobOperations { - private static final String EXECUTIONS_RELATION = "jobs/thinexecutions"; - + private static final String EXECUTIONS_THIN_RELATION = "jobs/thinexecutions"; + private static final String EXECUTIONS_RELATION = "jobs/executions"; private static final String EXECUTION_RELATION = "jobs/executions/execution"; - - private static final String EXECUTION_RELATION_BY_NAME = "jobs/thinexecutions/name"; + private static final String EXECUTION_RELATION_BY_NAME = "jobs/executions/name"; + private static final String EXECUTION_THIN_RELATION_BY_NAME = "jobs/thinexecutions/name"; private static final String INSTANCE_RELATION = "jobs/instances/instance"; @@ -51,6 +52,9 @@ public class JobTemplate implements JobOperations { private final RestTemplate restTemplate; private final Link executionsLink; + private final Link thinExecutionsLink; + + private final Link thinExecutionByNameLink; private final Link executionLink; @@ -67,21 +71,25 @@ public class JobTemplate implements JobOperations { JobTemplate(RestTemplate restTemplate, RepresentationModel<?> resources) { Assert.notNull(resources, "URI CollectionModel must not be null"); Assert.notNull(restTemplate, "RestTemplate must not be null"); - Assert.notNull(resources.getLink(EXECUTIONS_RELATION), "Executions relation is required"); - Assert.notNull(resources.getLink(EXECUTION_RELATION), "Execution relation is required"); - Assert.notNull(resources.getLink(EXECUTION_RELATION_BY_NAME), "Execution by name relation is required"); - Assert.notNull(resources.getLink(INSTANCE_RELATION), "Instance relation is required"); - Assert.notNull(resources.getLink(INSTANCE_RELATION_BY_NAME), "Instance by name relation is required"); - Assert.notNull(resources.getLink(STEP_EXECUTION_RELATION_BY_ID), "Step Execution by id relation is required"); - Assert.notNull(resources.getLink(STEP_EXECUTION_PROGRESS_RELATION_BY_ID), + Assert.isTrue(resources.getLink(EXECUTIONS_RELATION).isPresent(), "Executions relation is required"); + Assert.isTrue(resources.getLink(EXECUTIONS_THIN_RELATION).isPresent(), "Executions thin relation is required"); + Assert.isTrue(resources.getLink(EXECUTION_THIN_RELATION_BY_NAME).isPresent(), "Executions thin by name relation is required"); + Assert.isTrue(resources.getLink(EXECUTION_RELATION).isPresent(), "Execution relation is required"); + Assert.isTrue(resources.getLink(EXECUTION_RELATION_BY_NAME).isPresent(), "Execution by name relation is required"); + Assert.isTrue(resources.getLink(INSTANCE_RELATION).isPresent(), "Instance relation is required"); + Assert.isTrue(resources.getLink(INSTANCE_RELATION_BY_NAME).isPresent(), "Instance by name relation is required"); + Assert.isTrue(resources.getLink(STEP_EXECUTION_RELATION_BY_ID).isPresent(), "Step Execution by id relation is required"); + Assert.isTrue(resources.getLink(STEP_EXECUTION_PROGRESS_RELATION_BY_ID).isPresent(), "Step Execution Progress by id " + "relation is required"); - Assert.notNull(resources.getLink(STEP_EXECUTION_PROGRESS_RELATION_BY_ID), + Assert.isTrue(resources.getLink(STEP_EXECUTION_PROGRESS_RELATION_BY_ID).isPresent(), "Step Execution View by id relation" + " is required"); this.restTemplate = restTemplate; this.executionsLink = resources.getLink(EXECUTIONS_RELATION).get(); + this.thinExecutionsLink = resources.getLink(EXECUTIONS_THIN_RELATION).get(); this.executionLink = resources.getLink(EXECUTION_RELATION).get(); this.executionByNameLink = resources.getLink(EXECUTION_RELATION_BY_NAME).get(); + 
this.thinExecutionByNameLink = resources.getLink(EXECUTION_THIN_RELATION_BY_NAME).get(); this.instanceLink = resources.getLink(INSTANCE_RELATION).get(); this.instanceByNameLink = resources.getLink(INSTANCE_RELATION_BY_NAME).get(); this.stepExecutionsLink = resources.getLink(STEP_EXECUTION_RELATION_BY_ID).get(); @@ -90,26 +98,29 @@ public class JobTemplate implements JobOperations { @Override public PagedModel executionList() { - String uriTemplate = executionsLink.getHref(); - uriTemplate = uriTemplate + "?size=2000"; - - return restTemplate.getForObject(uriTemplate, JobExecutionResource.Page.class); + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(executionsLink.getHref()).queryParam("size", "2000"); + return restTemplate.getForObject(builder.toUriString(), JobExecutionResource.Page.class); } @Override public void executionRestart(long id) { - String uriTemplate = executionLink.expand(id).getHref(); - uriTemplate = uriTemplate + "?restart=true"; + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(executionLink.expand(id).getHref()).queryParam("restart", "true"); - restTemplate.put(uriTemplate, null); + restTemplate.put(builder.toUriString(), null); } @Override - public PagedModel executionThinList() { - String uriTemplate = executionsLink.getHref(); - uriTemplate = uriTemplate + "?size=2000"; + public void executionRestart(long id, Boolean useJsonJobParameters) { + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(executionLink.expand(id).getHref()).queryParam("restart", "true") + .queryParam("useJsonJobParameters", useJsonJobParameters); - return restTemplate.getForObject(uriTemplate, JobExecutionThinResource.Page.class); + restTemplate.put(builder.toUriString(), null); + } + + @Override + public PagedModel executionThinList() { + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(thinExecutionsLink.getHref()).queryParam("size", "2000"); + return restTemplate.getForObject(builder.toUriString(), JobExecutionThinResource.Page.class); } @Override @@ -119,36 +130,38 @@ public PagedModel instanceList(String jobName) { @Override public PagedModel executionThinListByJobName(String jobName) { - return restTemplate.getForObject(executionByNameLink.expand(jobName).getHref(), - JobExecutionThinResource.Page.class); + return restTemplate.getForObject(thinExecutionByNameLink.expand(jobName).getHref(), JobExecutionThinResource.Page.class); } @Override public PagedModel executionListByJobName(String jobName) { - return restTemplate.getForObject(executionByNameLink.expand(jobName).getHref(), - JobExecutionResource.Page.class); + return restTemplate.getForObject(executionByNameLink.expand(jobName).getHref(), JobExecutionResource.Page.class); } @Override public JobExecutionResource jobExecution(long id) { - return restTemplate.getForObject(executionLink.expand(id).getHref(), JobExecutionResource.class); + String url = executionLink.expand(id).getHref(); + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(url); + + return restTemplate.getForObject(builder.toUriString(), JobExecutionResource.class); } @Override public JobInstanceResource jobInstance(long id) { - return restTemplate.getForObject(instanceLink.expand(id).getHref(), JobInstanceResource.class); + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(instanceLink.expand(id).getHref()); + return restTemplate.getForObject(builder.toUriString(), JobInstanceResource.class); } @Override public PagedModel stepExecutionList(long jobExecutionId) { - return 
restTemplate.getForObject(stepExecutionsLink.expand(jobExecutionId).getHref(), - StepExecutionResource.Page.class); + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(stepExecutionsLink.expand(jobExecutionId).getHref()); + return restTemplate.getForObject(builder.toUriString(), StepExecutionResource.Page.class); } @Override public StepExecutionProgressInfoResource stepExecutionProgress(long jobExecutionId, long stepExecutionId) { - return restTemplate.getForObject(stepExecutionProgressLink.expand(jobExecutionId, stepExecutionId).getHref(), - StepExecutionProgressInfoResource.class); + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(stepExecutionProgressLink.expand(jobExecutionId, stepExecutionId).getHref()); + return restTemplate.getForObject(builder.toUriString(), StepExecutionProgressInfoResource.class); } } diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/RuntimeOperations.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/RuntimeOperations.java index c7fea197af..0771a174b9 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/RuntimeOperations.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/RuntimeOperations.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2016 the original author or authors. + * Copyright 2015-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,15 +16,20 @@ package org.springframework.cloud.dataflow.rest.client; +import java.util.Map; + import org.springframework.cloud.dataflow.rest.resource.AppStatusResource; import org.springframework.cloud.dataflow.rest.resource.StreamStatusResource; import org.springframework.hateoas.PagedModel; +import org.springframework.http.HttpHeaders; /** * Defines operations available for obtaining information about deployed apps. * * @author Eric Bottard * @author Mark Fisher + * @author Chris Bono + * @author Corneil du Plessis */ public interface RuntimeOperations { @@ -44,4 +49,36 @@ public interface RuntimeOperations { * @return the runtime information about the deployed streams their apps and instances. */ PagedModel streamStatus(String... streamNames); + + /** + * Access an HTTP GET exposed actuator resource for a deployed app instance. + * + * @param appId the application id + * @param instanceId the application instance id + * @param endpoint the relative actuator path, e.g., {@code /info} + * @return the contents as JSON text + */ + String getFromActuator(String appId, String instanceId, String endpoint); + + /** + * Access an HTTP POST exposed actuator resource for a deployed app instance. + * + * @param appId the application id + * @param instanceId the application instance id + * @param endpoint the relative actuator path, e.g., {@code /info} + * @param data map representing the data to post on request body + * @return response from actuator + */ + Object postToActuator(String appId, String instanceId, String endpoint, Map data); + + /** + * Provides for POST to application HTTP endpoint exposed via url property. + * + * @param appId the application id + * @param instanceId the application instance id + * @param data data to send to url. The mimetype should be in the Content-Type header if important. + * @param headers post request headers. 
+	 * This method will throw an exception if the POST fails.
+	 */
+	void postToUrl(String appId, String instanceId, byte[] data, HttpHeaders headers);
 }
diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/RuntimeTemplate.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/RuntimeTemplate.java
index 1187398021..8a7ec49170 100644
--- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/RuntimeTemplate.java
+++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/RuntimeTemplate.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2015-2019 the original author or authors.
+ * Copyright 2015-2022 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -16,11 +16,27 @@
 package org.springframework.cloud.dataflow.rest.client;

+import java.time.Duration;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import org.springframework.cloud.dataflow.rest.resource.AppStatusResource;
 import org.springframework.cloud.dataflow.rest.resource.StreamStatusResource;
+import org.springframework.cloud.dataflow.rest.util.ArgumentSanitizer;
+import org.springframework.cloud.skipper.domain.ActuatorPostRequest;
 import org.springframework.hateoas.Link;
 import org.springframework.hateoas.PagedModel;
 import org.springframework.hateoas.RepresentationModel;
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.ResponseEntity;
+import org.springframework.util.Assert;
+import org.springframework.util.CollectionUtils;
 import org.springframework.web.client.RestTemplate;

 /**
@@ -29,8 +45,11 @@
 * @author Eric Bottard
 * @author Mark Fisher
 * @author Christian Tzolov
+ * @author Chris Bono
+ * @author Corneil du Plessis
 */
 public class RuntimeTemplate implements RuntimeOperations {
+	private static final Logger logger = LoggerFactory.getLogger(RuntimeTemplate.class);

	private final RestTemplate restTemplate;

@@ -44,6 +63,16 @@ public class RuntimeTemplate implements RuntimeOperations {
	 */
	private final Link appStatusUriTemplate;

+	/**
+	 * Uri template for accessing the actuator endpoint on a single app.
+	 */
+	private final Link appActuatorUriTemplate;
+
+	/**
+	 * Uri template for posting to an app instance via its url attribute.
+	 */
+	private final Link appUrlPostUriTemplate;
+
	/**
	 * Uri template for accessing runtime status of selected streams, their apps and instances.
*/ @@ -51,27 +80,104 @@ public class RuntimeTemplate implements RuntimeOperations { RuntimeTemplate(RestTemplate restTemplate, RepresentationModel resources) { this.restTemplate = restTemplate; - this.appStatusesUriTemplate = resources.getLink("runtime/apps").get(); - this.appStatusUriTemplate = resources.getLink("runtime/apps/{appId}").get(); - this.streamStatusUriTemplate = resources.getLink("runtime/streams/{streamNames}").get(); + this.appStatusesUriTemplate = getLink("runtime/apps", resources, true); + this.appStatusUriTemplate = getLink("runtime/apps/{appId}", resources, true); + this.streamStatusUriTemplate = getLink("runtime/streams/{streamNames}", resources, true); + this.appActuatorUriTemplate = getLink("runtime/apps/{appId}/instances/{instanceId}/actuator", resources, false); + this.appUrlPostUriTemplate = getLink("runtime/apps/{appId}/instances/{instanceId}/post", resources, false); + } + + private Link getLink(String relationPath, RepresentationModel resources, boolean required) { + Optional link = resources.getLink(relationPath); + if (required && !link.isPresent()) { + throw new RuntimeException("Unable to retrieve URI template for " + relationPath); + } + return link.orElse(null); } @Override public PagedModel status() { String uriTemplate = this.appStatusesUriTemplate.expand().getHref(); - uriTemplate = uriTemplate + "?size=2000"; + uriTemplate = uriTemplate + "?size=2000"; // TODO is this valid? return this.restTemplate.getForObject(uriTemplate, AppStatusResource.Page.class); } @Override public AppStatusResource status(String deploymentId) { - return this.restTemplate.getForObject(appStatusUriTemplate.expand(deploymentId).getHref(), AppStatusResource.class); + return this.restTemplate.getForObject( + appStatusUriTemplate.expand(deploymentId).getHref(), + AppStatusResource.class + ); } @Override public PagedModel streamStatus(String... 
streamNames) {
-		return this.restTemplate.getForObject(streamStatusUriTemplate.expand(streamNames).getHref(),
-				StreamStatusResource.Page.class);
+		return this.restTemplate.getForObject(
+				streamStatusUriTemplate.expand(streamNames).getHref(),
+				StreamStatusResource.Page.class
+		);
	}

+	@Override
+	public String getFromActuator(String appId, String instanceId, String endpoint) {
+		Assert.notNull(appActuatorUriTemplate, "actuator endpoint not found");
+		String uri = appActuatorUriTemplate.expand(appId, instanceId, endpoint).getHref();
+		return this.restTemplate.getForObject(uri, String.class);
+	}
+
+	@Override
+	public Object postToActuator(String appId, String instanceId, String endpoint, Map<String, Object> body) {
+		Assert.notNull(appActuatorUriTemplate, "actuator endpoint not found");
+		String uri = appActuatorUriTemplate.expand(appId, instanceId).getHref();
+		ActuatorPostRequest actuatorPostRequest = new ActuatorPostRequest();
+		actuatorPostRequest.setEndpoint(endpoint);
+		actuatorPostRequest.setBody(body);
+		return this.restTemplate.postForObject(uri, actuatorPostRequest, Object.class);
+	}
+
+	@Override
+	public void postToUrl(String appId, String instanceId, byte[] data, HttpHeaders headers) {
+		Assert.notNull(appUrlPostUriTemplate, "post endpoint not found");
+		String uri = appUrlPostUriTemplate.expand(appId, instanceId).getHref();
+		if (logger.isDebugEnabled()) {
+			ArgumentSanitizer sanitizer = new ArgumentSanitizer();
+			logger.debug("postToUrl:{}:{}:{}:{}", appId, instanceId, uri, sanitizer.sanitizeHeaders(headers));
+		}
+		// Wait once for the target url to become reachable before posting.
+		waitForUrl(uri, Duration.ofSeconds(30));
+		HttpEntity<byte[]> entity = new HttpEntity<>(data, headers);
+		ResponseEntity<String> response = this.restTemplate.exchange(uri, HttpMethod.POST, entity, String.class);
+		if (!response.getStatusCode().is2xxSuccessful()) {
+			throw new RuntimeException("POST:exception:" + response.getStatusCode() + ":" + response.getBody());
+		}
+	}
+
+	private void waitForUrl(String uri, Duration timeout) {
+		// Probe the url with OPTIONS until it answers or the timeout elapses,
+		// sleeping between attempts instead of busy-spinning.
+		final long waitUntilMillis = System.currentTimeMillis() + timeout.toMillis();
+		do {
+			try {
+				Set<HttpMethod> allowed = this.restTemplate.optionsForAllow(uri);
+				if (!CollectionUtils.isEmpty(allowed)) {
+					break;
+				}
+			} catch (Throwable x) {
+				final String message = String.valueOf(x.getMessage());
+				if (message.contains("UnknownHostException")) {
+					logger.trace("waitForUrl:retry:exception:" + x);
+				}
+				else if (message.contains("500")) {
+					break;
+				}
+				else {
+					logger.trace("waitForUrl:exception:" + x);
+				}
+			}
+			try {
+				Thread.sleep(2000L);
+			} catch (InterruptedException e) {
+				Thread.currentThread().interrupt();
+				break;
+			}
+		} while (System.currentTimeMillis() < waitUntilMillis);
+	}
 }
diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskOperations.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskOperations.java
index d1de94f656..a37a416a99 100644
--- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskOperations.java
+++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskOperations.java
@@ -23,10 +23,12 @@
 import javax.naming.OperationNotSupportedException;

 import org.springframework.cloud.dataflow.rest.resource.CurrentTaskExecutionsResource;
+import org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource;
 import org.springframework.cloud.dataflow.rest.resource.LauncherResource;
 import
org.springframework.cloud.dataflow.rest.resource.TaskAppStatusResource; import org.springframework.cloud.dataflow.rest.resource.TaskDefinitionResource; import org.springframework.cloud.dataflow.rest.resource.TaskExecutionResource; +import org.springframework.cloud.dataflow.rest.resource.TaskExecutionThinResource; import org.springframework.hateoas.PagedModel; /** @@ -36,6 +38,7 @@ * @author Michael Minella * @author Gunnar Hillert * @author David Turanski + * @author Corneil du Plessis */ public interface TaskOperations { @@ -53,8 +56,8 @@ public interface TaskOperations { /** * Create a new task definition * - * @param name the name of the task - * @param definition the task definition DSL + * @param name the name of the task + * @param definition the task definition DSL * @param description the description of the task definition * @return the task definition */ @@ -63,25 +66,25 @@ public interface TaskOperations { /** * Launch an already created task. * - * @param name the name of the task + * @param name the name of the task * @param properties the deployment properties - * @param arguments the command line arguments + * @param arguments the command line arguments * @return long containing the TaskExecutionId */ - long launch(String name, Map properties, List arguments); + LaunchResponseResource launch(String name, Map properties, List arguments); /** * Request the stop of a group {@link org.springframework.cloud.task.repository.TaskExecution}s. * - * @param ids comma delimited set of {@link org.springframework.cloud.task.repository.TaskExecution} ids to stop. + * @param ids comma delimited set of {@link org.springframework.cloud.task.repository.TaskExecution} ids to stop. */ void stop(String ids); /** * Request the stop of a group {@link org.springframework.cloud.task.repository.TaskExecution}s. * - * @param ids comma delimited set of {@link org.springframework.cloud.task.repository.TaskExecution} ids to stop. - * @param platform the platform name where the task is executing. + * @param ids comma delimited set of {@link org.springframework.cloud.task.repository.TaskExecution} ids to stop. + * @param platform the platform name where the task is executing. */ void stop(String ids, String platform); @@ -95,7 +98,7 @@ public interface TaskOperations { /** * Destroy an existing task with the flag to cleanup task resources. * - * @param name the name of the task + * @param name the name of the task * @param cleanup flag indicates task execution cleanup */ void destroy(String name, boolean cleanup); @@ -105,6 +108,17 @@ public interface TaskOperations { */ PagedModel executionList(); + /** + * @return the list of thin task executions known to the system. + */ + PagedModel thinExecutionList(); + + /** + * List task executions filtered by task name. + * @return the page of task executions for the given task name. + */ + PagedModel thinExecutionListByTaskName(String taskName); + /** * List task executions known to the system filtered by task name. * @@ -116,7 +130,7 @@ public interface TaskOperations { /** * Return the {@link TaskExecutionResource} for the id specified. * - * @param id identifier of the task execution + * @param id identifier of the task execution * @return {@link TaskExecutionResource} */ TaskExecutionResource taskExecutionStatus(long id); @@ -133,7 +147,7 @@ public interface TaskOperations { * Return the task execution log. * * @param externalExecutionId the external execution identifier of the task execution. - * @param platform the platform from which to obtain the log. 
+	 * @param platform            the platform from which to obtain the log.
	 * @return {@link String} containing the log.
	 */
	String taskExecutionLog(String externalExecutionId, String platform);

@@ -141,6 +155,7 @@
	/**
	 * Return information including the count of currently executing tasks and task execution
	 * limits.
+	 *
	 * @return Collection of {@link CurrentTaskExecutionsResource}
	 */
	Collection<CurrentTaskExecutionsResource> currentTaskExecutions();

@@ -148,15 +163,15 @@
	/**
	 * Cleanup any resources associated with the execution for the id specified.
	 *
-	 * @param id identifier of the task execution
+	 * @param id         identifier of the task execution
	 */
	void cleanup(long id);

	/**
	 * Cleanup any resources associated with the execution for the id specified.
	 *
-	 * @param id identifier of the task execution
-	 * @param removeData delete the history of the execution
+	 * @param id         identifier of the task execution
+	 * @param removeData delete the history of the execution
	 */
	void cleanup(long id, boolean removeData);

@@ -165,19 +180,22 @@
	 * Cleanup any resources associated with the matching task executions.
	 *
	 * @param completed cleanup only completed task executions
-	 * @param taskName the name of the task to cleanup, if null then all the tasks are considered.
+	 * @param taskName  the name of the task to cleanup, if null then all the tasks are considered.
	 */
	void cleanupAllTaskExecutions(boolean completed, String taskName);

	/**
	 * Get the task executions count with the option to filter only the completed task executions.
+	 *
	 * @param completed cleanup only completed task executions
-	 * @param taskName the name of the task to cleanup, if null then all the tasks are considered.
+	 * @param taskName  the name of the task to cleanup, if null then all the tasks are considered.
+	 * @return the number of task executions.
	 */
	Integer getAllTaskExecutionsCount(boolean completed, String taskName);

	/**
	 * Return the validation status for the tasks in a definition.
+	 *
	 * @param taskDefinitionName The name of the task definition to be validated.
	 * @return {@link TaskAppStatusResource} containing the task app statuses.
	 * @throws OperationNotSupportedException if the server does not support task validation
@@ -186,7 +204,6 @@
	/**
	 * Destroy all existing tasks.
-	 *
	 */
	void destroyAll();
 }
diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskTemplate.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskTemplate.java
index 77c92127e3..be51914d2b 100644
--- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskTemplate.java
+++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskTemplate.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2015-2021 the original author or authors.
+ * Copyright 2015-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
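A note on the TaskOperations change above: launch(...) now returns a LaunchResponseResource instead of a bare long execution id. The snippet below is an illustrative sketch only, not part of this diff; it assumes a connected DataFlowOperations client named dataFlowOperations and that LaunchResponseResource exposes the new execution id via getExecutionId().

    TaskOperations taskOperations = dataFlowOperations.taskOperations();
    // Launch with no deployment properties and no command line arguments.
    LaunchResponseResource response = taskOperations.launch(
            "my-task", Collections.emptyMap(), Collections.emptyList());
    // The id needed by taskExecutionStatus(...) now comes from the response resource.
    TaskExecutionResource status =
            taskOperations.taskExecutionStatus(response.getExecutionId());
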
@@ -20,19 +20,25 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; +import java.util.stream.Stream; import javax.naming.OperationNotSupportedException; import org.springframework.cloud.dataflow.rest.client.support.VersionUtils; import org.springframework.cloud.dataflow.rest.resource.CurrentTaskExecutionsResource; +import org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource; import org.springframework.cloud.dataflow.rest.resource.LauncherResource; import org.springframework.cloud.dataflow.rest.resource.TaskAppStatusResource; import org.springframework.cloud.dataflow.rest.resource.TaskDefinitionResource; import org.springframework.cloud.dataflow.rest.resource.TaskExecutionResource; +import org.springframework.cloud.dataflow.rest.resource.TaskExecutionThinResource; import org.springframework.cloud.dataflow.rest.resource.TaskExecutionsInfoResource; +import org.springframework.cloud.dataflow.rest.resource.about.AboutResource; import org.springframework.cloud.dataflow.rest.util.DeploymentPropertiesUtils; import org.springframework.core.ParameterizedTypeReference; import org.springframework.hateoas.Link; +import org.springframework.hateoas.PagedModel; import org.springframework.hateoas.RepresentationModel; import org.springframework.http.HttpMethod; import org.springframework.util.Assert; @@ -49,6 +55,7 @@ * @author Michael Minella * @author Gunnar Hillert * @author David Turanski + * @author Corneil du Plessis */ public class TaskTemplate implements TaskOperations { @@ -56,16 +63,20 @@ public class TaskTemplate implements TaskOperations { private static final String DEFINITION_RELATION = "tasks/definitions/definition"; - private static final String EXECUTIONS_CURRENT_RELATION_VERSION = "1.7.0"; - - private static final String VALIDATION_RELATION_VERSION = "1.7.0"; + private static final String VALIDATION_MIN_VERSION = "3.0.0-SNAPSHOT"; private static final String EXECUTIONS_RELATION = "tasks/executions"; + private static final String THIN_EXECUTIONS_RELATION = "tasks/thinexecutions"; + + private static final String THIN_EXECUTIONS_BY_NAME_RELATION = "tasks/thinexecutions/name"; + private static final String EXECUTIONS_CURRENT_RELATION = "tasks/executions/current"; private static final String EXECUTION_RELATION = "tasks/executions/execution"; + private static final String EXECUTION_LAUNCH_RELATION = "tasks/executions/launch"; + private static final String EXECUTION_RELATION_BY_NAME = "tasks/executions/name"; private static final String EXECUTIONS_INFO_RELATION = "tasks/info/executions"; @@ -84,8 +95,14 @@ public class TaskTemplate implements TaskOperations { private final Link executionsLink; + private final Link thinExecutionsLink; + + private final Link thinExecutionsByNameLink; + private final Link executionLink; + private final Link executionLaunchLink; + private final Link executionByNameLink; private final Link executionsCurrentLink; @@ -97,45 +114,56 @@ public class TaskTemplate implements TaskOperations { private final Link platformListLink; private final String dataFlowServerVersion; + private String actualDataFlowServerCoreVersion = null; private final Link retrieveLogLink; - + private final Link aboutLink; TaskTemplate(RestTemplate restTemplate, RepresentationModel resources, String dataFlowServerVersion) { Assert.notNull(resources, "URI CollectionModel must not be be null"); - Assert.notNull(resources.getLink(EXECUTIONS_RELATION), "Executions relation is required"); - 
Assert.notNull(resources.getLink(DEFINITIONS_RELATION), "Definitions relation is required"); - Assert.notNull(resources.getLink(DEFINITION_RELATION), "Definition relation is required"); Assert.notNull(restTemplate, "RestTemplate must not be null"); - Assert.notNull(resources.getLink(EXECUTIONS_RELATION), "Executions relation is required"); - Assert.notNull(resources.getLink(EXECUTION_RELATION), "Execution relation is required"); - Assert.notNull(resources.getLink(EXECUTION_RELATION_BY_NAME), "Execution by name relation is required"); Assert.notNull(dataFlowServerVersion, "dataFlowVersion must not be null"); - Assert.notNull(resources.getLink(RETRIEVE_LOG), "Log relation is required"); - + Assert.isTrue(resources.getLink("about").isPresent(), "Expected about relation"); + Stream.of( + "about", + DEFINITIONS_RELATION, + DEFINITION_RELATION, + EXECUTIONS_RELATION, + EXECUTION_RELATION, + EXECUTION_RELATION_BY_NAME, + EXECUTIONS_INFO_RELATION, + PLATFORM_LIST_RELATION, + RETRIEVE_LOG, + VALIDATION_REL, + EXECUTIONS_CURRENT_RELATION, + THIN_EXECUTIONS_RELATION, + THIN_EXECUTIONS_BY_NAME_RELATION, + EXECUTION_LAUNCH_RELATION + ).forEach(relation -> { + Assert.isTrue(resources.getLink(relation).isPresent(), () -> relation + " relation is required"); + }); this.dataFlowServerVersion = dataFlowServerVersion; + this.restTemplate = restTemplate; - if (VersionUtils.isDataFlowServerVersionGreaterThanOrEqualToRequiredVersion( - VersionUtils.getThreePartVersion(dataFlowServerVersion), - VALIDATION_RELATION_VERSION)) { - Assert.notNull(resources.getLink(VALIDATION_REL), "Validiation relation for tasks is required"); - } - - if (VersionUtils.isDataFlowServerVersionGreaterThanOrEqualToRequiredVersion( - VersionUtils.getThreePartVersion(dataFlowServerVersion), - EXECUTIONS_CURRENT_RELATION_VERSION)) { - Assert.notNull(resources.getLink(EXECUTIONS_CURRENT_RELATION), "Executions current relation is required"); + String version = VersionUtils.getThreePartVersion(dataFlowServerVersion); + if (StringUtils.hasText(version)) { + Assert.isTrue( + VersionUtils.isDataFlowServerVersionGreaterThanOrEqualToRequiredVersion(version, + VALIDATION_MIN_VERSION), + () -> "Minimum Data Flow version required is " + VALIDATION_MIN_VERSION + " but got " + version); } + this.executionsCurrentLink = resources.getLink(EXECUTIONS_CURRENT_RELATION).get(); + this.aboutLink = resources.getLink("about").get(); - this.restTemplate = restTemplate; this.definitionsLink = resources.getLink(DEFINITIONS_RELATION).get(); this.definitionLink = resources.getLink(DEFINITION_RELATION).get(); this.executionsLink = resources.getLink(EXECUTIONS_RELATION).get(); this.executionLink = resources.getLink(EXECUTION_RELATION).get(); + + this.thinExecutionsLink = resources.getLink(THIN_EXECUTIONS_RELATION).get(); + this.thinExecutionsByNameLink = resources.getLink(THIN_EXECUTIONS_BY_NAME_RELATION).get(); + this.executionLaunchLink = resources.getLink(EXECUTION_LAUNCH_RELATION).get(); this.executionByNameLink = resources.getLink(EXECUTION_RELATION_BY_NAME).get(); - this.executionsCurrentLink = resources.getLink(EXECUTIONS_CURRENT_RELATION).get(); - if (resources.getLink(EXECUTIONS_INFO_RELATION).isPresent()) { - this.executionsInfoLink = resources.getLink(EXECUTIONS_INFO_RELATION).get(); - } + this.executionsInfoLink = resources.getLink(EXECUTIONS_INFO_RELATION).get(); this.validationLink = resources.getLink(VALIDATION_REL).get(); this.platformListLink = resources.getLink(PLATFORM_LIST_RELATION).get(); this.retrieveLogLink = 
resources.getLink(RETRIEVE_LOG).get(); @@ -157,33 +185,36 @@ public LauncherResource.Page listPlatforms() { @Override public TaskDefinitionResource create(String name, String definition, String description) { - MultiValueMap values = new LinkedMultiValueMap(); + MultiValueMap values = new LinkedMultiValueMap<>(); values.add("name", name); values.add("definition", definition); values.add("description", description); - return restTemplate.postForObject(definitionsLink.expand().getHref(), values, - TaskDefinitionResource.class); + return restTemplate.postForObject(definitionsLink.expand().getHref(), values, + TaskDefinitionResource.class); } @Override - public long launch(String name, Map properties, List arguments) { + public LaunchResponseResource launch(String name, Map properties, List arguments) { MultiValueMap values = new LinkedMultiValueMap<>(); - values.add("properties", DeploymentPropertiesUtils.format(properties)); - values.add("arguments", StringUtils.collectionToDelimitedString(arguments, " ")); - return restTemplate.postForObject(executionByNameLink.expand(name).getHref(), values, Long.class, name); + String formattedProperties = DeploymentPropertiesUtils.format(properties); + String commandLineArguments = StringUtils.collectionToDelimitedString(arguments, " "); + values.add("properties", formattedProperties); + values.add("arguments", commandLineArguments); + String url = executionLaunchLink.expand(name).getHref(); + return restTemplate.postForObject(url, values, LaunchResponseResource.class); } @Override public void stop(String ids) { MultiValueMap values = new LinkedMultiValueMap<>(); - restTemplate.postForLocation(executionLink.expand(ids).getHref(),values); + restTemplate.postForLocation(executionLink.expand(ids).getHref(), values); } @Override public void stop(String ids, String platform) { MultiValueMap values = new LinkedMultiValueMap<>(); values.add("platform", platform); - restTemplate.postForLocation(executionLink.expand(ids).getHref(),values); + restTemplate.postForLocation(executionLink.expand(ids).getHref(), values); } @Override @@ -207,15 +238,36 @@ public TaskExecutionResource.Page executionList() { return restTemplate.getForObject(executionsLink.getHref(), TaskExecutionResource.Page.class); } + @Override + public PagedModel thinExecutionList() { + if(thinExecutionsLink != null) { + return restTemplate.getForObject(thinExecutionsLink.getHref(), TaskExecutionThinResource.Page.class); + } else { + return restTemplate.getForObject(executionsLink.getHref(), TaskExecutionThinResource.Page.class); + } + } + + @Override + public PagedModel thinExecutionListByTaskName(String taskName) { + if(thinExecutionsByNameLink != null) { + return restTemplate.getForObject(thinExecutionsByNameLink.expand(taskName).getHref(), TaskExecutionThinResource.Page.class); + } else { + return restTemplate.getForObject(executionByNameLink.expand(taskName).getHref(), TaskExecutionThinResource.Page.class); + } + } + @Override public TaskExecutionResource.Page executionListByTaskName(String taskName) { return restTemplate.getForObject(executionByNameLink.expand(taskName).getHref(), - TaskExecutionResource.Page.class); + TaskExecutionResource.Page.class); } @Override public TaskExecutionResource taskExecutionStatus(long id) { - return restTemplate.getForObject(executionLink.expand(id).getHref(), TaskExecutionResource.class); + MultiValueMap values = new LinkedMultiValueMap<>(); + values.add("id", id); + String url = executionLink.expand(values).getHref(); + return restTemplate.getForObject(url, 
TaskExecutionResource.class); } @Override @@ -225,8 +277,8 @@ public String taskExecutionLog(String externalExecutionId) { @Override public String taskExecutionLog(String externalExecutionId, String platform) { - Map map = new HashMap<>(); - map.put("taskExternalExecutionId",externalExecutionId); + Map map = new HashMap<>(); + map.put("taskExternalExecutionId", externalExecutionId); map.put("platformName", platform); return restTemplate.getForObject(retrieveLogLink.expand(map).getHref(), String.class); } @@ -235,9 +287,9 @@ public String taskExecutionLog(String externalExecutionId, String platform) { public Collection currentTaskExecutions() { ParameterizedTypeReference> typeReference = new ParameterizedTypeReference>() { - }; + }; return restTemplate - .exchange(executionsCurrentLink.getHref(),HttpMethod.GET,null, typeReference).getBody(); + .exchange(executionsCurrentLink.getHref(), HttpMethod.GET, null, typeReference).getBody(); } @Override @@ -246,12 +298,16 @@ public void cleanup(long id) { } @Override - public void cleanup(long id, boolean removeData) { - String uriTemplate = executionLink.expand(id).getHref(); - if (removeData) { - uriTemplate = uriTemplate + "?action=CLEANUP,REMOVE_DATA"; - } - restTemplate.delete(uriTemplate); + public void cleanup(long id, boolean removeData) { + MultiValueMap values = new LinkedMultiValueMap<>(); + + String uriTemplate = executionLink.expand(id).getHref(); + + if (removeData) { + uriTemplate = uriTemplate + "?action=CLEANUP,REMOVE_DATA"; + } + + restTemplate.delete(uriTemplate); } @Override @@ -268,13 +324,13 @@ public void cleanupAllTaskExecutions(boolean completed, String taskName) { @Override public Integer getAllTaskExecutionsCount(boolean completed, String taskName) { - Map map = new HashMap<>(); + Map map = new HashMap<>(); map.put("completed", String.valueOf(completed)); map.put("name", StringUtils.hasText(taskName) ? 
taskName : ""); if (this.executionsInfoLink != null) { - return restTemplate - .getForObject(this.executionsInfoLink.expand(map).getHref(), TaskExecutionsInfoResource.class) - .getTotalExecutions(); + return Objects.requireNonNull( + restTemplate.getForObject(this.executionsInfoLink.expand(map).getHref(), TaskExecutionsInfoResource.class) + ).getTotalExecutions(); } // for backwards-compatibility return zero count return 0; @@ -283,10 +339,10 @@ public Integer getAllTaskExecutionsCount(boolean completed, String taskName) { @Override public TaskAppStatusResource validateTaskDefinition(String taskDefinitionName) - throws OperationNotSupportedException { + throws OperationNotSupportedException { if (validationLink == null) { throw new OperationNotSupportedException("Task Validation not supported on Data Flow Server version " - + dataFlowServerVersion); + + dataFlowServerVersion); } String uriTemplate = this.validationLink.expand(taskDefinitionName).getHref(); return restTemplate.getForObject(uriTemplate, TaskAppStatusResource.class); diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientAutoConfiguration.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientAutoConfiguration.java index 0ddaf5b151..bef770165d 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientAutoConfiguration.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientAutoConfiguration.java @@ -16,19 +16,19 @@ package org.springframework.cloud.dataflow.rest.client.config; import java.net.URI; -import java.util.ArrayList; import java.util.HashMap; -import java.util.List; import java.util.Map; +import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.autoconfigure.security.oauth2.client.OAuth2ClientProperties; -import org.springframework.boot.autoconfigure.security.oauth2.client.OAuth2ClientPropertiesRegistrationAdapter; +import org.springframework.boot.autoconfigure.security.oauth2.client.OAuth2ClientPropertiesMapper; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.cloud.common.security.core.support.OAuth2AccessTokenProvidingClientHttpRequestInterceptor; import org.springframework.cloud.dataflow.core.DataFlowPropertyKeys; @@ -59,6 +59,7 @@ import org.springframework.security.oauth2.client.registration.ClientRegistrationRepository; import org.springframework.security.oauth2.client.registration.InMemoryClientRegistrationRepository; import org.springframework.security.oauth2.core.AuthorizationGrantType; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; import org.springframework.web.client.RestTemplate; @@ -67,7 +68,7 @@ * @author Vinicius Carvalho * @author Gunnar Hillert */ -@Configuration +@AutoConfiguration @EnableConfigurationProperties(DataFlowClientProperties.class) public class DataFlowClientAutoConfiguration { @@ -78,7 +79,6 @@ public class 
DataFlowClientAutoConfiguration { @Autowired private DataFlowClientProperties properties; - @Autowired(required = false) private RestTemplate restTemplate; @Autowired @@ -90,9 +90,13 @@ public class DataFlowClientAutoConfiguration { @Autowired private @Nullable OAuth2ClientProperties oauth2ClientProperties; + public DataFlowClientAutoConfiguration(@Nullable RestTemplate restTemplate) { + this.restTemplate = restTemplate; + } + @Bean @ConditionalOnMissingBean(DataFlowOperations.class) - public DataFlowOperations dataFlowOperations() throws Exception{ + public DataFlowOperations dataFlowOperations(@Nullable ObjectMapper mapper) throws Exception{ RestTemplate template = DataFlowTemplate.prepareRestTemplate(restTemplate); final HttpClientConfigurer httpClientConfigurer = HttpClientConfigurer.create(new URI(properties.getServerUri())) .skipTlsCertificateVerification(properties.isSkipSslValidation()); @@ -107,8 +111,8 @@ else if (StringUtils.hasText(this.properties.getAuthentication().getClientId())) clientRegistrations, this.properties.getAuthentication().getClientId())); logger.debug("Configured OAuth2 Client Credentials for accessing the Data Flow Server"); } - else if(!StringUtils.isEmpty(properties.getAuthentication().getBasic().getUsername()) && - !StringUtils.isEmpty(properties.getAuthentication().getBasic().getPassword())){ + else if(!ObjectUtils.isEmpty(properties.getAuthentication().getBasic().getUsername()) && + !ObjectUtils.isEmpty(properties.getAuthentication().getBasic().getPassword())){ httpClientConfigurer.basicAuthCredentials(properties.getAuthentication().getBasic().getUsername(), properties.getAuthentication().getBasic().getPassword()); template.setRequestFactory(httpClientConfigurer.buildClientHttpRequestFactory()); @@ -127,7 +131,7 @@ else if (oauth2ClientProperties != null && !oauth2ClientProperties.getRegistrati logger.debug("Not configuring security for accessing the Data Flow Server"); } - return new DataFlowTemplate(new URI(properties.getServerUri()), template); + return new DataFlowTemplate(new URI(properties.getServerUri()), template, mapper); } @Bean @@ -137,7 +141,7 @@ public StreamBuilder streamBuilder(DataFlowOperations dataFlowOperations){ } @ConditionalOnProperty(prefix = DataFlowPropertyKeys.PREFIX + "client.authentication", name = "client-id") - @Configuration + @Configuration(proxyBeanMethods = false) static class ClientCredentialsConfiguration { @Bean @@ -185,9 +189,8 @@ private ClientHttpRequestInterceptor clientCredentialsTokenResolvingInterceptor( private static final Authentication DEFAULT_PRINCIPAL = createAuthentication("dataflow-client-principal"); private ClientRegistrationRepository shellClientRegistrationRepository(OAuth2ClientProperties properties) { - List registrations = new ArrayList<>( - OAuth2ClientPropertiesRegistrationAdapter.getClientRegistrations(properties).values()); - return new InMemoryClientRegistrationRepository(registrations); + var oauthClientPropsMapper = new OAuth2ClientPropertiesMapper(properties); + return new InMemoryClientRegistrationRepository(oauthClientPropsMapper.asClientRegistrations().values().stream().toList()); } private OAuth2AuthorizedClientService shellAuthorizedClientService(ClientRegistrationRepository shellClientRegistrationRepository) { diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/Stream.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/Stream.java index 0341048733..26cd78ac93 100644 --- 
a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/Stream.java
+++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/Stream.java
@@ -21,6 +21,7 @@
 import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.stream.Collectors;

@@ -178,7 +179,7 @@ public Map<String, String> history() {
		Collection<Release> history = this.client.streamOperations().history(this.name);
		return history.stream().collect(Collectors.toMap(
				Release::getVersion,
-				r -> r.getInfo().getStatus().getStatusCode().toString().toLowerCase()));
+				r -> r.getInfo().getStatus().getStatusCode().toString().toLowerCase(Locale.ROOT)));
	}

	/**
@@ -217,6 +218,10 @@ public String logs(StreamApplication app) {
		return this.client.streamOperations().streamExecutionLog(this.name, appDeploymentId);
	}

+	/**
+	 * @return Returns a map of the stream applications, associating every application with its application instances
+	 * and their current runtime states: {@code (App -> (AppInstanceId -> AppInstanceState))}.
+	 */
	public Map<StreamApplication, Map<String, String>> runtimeApps() {

		StreamStatusResource streamStatus = client.runtimeOperations()
diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/task/Task.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/task/Task.java
index 9f3db92b7a..fcf3f23f60 100644
--- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/task/Task.java
+++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/task/Task.java
@@ -22,6 +22,7 @@
 import java.util.Map;
 import java.util.Objects;
 import java.util.Optional;
+import java.util.Set;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;

@@ -29,7 +30,10 @@
 import org.springframework.cloud.dataflow.rest.client.JobOperations;
 import org.springframework.cloud.dataflow.rest.client.TaskOperations;
 import org.springframework.cloud.dataflow.rest.resource.JobExecutionResource;
+import org.springframework.cloud.dataflow.rest.resource.JobExecutionThinResource;
 import org.springframework.cloud.dataflow.rest.resource.JobInstanceResource;
+import org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource;
+import org.springframework.cloud.dataflow.rest.resource.StepExecutionResource;
 import org.springframework.cloud.dataflow.rest.resource.TaskDefinitionResource;
 import org.springframework.cloud.dataflow.rest.resource.TaskExecutionResource;
 import org.springframework.cloud.dataflow.rest.resource.TaskExecutionStatus;
@@ -38,7 +42,7 @@
 /**
 * Represents a Task defined on DataFlow server. New Task can be defined with the help of a fluent style builder
 * pattern or use the {@link Task} static utility methods to retrieve existing tasks already defined in DataFlow.
- *
+ * <p>
 * For instance, you can define a new task like this:
 * <pre>
 *     {@code
@@ -49,7 +53,7 @@
 *              .build();
 *     }
 * </pre>
- *
+ * <p>
 * Next you can launch the task and inspect the execution results. Mind that the task is run asynchronously.
 * <pre>
 *     import org.awaitility.Awaitility;
@@ -64,7 +68,7 @@
 *          task.executions().forEach( execution -> System.out.println(execution.getExitCode()));
 *     }
 * </pre>
- *
+ * <p>
 * Use {@code close()} to destroy the task manually. Since tasks are auto-closeable you can use the
 * Java try block instead:
 * <pre>
@@ -81,7 +85,7 @@
 *          } // Task is destroyed.
 *     }
 * </pre>
- *
+ * <p>
    * Use the {@link TaskBuilder#allTasks()} and {@link TaskBuilder#findByName(String)} * static helper methods to list or retrieve existing tasks defined in DataFlow. * @@ -89,8 +93,11 @@ */ public class Task implements AutoCloseable { private final String taskName; + private final TaskOperations taskOperations; + private final JobOperations jobOperations; + private final DataFlowOperations dataFlowOperations; Task(String taskName, DataFlowOperations dataFlowOperations) { @@ -106,6 +113,7 @@ public class Task implements AutoCloseable { /** * Fluent API method to create a {@link TaskBuilder}. + * * @param dataFlowOperations {@link DataFlowOperations} Data Flow Rest client instance. * @return A fluent style builder to create tasks. */ @@ -115,28 +123,31 @@ public static TaskBuilder builder(DataFlowOperations dataFlowOperations) { /** * Launch a task without properties or arguments. + * * @return long containing the TaskExecutionId */ - public long launch() { - return this.launch(Collections.EMPTY_LIST); + public LaunchResponseResource launch() { + return this.launch(Collections.emptyList()); } /** * Launch a task with command line arguments. + * * @param arguments the command line arguments. * @return long containing the TaskExecutionId */ - public long launch(List arguments) { - return this.launch(Collections.EMPTY_MAP, arguments); + public LaunchResponseResource launch(List arguments) { + return this.launch(Collections.emptyMap(), arguments); } /** * Launch a task with deployment properties and command line arguments. + * * @param properties the deployment properties. - * @param arguments the command line arguments. + * @param arguments the command line arguments. * @return long containing the TaskExecutionId */ - public long launch(Map properties, List arguments) { + public LaunchResponseResource launch(Map properties, List arguments) { if (properties == null) { throw new IllegalArgumentException("Task properties can't be null!"); } @@ -145,31 +156,31 @@ public long launch(Map properties, List arguments) { /** * Stop all Tasks' running {@link org.springframework.cloud.task.repository.TaskExecution}s. - * + *
<p>
    * Note: this functionality is platform dependent! It works for local platform but does nothing on K8s! */ public void stop() { - String commaSeparatedIds = executions().stream() - .filter(Objects::nonNull) - .filter(e -> e.getTaskExecutionStatus() == TaskExecutionStatus.RUNNING) - .map(TaskExecutionResource::getExecutionId) - .map(String::valueOf) - .collect(Collectors.joining(",")); - if (StringUtils.hasText(commaSeparatedIds)) { - this.taskOperations.stop(commaSeparatedIds); - } + Set idTargets = executions().stream() + .filter(Objects::nonNull) + .filter(e -> e.getTaskExecutionStatus() == TaskExecutionStatus.RUNNING) + .collect(Collectors.toSet()); + String ids = idTargets.stream() + .map(taskExecutionResource -> String.valueOf(taskExecutionResource.getExecutionId())) + .collect(Collectors.joining(",")); + this.taskOperations.stop(ids); } /** * Stop a list of {@link org.springframework.cloud.task.repository.TaskExecution}s. - * @param taskExecutionIds List of {@link org.springframework.cloud.task.repository.TaskExecution} ids to stop. * - * Note: this functionality is platform dependent! It works for local platform but does nothing on K8s! + * @param taskExecutionIds List of {@link org.springframework.cloud.task.repository.TaskExecution} ids to stop. + *
<p>
    + * Note: this functionality is platform dependent! It works for local platform but does nothing on K8s! */ public void stop(long... taskExecutionIds) { String commaSeparatedIds = Stream.of(taskExecutionIds) - .map(String::valueOf) - .collect(Collectors.joining(",")); + .map(String::valueOf) + .collect(Collectors.joining(",")); if (StringUtils.hasText(commaSeparatedIds)) { this.taskOperations.stop(commaSeparatedIds); } @@ -188,6 +199,7 @@ public void destroy() { /** * List task executions for this task. + * * @return List of task executions for the given task. */ public Collection executions() { @@ -196,37 +208,37 @@ public Collection executions() { /** * Retrieve task execution by Id. - * @param executionId Task execution Id + * + * @param executionId Task execution Id * @return Task executions for the given task execution id. */ public Optional execution(long executionId) { - return this.executions().stream() - .filter(Objects::nonNull) - .filter(e -> e.getExecutionId() == executionId) - .findFirst(); + return Optional.ofNullable(this.taskOperations.taskExecutionStatus(executionId)); } /** * Find {@link TaskExecutionResource} by a parent execution id. + * * @param parentExecutionId parent task execution id. * @return Return TaskExecutionResource */ public Optional executionByParentExecutionId(long parentExecutionId) { return this.executions().stream() - .filter(Objects::nonNull) - .filter(e -> e.getParentExecutionId() == parentExecutionId) - .findFirst(); + .filter(Objects::nonNull) + .filter(e -> e.getParentExecutionId() == parentExecutionId) + .findFirst(); } /** * Task execution status - * @param executionId execution Id + * + * @param executionId execution Id. * @return returns the task execution status. */ public TaskExecutionStatus executionStatus(long executionId) { return this.execution(executionId) - .map(TaskExecutionResource::getTaskExecutionStatus) - .orElse(TaskExecutionStatus.UNKNOWN); + .map(TaskExecutionResource::getTaskExecutionStatus) + .orElse(TaskExecutionStatus.UNKNOWN); } /** @@ -241,12 +253,12 @@ public boolean isComposed() { */ public List composedTaskChildTasks() { return !isComposed() ? - new ArrayList<>() : - this.taskOperations.list().getContent().stream() - .filter(Objects::nonNull) - .filter(t -> t.getName().startsWith(this.taskName + "-")) - .map(t -> new Task(t.getName(), this.dataFlowOperations)) - .collect(Collectors.toList()); + new ArrayList<>() : + this.taskOperations.list().getContent().stream() + .filter(Objects::nonNull) + .filter(t -> t.getName().startsWith(this.taskName + "-")) + .map(t -> new Task(t.getName(), this.dataFlowOperations)) + .collect(Collectors.toList()); } /** @@ -255,7 +267,7 @@ public List composedTaskChildTasks() { */ public Optional composedTaskChildTaskByLabel(String childTaskLabel) { return this.composedTaskChildTasks().stream() - .filter(childTask -> childTask.getTaskName().endsWith("-" + childTaskLabel)).findFirst(); + .filter(childTask -> childTask.getTaskName().endsWith("-" + childTaskLabel)).findFirst(); } @@ -270,6 +282,18 @@ public Collection jobExecutionResources() { return this.jobOperations.executionListByJobName(this.taskName).getContent(); } + public Collection thinkJobExecutionResources() { + return this.jobOperations.executionThinListByJobName(this.taskName).getContent(); + } + + /** + * @param jobExecutionId the job execution id. + * @return Returns list of {@link StepExecutionResource} belonging to the job. 
+ */ + public Collection jobStepExecutions(long jobExecutionId) { + return this.jobOperations.stepExecutionList(jobExecutionId).getContent(); + } + /** * @return Returns list of {@link JobInstanceResource} belonging to this task. */ @@ -279,9 +303,9 @@ public Collection jobInstanceResources() { private Optional definitionResource() { return this.taskOperations.list().getContent().stream() - .filter(Objects::nonNull) - .filter(t -> t.getName().equals(this.taskName)) - .findFirst(); + .filter(Objects::nonNull) + .filter(t -> t.getName().equals(this.taskName)) + .findFirst(); } /** @@ -295,4 +319,36 @@ public String getTaskName() { public void close() { destroy(); } + + //-------------------------------------------------------------------------------------------------------- + // TASK EXECUTION CLEANUP + //-------------------------------------------------------------------------------------------------------- + + /** + * Remove specified task execution for the specified task execution id. + * + * @param taskExecutionId the id of the task execution to be removed. + */ + public void cleanupTaskExecution(long taskExecutionId) { + this.taskOperations.cleanup(taskExecutionId, true); + } + + /** + * Remove all task executions. + */ + public void cleanupAllTaskExecutions() { + this.taskOperations.cleanupAllTaskExecutions(false, null); + } + + /** + * Retrieve task executions for child task name associated with this task's instance. + * + * @param childTaskName to be used to search for the associated task executions. + * @return List of task executions for the given child task. + */ + public Optional composedTaskChildExecution(String childTaskName) { + Collection taskExecutions = taskOperations.executionListByTaskName(this.taskName + "-" + childTaskName).getContent(); + return (taskExecutions.size() == 1) ? 
Optional.of((TaskExecutionResource) taskExecutions.stream().toArray()[0]) : Optional.empty(); + } + } diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/task/TaskSchedule.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/task/TaskSchedule.java index 9279f4ce54..ade6996f40 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/task/TaskSchedule.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/task/TaskSchedule.java @@ -47,7 +47,7 @@ */ public class TaskSchedule implements AutoCloseable { - public static final String CRON_EXPRESSION_KEY = "scheduler.cron.expression"; + public static final String CRON_EXPRESSION_KEY = "deployer.cron.expression"; private final String scheduleName; diff --git a/spring-cloud-dataflow-rest-client/src/main/resources/META-INF/spring.factories b/spring-cloud-dataflow-rest-client/src/main/resources/META-INF/spring.factories deleted file mode 100644 index 60509a5f3e..0000000000 --- a/spring-cloud-dataflow-rest-client/src/main/resources/META-INF/spring.factories +++ /dev/null @@ -1,2 +0,0 @@ -org.springframework.boot.autoconfigure.EnableAutoConfiguration=\ - org.springframework.cloud.dataflow.rest.client.config.DataFlowClientAutoConfiguration diff --git a/spring-cloud-dataflow-rest-client/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports b/spring-cloud-dataflow-rest-client/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports new file mode 100644 index 0000000000..9078ad8062 --- /dev/null +++ b/spring-cloud-dataflow-rest-client/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports @@ -0,0 +1 @@ +org.springframework.cloud.dataflow.rest.client.config.DataFlowClientAutoConfiguration \ No newline at end of file diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataFlowClientAutoConfigurationTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataFlowClientAutoConfigurationTests.java index 121459f91f..b7f0ff8b7e 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataFlowClientAutoConfigurationTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataFlowClientAutoConfigurationTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
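A note on the TaskSchedule change above: the cron property key moved from the scheduler. prefix to the deployer. prefix, so schedule property maps must be rebuilt accordingly. The snippet below is an illustrative sketch only, not part of this diff; only the CRON_EXPRESSION_KEY constant comes from the change above.

    // Schedule properties are now keyed by "deployer.cron.expression".
    Map<String, String> scheduleProperties = Collections.singletonMap(
            TaskSchedule.CRON_EXPRESSION_KEY, "0 0 * * *");
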
@@ -17,12 +17,12 @@ import java.util.Collections; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration; import org.springframework.cloud.dataflow.rest.client.config.DataFlowClientProperties; import org.springframework.cloud.dataflow.rest.client.dsl.StreamBuilder; import org.springframework.context.ConfigurableApplicationContext; @@ -30,19 +30,22 @@ import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter; import org.springframework.web.client.RestTemplate; +import static org.assertj.core.api.Assertions.assertThat; + /** * @author Vinicius Carvalho + * @author Corneil du Plessis */ -public class DataFlowClientAutoConfigurationTests { +class DataFlowClientAutoConfigurationTests { @Test - public void contextLoads() throws Exception { + void contextLoads() throws Exception { ConfigurableApplicationContext applicationContext = SpringApplication.run(TestApplication.class, "--spring.cloud.dataflow.client.enableDsl=true", "--spring.autoconfigure.exclude=org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryDeployerAutoConfiguration,org.springframework.cloud.deployer.spi.kubernetes.KubernetesAutoConfiguration"); - Assert.assertNotNull(applicationContext.getBean(DataFlowTemplate.class)); - Assert.assertNotNull(applicationContext.getBean(StreamBuilder.class)); + assertThat(applicationContext.getBean(DataFlowTemplate.class)).isNotNull(); + assertThat(applicationContext.getBean(StreamBuilder.class)).isNotNull(); RestTemplate template = applicationContext.getBean(RestTemplate.class); //No auth Mockito.verify(template, Mockito.times(0)).setRequestFactory(Mockito.any()); @@ -50,24 +53,24 @@ public void contextLoads() throws Exception { } @Test - public void usingAuthentication() throws Exception { + void usingAuthentication() throws Exception { ConfigurableApplicationContext applicationContext = SpringApplication.run(TestApplication.class, "--spring.cloud.dataflow.client.authentication.basic.username=foo", "--spring.cloud.dataflow.client.authentication.basic.password=bar", "--spring.autoconfigure.exclude=org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryDeployerAutoConfiguration,org.springframework.cloud.deployer.spi.kubernetes.KubernetesAutoConfiguration"); - Assert.assertNotNull(applicationContext.getBean(DataFlowTemplate.class)); - Assert.assertNotNull(applicationContext.getBean(StreamBuilder.class)); + assertThat(applicationContext.getBean(DataFlowTemplate.class)).isNotNull(); + assertThat(applicationContext.getBean(StreamBuilder.class)).isNotNull(); RestTemplate template = applicationContext.getBean(RestTemplate.class); DataFlowClientProperties properties = applicationContext.getBean(DataFlowClientProperties.class); - Assert.assertNotNull(properties.getAuthentication()); - Assert.assertEquals("foo", properties.getAuthentication().getBasic().getUsername()); - Assert.assertEquals("bar", properties.getAuthentication().getBasic().getPassword()); + assertThat(properties.getAuthentication()).isNotNull(); + assertThat(properties.getAuthentication().getBasic().getUsername()).isEqualTo("foo"); + assertThat(properties.getAuthentication().getBasic().getPassword()).isEqualTo("bar"); Mockito.verify(template, Mockito.times(1)).setRequestFactory(Mockito.any()); applicationContext.close(); } - 
@SpringBootApplication + @SpringBootApplication(exclude= {DataSourceAutoConfiguration.class}) static class TestApplication { @Bean diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataflowClientExceptionTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataflowClientExceptionTests.java index 81d8eaac51..f62a0054e7 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataflowClientExceptionTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataflowClientExceptionTests.java @@ -15,28 +15,29 @@ */ package org.springframework.cloud.dataflow.rest.client; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.hateoas.Link; import org.springframework.hateoas.mediatype.vnderrors.VndErrors; import org.springframework.hateoas.mediatype.vnderrors.VndErrors.VndError; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; /** * @author Gunnar Hillert + * @author Corneil du Plessis */ -public class DataflowClientExceptionTests { +class DataflowClientExceptionTests { @Test - public void testCreationOfDataflowClientExceptionWithNullError() { + void creationOfDataflowClientExceptionWithNullError() { try { new DataFlowClientException(null); } catch (IllegalArgumentException e) { - assertEquals("The provided vndErrors parameter must not be null.", e.getMessage()); + assertThat(e.getMessage()).isEqualTo("The provided vndErrors parameter must not be null."); return; } @@ -44,19 +45,19 @@ public void testCreationOfDataflowClientExceptionWithNullError() { } @Test - public void testCreationOfDataflowClientExceptionWithSingleError() { - final VndErrors errors = new VndErrors("foo", "bar message", new Link("somewhere")); + void creationOfDataflowClientExceptionWithSingleError() { + final VndErrors errors = new VndErrors("foo", "bar message", Link.of("somewhere")); final DataFlowClientException dataFlowClientException = new DataFlowClientException(errors); - assertEquals("bar message", dataFlowClientException.getMessage()); + assertThat(dataFlowClientException.getMessage()).isEqualTo("bar message"); } @Test - public void testCreationOfDataflowClientExceptionWithMultipleErrors() { - final VndError vndError1 = new VndError("foo logref", "foo message", new Link("foo link")); - final VndError vndError2 = new VndError("bar logref", "bar message", new Link("bar link")); + void creationOfDataflowClientExceptionWithMultipleErrors() { + final VndError vndError1 = new VndError("foo logref", "foo message", Link.of("foo link")); + final VndError vndError2 = new VndError("bar logref", "bar message", Link.of("bar link")); final VndErrors errors = new VndErrors(vndError1, vndError2); final DataFlowClientException dataFlowClientException = new DataFlowClientException(errors); - assertEquals("foo message\nbar message", dataFlowClientException.getMessage()); + assertThat(dataFlowClientException.getMessage()).isEqualTo("foo message\nbar message"); } } diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataflowTemplateTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataflowTemplateTests.java index 9cf3fcc411..9b64161f76 
100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataflowTemplateTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataflowTemplateTests.java @@ -23,9 +23,11 @@ import java.util.Optional; import com.fasterxml.jackson.databind.ObjectMapper; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import com.fasterxml.jackson.datatype.jdk8.Jdk8Module; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.JobExecution; @@ -37,18 +39,18 @@ import org.springframework.cloud.dataflow.rest.Version; import org.springframework.cloud.dataflow.rest.job.StepExecutionHistory; import org.springframework.cloud.dataflow.rest.resource.RootResource; +import org.springframework.cloud.dataflow.rest.support.jackson.Jackson2DataflowModule; import org.springframework.hateoas.Link; import org.springframework.hateoas.LinkRelation; +import org.springframework.hateoas.mediatype.hal.Jackson2HalModule; import org.springframework.http.converter.HttpMessageConverter; import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter; import org.springframework.web.client.ResourceAccessException; import org.springframework.web.client.RestTemplate; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; @@ -57,63 +59,73 @@ /** * @author Gunnar Hillert * @author Glenn Renfro + * @author Corneil du Plessis */ -public class DataflowTemplateTests { +class DataflowTemplateTests { - @Before - public void setup() { + private ObjectMapper mapper; + + @BeforeEach + void setup() { + mapper = new ObjectMapper(); + mapper.registerModule(new Jdk8Module()); + mapper.registerModule(new Jackson2HalModule()); + mapper.registerModule(new JavaTimeModule()); + mapper.registerModule(new Jackson2DataflowModule()); System.setProperty("sun.net.client.defaultConnectTimeout", String.valueOf(100)); } - @After - public void shutdown() { + @AfterEach + void shutdown() { System.clearProperty("sun.net.client.defaultConnectTimeout"); } @Test - public void testDataFlowTemplateContructorWithNullUri() throws URISyntaxException { + void dataFlowTemplateContructorWithNullUri() throws URISyntaxException { try { - new DataFlowTemplate(null); + new DataFlowTemplate(null, mapper); } catch (IllegalArgumentException e) { - assertEquals("The provided baseURI must not be null.", e.getMessage()); + assertThat(e.getMessage()).isEqualTo("The provided baseURI must not be null."); return; } fail("Expected an IllegalArgumentException to be thrown."); } - @Test(expected = ResourceAccessException.class) - public void testDataFlowTemplateContructorWithNonExistingUri() throws URISyntaxException { - new DataFlowTemplate(new URI("https://doesnotexist:1234/")); + @Test + void 
dataFlowTemplateContructorWithNonExistingUri() throws URISyntaxException { + assertThatExceptionOfType(ResourceAccessException.class).isThrownBy(() -> { + new DataFlowTemplate(new URI("https://doesnotexist:1234/"), mapper); + }); } @Test - public void testThatObjectMapperGetsPrepared() { + void thatObjectMapperGetsPrepared() { final ObjectMapper objectMapper = new ObjectMapper(); DataFlowTemplate.prepareObjectMapper(objectMapper); assertCorrectMixins(objectMapper); } @Test - public void testPrepareObjectMapperWithNullObjectMapper() { + void prepareObjectMapperWithNullObjectMapper() { try { DataFlowTemplate.prepareObjectMapper(null); fail("Expected an IllegalArgumentException to be thrown."); } catch (IllegalArgumentException e) { - assertEquals("The objectMapper must not be null.", e.getMessage()); + assertThat(e.getMessage()).isEqualTo("The objectMapper must not be null."); return; } } @Test - public void testThatDefaultDataflowRestTemplateContainsMixins() { + void thatDefaultDataflowRestTemplateContainsMixins() { final RestTemplate restTemplate = DataFlowTemplate.getDefaultDataflowRestTemplate(); - assertNotNull(restTemplate); - assertTrue(restTemplate.getErrorHandler() instanceof VndErrorResponseErrorHandler); + assertThat(restTemplate).isNotNull(); + assertThat(restTemplate.getErrorHandler() instanceof VndErrorResponseErrorHandler).isTrue(); assertCorrectMixins(restTemplate); @@ -123,10 +135,8 @@ private void assertCorrectMixins(RestTemplate restTemplate) { boolean containsMappingJackson2HttpMessageConverter = false; for (HttpMessageConverter converter : restTemplate.getMessageConverters()) { - if (converter instanceof MappingJackson2HttpMessageConverter) { + if (converter instanceof MappingJackson2HttpMessageConverter jacksonConverter) { containsMappingJackson2HttpMessageConverter = true; - - final MappingJackson2HttpMessageConverter jacksonConverter = (MappingJackson2HttpMessageConverter) converter; assertCorrectMixins(jacksonConverter.getObjectMapper()); } } @@ -138,42 +148,42 @@ private void assertCorrectMixins(RestTemplate restTemplate) { } private void assertCorrectMixins(ObjectMapper objectMapper) { - assertNotNull(objectMapper.findMixInClassFor(JobExecution.class)); - assertNotNull(objectMapper.findMixInClassFor(JobParameters.class)); - assertNotNull(objectMapper.findMixInClassFor(JobParameter.class)); - assertNotNull(objectMapper.findMixInClassFor(JobInstance.class)); - assertNotNull(objectMapper.findMixInClassFor(ExitStatus.class)); - assertNotNull(objectMapper.findMixInClassFor(StepExecution.class)); - assertNotNull(objectMapper.findMixInClassFor(ExecutionContext.class)); - assertNotNull(objectMapper.findMixInClassFor(StepExecutionHistory.class)); + assertThat(objectMapper.findMixInClassFor(JobExecution.class)).isNotNull(); + assertThat(objectMapper.findMixInClassFor(JobParameters.class)).isNotNull(); + assertThat(objectMapper.findMixInClassFor(JobParameter.class)).isNotNull(); + assertThat(objectMapper.findMixInClassFor(JobInstance.class)).isNotNull(); + assertThat(objectMapper.findMixInClassFor(ExitStatus.class)).isNotNull(); + assertThat(objectMapper.findMixInClassFor(StepExecution.class)).isNotNull(); + assertThat(objectMapper.findMixInClassFor(ExecutionContext.class)).isNotNull(); + assertThat(objectMapper.findMixInClassFor(StepExecutionHistory.class)).isNotNull(); } @Test - public void testThatPrepareRestTemplateWithNullContructorValueContainsMixins() { + void thatPrepareRestTemplateWithNullContructorValueContainsMixins() { final RestTemplate restTemplate = 
DataFlowTemplate.prepareRestTemplate(null); - assertNotNull(restTemplate); - assertTrue(restTemplate.getErrorHandler() instanceof VndErrorResponseErrorHandler); + assertThat(restTemplate).isNotNull(); + assertThat(restTemplate.getErrorHandler() instanceof VndErrorResponseErrorHandler).isTrue(); assertCorrectMixins(restTemplate); } @Test - public void testThatPrepareRestTemplateWithProvidedRestTemplateContainsMixins() { + void thatPrepareRestTemplateWithProvidedRestTemplateContainsMixins() { final RestTemplate providedRestTemplate = new RestTemplate(); final RestTemplate restTemplate = DataFlowTemplate.prepareRestTemplate(providedRestTemplate); - assertNotNull(restTemplate); - assertTrue(providedRestTemplate == restTemplate); - assertTrue(restTemplate.getErrorHandler() instanceof VndErrorResponseErrorHandler); + assertThat(restTemplate).isNotNull(); + assertThat(providedRestTemplate == restTemplate).isTrue(); + assertThat(restTemplate.getErrorHandler() instanceof VndErrorResponseErrorHandler).isTrue(); assertCorrectMixins(restTemplate); } @Test - public void testPrepareRestTemplateWithRestTemplateThatHasNoMessageConverters() { + void prepareRestTemplateWithRestTemplateThatHasNoMessageConverters() { final RestTemplate providedRestTemplate = new RestTemplate(); providedRestTemplate.getMessageConverters().clear(); @@ -181,7 +191,7 @@ public void testPrepareRestTemplateWithRestTemplateThatHasNoMessageConverters() DataFlowTemplate.prepareRestTemplate(providedRestTemplate); } catch (IllegalArgumentException e) { - assertEquals("'messageConverters' must not be empty", e.getMessage()); + assertThat(e.getMessage()).isEqualTo("'messageConverters' must not be empty"); return; } @@ -189,7 +199,7 @@ public void testPrepareRestTemplateWithRestTemplateThatHasNoMessageConverters() } @Test - public void testPrepareRestTemplateWithRestTemplateThatMissesJacksonConverter() { + void prepareRestTemplateWithRestTemplateThatMissesJacksonConverter() { final RestTemplate providedRestTemplate = new RestTemplate(); final Iterator<HttpMessageConverter<?>> iterator = providedRestTemplate.getMessageConverters().iterator(); @@ -203,8 +213,7 @@ public void testPrepareRestTemplateWithRestTemplateThatMissesJacksonConverter() DataFlowTemplate.prepareRestTemplate(providedRestTemplate); } catch (IllegalArgumentException e) { - assertEquals("The RestTemplate does not contain a required MappingJackson2HttpMessageConverter.", - e.getMessage()); + assertThat(e.getMessage()).isEqualTo("The RestTemplate does not contain a required MappingJackson2HttpMessageConverter."); return; } @@ -212,36 +221,36 @@ public void testPrepareRestTemplateWithRestTemplateThatMissesJacksonConverter() } @Test - public void testAllActive() throws Exception{ + void allActive() throws Exception{ DataFlowTemplate template = getMockedDataFlowTemplate(true); - assertNotNull(template.taskOperations()); - assertNotNull(template.streamOperations()); - assertNotNull(template.runtimeOperations()); - assertNotNull(template.jobOperations()); - assertNotNull(template.schedulerOperations()); + assertThat(template.taskOperations()).isNotNull(); + assertThat(template.streamOperations()).isNotNull(); + assertThat(template.runtimeOperations()).isNotNull(); + assertThat(template.jobOperations()).isNotNull(); + assertThat(template.schedulerOperations()).isNotNull(); testAlwaysActiveOperations(template); } @Test - public void testAllDeActive() throws Exception{ + void allDeActive() throws Exception{ DataFlowTemplate template = getMockedDataFlowTemplate(false); - assertNull(template.taskOperations()); - assertNull(template.streamOperations()); - assertNull(template.runtimeOperations()); - assertNull(template.jobOperations()); - assertNull(template.schedulerOperations()); + assertThat(template.taskOperations()).isNull(); + assertThat(template.streamOperations()).isNull(); + assertThat(template.runtimeOperations()).isNull(); + assertThat(template.jobOperations()).isNull(); + assertThat(template.schedulerOperations()).isNull(); testAlwaysActiveOperations(template); } private void testAlwaysActiveOperations(DataFlowTemplate template) { //these operations are always active - assertNotNull(template.aboutOperation()); - assertNotNull(template.appRegistryOperations()); - assertNotNull(template.completionOperations()); + assertThat(template.aboutOperation()).isNotNull(); + assertThat(template.appRegistryOperations()).isNotNull(); + assertThat(template.completionOperations()).isNotNull(); } private DataFlowTemplate getMockedDataFlowTemplate(boolean isLinksActive) throws Exception{ @@ -259,6 +268,6 @@ private DataFlowTemplate getMockedDataFlowTemplate(boolean isLinksActive) throws converters.add(new MappingJackson2HttpMessageConverter()); when(restTemplate.getMessageConverters()).thenReturn(converters); URI uri = new URI("foo"); - return new DataFlowTemplate(uri, restTemplate); + return new DataFlowTemplate(uri, restTemplate, mapper); } }
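A note on the constructor change above: DataFlowTemplate now takes a caller-supplied ObjectMapper (new DataFlowTemplate(uri, restTemplate, mapper)), and the test's setup() hunk registers the modules the Data Flow REST payloads rely on. A minimal standalone sketch of that mapper wiring, assuming the same four modules shown in the setup() hunk are on the classpath (the variable name is illustrative):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
    import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
    import org.springframework.cloud.dataflow.rest.support.jackson.Jackson2DataflowModule;
    import org.springframework.hateoas.mediatype.hal.Jackson2HalModule;

    ObjectMapper mapper = new ObjectMapper();
    mapper.registerModule(new Jdk8Module());             // java.util.Optional support
    mapper.registerModule(new Jackson2HalModule());      // HAL _links/_embedded payloads
    mapper.registerModule(new JavaTimeModule());         // java.time types (Batch 5 uses LocalDateTime)
    mapper.registerModule(new Jackson2DataflowModule()); // Data Flow-specific (de)serializers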
diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/ExecutionContextDeserializationTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/ExecutionContextDeserializationTests.java index a8e24d9af4..1ec0363115 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/ExecutionContextDeserializationTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/ExecutionContextDeserializationTests.java @@ -21,25 +21,23 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.batch.item.ExecutionContext; import org.springframework.util.StreamUtils; -import static org.hamcrest.CoreMatchers.containsString; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; +import static org.assertj.core.api.Assertions.within; /** * @author Gunnar Hillert + * @author Corneil du Plessis */ -public class ExecutionContextDeserializationTests { +class ExecutionContextDeserializationTests { @Test - public void testDeserializationOfBasicExecutionContext() throws IOException { + void deserializationOfBasicExecutionContext() throws IOException { final ObjectMapper objectMapper = DataFlowTemplate.prepareObjectMapper(new ObjectMapper()); @@ -51,11 +49,11 @@ public void testDeserializationOfBasicExecutionContext() throws IOException { ExecutionContext executionContext = objectMapper.readValue(json, new TypeReference<ExecutionContext>() { }); - assertEquals(2, executionContext.entrySet().size()); - assertEquals("org.springframework.cloud.task.app.timestamp.batch.TimestampBatchTaskConfiguration$1", executionContext.get("batch.taskletType"));
- assertEquals("org.springframework.batch.core.step.tasklet.TaskletStep", executionContext.get("batch.stepType")); - assertFalse(executionContext.isDirty()); - assertFalse(executionContext.isEmpty()); + assertThat(executionContext.entrySet()).hasSize(2); + assertThat(executionContext.get("batch.taskletType")).isEqualTo("org.springframework.cloud.task.app.timestamp.batch.TimestampBatchTaskConfiguration$1"); + assertThat(executionContext.get("batch.stepType")).isEqualTo("org.springframework.batch.core.step.tasklet.TaskletStep"); + assertThat(executionContext.isDirty()).isFalse(); + assertThat(executionContext.isEmpty()).isFalse(); } /** @@ -66,7 +64,7 @@ public void testDeserializationOfBasicExecutionContext() throws IOException { * @throws IOException */ @Test - public void testFaultyExecutionContext() throws IOException { + void faultyExecutionContext() throws IOException { final ObjectMapper objectMapper = DataFlowTemplate.prepareObjectMapper(new ObjectMapper()); @@ -78,15 +76,15 @@ ExecutionContext executionContext = objectMapper.readValue(json, new TypeReference<ExecutionContext>() { }); - assertEquals(2, executionContext.entrySet().size()); - assertEquals("org.springframework.cloud.task.app.timestamp.batch.TimestampBatchTaskConfiguration$1", executionContext.get("batch.taskletType")); - assertEquals("org.springframework.batch.core.step.tasklet.TaskletStep", executionContext.get("batch.stepType")); - assertTrue(executionContext.isDirty()); - assertFalse(executionContext.isEmpty()); + assertThat(executionContext.entrySet()).hasSize(2); + assertThat(executionContext.get("batch.taskletType")).isEqualTo("org.springframework.cloud.task.app.timestamp.batch.TimestampBatchTaskConfiguration$1"); + assertThat(executionContext.get("batch.stepType")).isEqualTo("org.springframework.batch.core.step.tasklet.TaskletStep"); + assertThat(executionContext.isDirty()).isTrue(); + assertThat(executionContext.isEmpty()).isFalse(); } @Test - public void testExecutionContextWithNonStringValues() throws IOException { + void executionContextWithNonStringValues() throws IOException { final ObjectMapper objectMapper = DataFlowTemplate.prepareObjectMapper(new ObjectMapper()); @@ -98,16 +96,16 @@ public void testExecutionContextWithNonStringValues() throws IOException { final ExecutionContext executionContext = objectMapper.readValue(json, new TypeReference<ExecutionContext>() { }); - assertEquals(6, executionContext.entrySet().size()); - assertEquals(1234, executionContext.getInt("barNumber")); - assertEquals("1234", executionContext.getString("barNumberAsString")); + assertThat(executionContext.entrySet()).hasSize(6); + assertThat(executionContext.getInt("barNumber")).isEqualTo(1234); + assertThat(executionContext.getString("barNumberAsString")).isEqualTo("1234"); try { executionContext.getLong("barNumber"); fail("Expected a ClassCastException to be thrown."); } catch (ClassCastException ce) { - assertThat(ce.getMessage(), containsString("key=[barNumber] is not of type: [class java.lang.Long], it is [(class java.lang.Integer)")); + assertThat(ce.getMessage()).contains("key=[barNumber] is not of type: [class java.lang.Long], it is [(class java.lang.Integer)"); } try { @@ -115,24 +113,24 @@ public void testExecutionContextWithNonStringValues() throws IOException { fail("Expected a ClassCastException to be thrown."); } catch (ClassCastException ce) { - assertThat(ce.getMessage(), containsString("key=[barNumber] is not of type: [class java.lang.Double], it is [(class java.lang.Integer)")); + 
assertThat(ce.getMessage()).contains("key=[barNumber] is not of type: [class java.lang.Double], it is [(class java.lang.Integer)"); } - assertEquals(22222222222L, executionContext.getLong("longNumber")); + assertThat(executionContext.getLong("longNumber")).isEqualTo(22222222222L); try { executionContext.getInt("longNumber"); fail("Expected a ClassCastException to be thrown."); } catch (ClassCastException ce) { - assertThat(ce.getMessage(), containsString("key=[longNumber] is not of type: [class java.lang.Integer], it is [(class java.lang.Long)")); + assertThat(ce.getMessage()).contains("key=[longNumber] is not of type: [class java.lang.Integer], it is [(class java.lang.Long)"); } - assertEquals("true", executionContext.get("fooBoolean")); - assertEquals(3.5, executionContext.getDouble("floatNumber"), 0.1); - assertEquals("[1,2,3]", executionContext.getString("floatNumberArray")); + assertThat(executionContext.get("fooBoolean")).isEqualTo("true"); + assertThat(executionContext.getDouble("floatNumber")).isCloseTo(3.5, within(0.1)); + assertThat(executionContext.getString("floatNumberArray")).isEqualTo("[1,2,3]"); - assertFalse(executionContext.isDirty()); - assertFalse(executionContext.isEmpty()); + assertThat(executionContext.isDirty()).isFalse(); + assertThat(executionContext.isEmpty()).isFalse(); } } diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/ExecutionContextSerializationTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/ExecutionContextSerializationTests.java index 899aa9ca6a..c99a32769f 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/ExecutionContextSerializationTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/ExecutionContextSerializationTests.java @@ -19,20 +19,21 @@ import java.io.IOException; import com.fasterxml.jackson.databind.ObjectMapper; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.batch.item.ExecutionContext; -import static org.junit.Assert.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; /** * @author Gunnar Hillert * @author Glenn Renfro + * @author Corneil du Plessis */ -public class ExecutionContextSerializationTests { +class ExecutionContextSerializationTests { @Test - public void testSerializationOfExecutionContext() throws IOException { + void serializationOfExecutionContext() throws IOException { final ObjectMapper objectMapper = DataFlowTemplate.prepareObjectMapper(new ObjectMapper()); final ExecutionContext stepExecutionExecutionContext = new ExecutionContext(); @@ -41,7 +42,7 @@ public void testSerializationOfExecutionContext() throws IOException { final String serializedExecutionContext = objectMapper.writeValueAsString(stepExecutionExecutionContext); final String expectedExecutionContext = "{\"dirty\":true,\"empty\":false,\"values\":[{\"foo\":\"bar\"},{\"foo2\":\"bar2\"}]}"; - assertEquals(expectedExecutionContext, serializedExecutionContext); + assertThat(serializedExecutionContext).isEqualTo(expectedExecutionContext); } diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/JobExecutionDeserializationTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/JobExecutionDeserializationTests.java index 5fad4dc1ed..cb73bd635b 100644 --- 
a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/JobExecutionDeserializationTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/JobExecutionDeserializationTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2019 the original author or authors. + * Copyright 2016-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,7 +21,8 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; -import org.junit.Test; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.StepExecution; import org.springframework.batch.item.ExecutionContext; @@ -30,19 +31,20 @@ import org.springframework.hateoas.PagedModel; import org.springframework.util.StreamUtils; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.assertj.core.api.Assertions.assertThat; /** * @author Gunnar Hillert * @author Glenn Renfro + * @author Corneil du Plessis */ -public class JobExecutionDeserializationTests { +class JobExecutionDeserializationTests { @Test - public void testDeserializationOfMultipleJobExecutions() throws IOException { + void deserializationOfMultipleJobExecutions() throws IOException { final ObjectMapper objectMapper = DataFlowTemplate.prepareObjectMapper(new ObjectMapper()); + objectMapper.registerModule(new JavaTimeModule()); final InputStream inputStream = JobExecutionDeserializationTests.class .getResourceAsStream("/JobExecutionJson.txt"); @@ -50,19 +52,21 @@ public void testDeserializationOfMultipleJobExecutions() throws IOException { final String json = new String(StreamUtils.copyToByteArray(inputStream)); final PagedModel<EntityModel<JobExecutionResource>> paged = objectMapper.readValue(json, - new TypeReference<PagedModel<EntityModel<JobExecutionResource>>>() { + new TypeReference<>() { }); final JobExecutionResource jobExecutionResource = paged.getContent().iterator().next().getContent(); - assertEquals("Expect 1 JobExecutionInfoResource", 6, paged.getContent().size()); - assertEquals(Long.valueOf(6), jobExecutionResource.getJobId()); - assertEquals("job200616815", jobExecutionResource.getName()); - assertEquals("COMPLETED", jobExecutionResource.getJobExecution().getStatus().name()); + assertThat(paged.getContent().size()).as("Expect 1 JobExecutionInfoResource").isEqualTo(6); + assertThat(jobExecutionResource.getJobId()).isEqualTo(Long.valueOf(6)); + assertThat(jobExecutionResource.getName()).isEqualTo("job200616815"); + assertThat(jobExecutionResource.getJobExecution().getStatus().name()).isEqualTo("COMPLETED"); } @Test - public void testDeserializationOfSingleJobExecution() throws IOException { + void deserializationOfSingleJobExecution() throws IOException { final ObjectMapper objectMapper = DataFlowTemplate.prepareObjectMapper(new ObjectMapper()); + objectMapper.registerModule(new JavaTimeModule()); + final InputStream inputStream = JobExecutionDeserializationTests.class .getResourceAsStream("/SingleJobExecutionJson.txt"); @@ -71,19 +75,19 @@ public void testDeserializationOfSingleJobExecution() throws IOException { final JobExecutionResource jobExecutionInfoResource = objectMapper.readValue(json, JobExecutionResource.class); - assertNotNull(jobExecutionInfoResource); - assertEquals(Long.valueOf(1), jobExecutionInfoResource.getJobId()); - assertEquals("ff.job", 
jobExecutionInfoResource.getName()); - assertEquals("COMPLETED", jobExecutionInfoResource.getJobExecution().getStatus().name()); - assertEquals(1, jobExecutionInfoResource.getJobExecution().getStepExecutions().size()); + assertThat(jobExecutionInfoResource).isNotNull(); + assertThat(jobExecutionInfoResource.getJobId()).isEqualTo(Long.valueOf(1)); + assertThat(jobExecutionInfoResource.getName()).isEqualTo("ff.job"); + assertThat(jobExecutionInfoResource.getJobExecution().getStatus().name()).isEqualTo("COMPLETED"); + assertThat(jobExecutionInfoResource.getJobExecution().getStepExecutions()).hasSize(1); final StepExecution stepExecution = jobExecutionInfoResource.getJobExecution().getStepExecutions().iterator().next(); - assertNotNull(stepExecution); + assertThat(stepExecution).isNotNull(); final ExecutionContext stepExecutionExecutionContext = stepExecution.getExecutionContext(); - assertNotNull(stepExecutionExecutionContext); - assertEquals(2, stepExecutionExecutionContext.size()); + assertThat(stepExecutionExecutionContext).isNotNull(); + assertThat(stepExecutionExecutionContext.size()).isEqualTo(2); } } diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/RuntimeTemplateTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/RuntimeTemplateTests.java new file mode 100644 index 0000000000..3ca2db3b37 --- /dev/null +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/RuntimeTemplateTests.java @@ -0,0 +1,146 @@ +/* + * Copyright 2022-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.rest.client; + +import java.util.Collections; +import java.util.Map; +import java.util.Optional; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.cloud.skipper.domain.ActuatorPostRequest; +import org.springframework.hateoas.Link; +import org.springframework.hateoas.RepresentationModel; +import org.springframework.web.client.RestTemplate; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * Unit tests for {@link RuntimeTemplate}. 
+ * + * @author Chris Bono + */ +class RuntimeTemplateTests { + + private RuntimeTemplate runtimeTemplate; + + private RestTemplate restTemplate; + + private RepresentationModel resources; + + private final String appId = "flipflop3.log-v1"; + + private final String instanceId = "flipflop3.log-v1-0"; + + private final String endpoint = "info"; + + @BeforeEach + void prepareUriTemplate() { + Link actuatorGetLink = mock(Link.class); + when(actuatorGetLink.getHref()).thenReturn("actuator-get-link"); + + Link actuatorPostLink = mock(Link.class); + when(actuatorPostLink.getHref()).thenReturn("actuator-post-link"); + + Link actuatorLink = mock(Link.class); + when(actuatorLink.expand(appId, instanceId, endpoint)).thenReturn(actuatorGetLink); + when(actuatorLink.expand(appId, instanceId)).thenReturn(actuatorPostLink); + + resources = mock(RepresentationModel.class); + when(resources.getLink("runtime/apps")).thenReturn(Optional.of(mock(Link.class))); + when(resources.getLink("runtime/apps/{appId}")).thenReturn(Optional.of(mock(Link.class))); + when(resources.getLink("runtime/apps/{appId}/instances/{instanceId}/actuator")).thenReturn(Optional.of(actuatorLink)); + when(resources.getLink("runtime/streams/{streamNames}")).thenReturn(Optional.of(mock(Link.class))); + + restTemplate = mock(RestTemplate.class); + runtimeTemplate = new RuntimeTemplate(restTemplate, resources); + + // Test Premise: + // Mocks are constructed in manner that ensures only requests for our chosen appId/instanceId/endpoint will + // result in a non-null answer to 'Link.getHref' (which is then passed into the RestTemplate). + } + + @Test + void getFromActuator() { + runtimeTemplate.getFromActuator(appId, instanceId, endpoint); + verify(restTemplate).getForObject("actuator-get-link", String.class); + } + + @Test + void postToActuatorWithBodyMap() { + Map body = Collections.singletonMap("name", "extra"); + ActuatorPostRequest expectedPostRequest = new ActuatorPostRequest(); + expectedPostRequest.setEndpoint(endpoint); + expectedPostRequest.setBody(body); + runtimeTemplate.postToActuator(appId, instanceId, endpoint, body); + verify(restTemplate).postForObject(eq("actuator-post-link"), eq(expectedPostRequest), eq(Object.class)); + } + + @Test + void postToActuatorWithEmptyBodyMap() { + ActuatorPostRequest expectedPostRequest = new ActuatorPostRequest(); + expectedPostRequest.setEndpoint(endpoint); + expectedPostRequest.setBody(Collections.emptyMap()); + runtimeTemplate.postToActuator(appId, instanceId, endpoint, Collections.emptyMap()); + verify(restTemplate).postForObject(eq("actuator-post-link"), eq(expectedPostRequest), eq(Object.class)); + } + + @Test + void postToActuatorWithNullBodyMap() { + ActuatorPostRequest expectedPostRequest = new ActuatorPostRequest(); + expectedPostRequest.setEndpoint(endpoint); + runtimeTemplate.postToActuator(appId, instanceId, endpoint, null); + verify(restTemplate).postForObject(eq("actuator-post-link"), eq(expectedPostRequest), eq(Object.class)); + } + + @Test + void appStatusesUriTemplateIsRequired() { + when(resources.getLink("runtime/apps")).thenReturn(Optional.empty()); + assertThatThrownBy(() -> new RuntimeTemplate(restTemplate, resources)) + .isInstanceOf(RuntimeException.class) + .hasMessageContaining("Unable to retrieve URI template for runtime/apps"); + } + + @Test + void appStatusUriTemplateIsRequired() { + when(resources.getLink("runtime/apps/{appId}")).thenReturn(Optional.empty()); + assertThatThrownBy(() -> new RuntimeTemplate(restTemplate, resources)) + 
.isInstanceOf(RuntimeException.class) + .hasMessageContaining("Unable to retrieve URI template for runtime/apps/{appId}"); + } + + @Test + void streamStatusUriTemplateIsRequired() { + when(resources.getLink("runtime/streams/{streamNames}")).thenReturn(Optional.empty()); + assertThatThrownBy(() -> new RuntimeTemplate(restTemplate, resources)) + .isInstanceOf(RuntimeException.class) + .hasMessageContaining("Unable to retrieve URI template for runtime/streams/{streamNames}"); + } + + @Test + void actuatorUriTemplateIsNotRequiredForBackwardsCompatibility() { + when(resources.getLink("runtime/apps/{appId}/instances/{instanceId}/actuator")).thenReturn(Optional.empty()); + RuntimeTemplate runtimeTemplate = new RuntimeTemplate(restTemplate, resources); + assertThat(runtimeTemplate).hasFieldOrPropertyWithValue("appActuatorUriTemplate", null); + } +} diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/SchedulerTemplateTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/SchedulerTemplateTests.java index a0ab353b83..f14a36eef8 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/SchedulerTemplateTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/SchedulerTemplateTests.java @@ -21,8 +21,8 @@ import java.util.Map; import java.util.Optional; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.springframework.cloud.dataflow.rest.resource.RootResource; @@ -38,8 +38,9 @@ /** * @author Glenn Renfro + * @author Corneil du Plessis */ -public class SchedulerTemplateTests { +class SchedulerTemplateTests { private static final String SCHEDULES_RELATION = org.springframework.cloud.dataflow.rest.client.SchedulerTemplate.SCHEDULES_RELATION; private static final String SCHEDULES_RELATION_INSTANCE = SCHEDULES_RELATION + "/instances"; private static final String DEFAULT_SCHEDULE_NAME = "testSchedule"; @@ -49,22 +50,22 @@ public class SchedulerTemplateTests { private RestTemplate restTemplate; private SchedulerTemplate template; - @Before - public void setup() { + @BeforeEach + void setup() { rootResource = mock(RootResource.class); - when(rootResource.getLink(SCHEDULES_RELATION)).thenReturn(Optional.of(new Link(SCHEDULES_RELATION))); - when(rootResource.getLink(SCHEDULES_RELATION_INSTANCE)).thenReturn(Optional.of(new Link(SCHEDULES_RELATION_INSTANCE))); + when(rootResource.getLink(SCHEDULES_RELATION)).thenReturn(Optional.of(Link.of(SCHEDULES_RELATION))); + when(rootResource.getLink(SCHEDULES_RELATION_INSTANCE)).thenReturn(Optional.of(Link.of(SCHEDULES_RELATION_INSTANCE))); restTemplate = mock(RestTemplate.class); template = new SchedulerTemplate(restTemplate, rootResource); } @Test - public void scheduleTest() { + void scheduleTest() { verifyControllerResult(null); } @Test - public void multiPlatformScheduleTest() { + void multiPlatformScheduleTest() { verifyControllerResult("default"); verifyControllerResult("foo"); } @@ -86,75 +87,75 @@ private void verifyControllerResult(String platform) { } @Test - public void unScheduleTest() { + void unScheduleTest() { template.unschedule(DEFAULT_SCHEDULE_NAME); Mockito.verify(restTemplate).delete(SCHEDULES_RELATION + "/testSchedule"); } @Test - public void unSchedulePlatformTest() { + void unSchedulePlatformTest() { 
template.unschedule(DEFAULT_SCHEDULE_NAME, "foo"); Mockito.verify(restTemplate).delete(SCHEDULES_RELATION + "/testSchedule?platform=foo"); } @Test - public void unScheduleNullTest() { + void unScheduleNullTest() { template.unschedule(DEFAULT_SCHEDULE_NAME, null); Mockito.verify(restTemplate).delete(SCHEDULES_RELATION + "/testSchedule"); } @Test - public void listTest() { + void listTest() { template.list(); Mockito.verify(restTemplate).getForObject(SCHEDULES_RELATION, ScheduleInfoResource.Page.class); } @Test - public void listByPlatformNullTest() { + void listByPlatformNullTest() { template.listByPlatform(null); Mockito.verify(restTemplate).getForObject(SCHEDULES_RELATION, ScheduleInfoResource.Page.class); } @Test - public void listByPlatformTest() { + void listByPlatformTest() { template.listByPlatform("foo"); Mockito.verify(restTemplate).getForObject(SCHEDULES_RELATION + "?platform=foo", ScheduleInfoResource.Page.class); } @Test - public void listTaskDefNameTest() { + void listTaskDefNameTest() { template.list(DEFAULT_DEFINITION_NAME); Mockito.verify(restTemplate).getForObject(SCHEDULES_RELATION_INSTANCE, ScheduleInfoResource.Page.class); } @Test - public void listTaskDefNameNullTest() { + void listTaskDefNameNullTest() { template.list(DEFAULT_DEFINITION_NAME, null); Mockito.verify(restTemplate).getForObject(SCHEDULES_RELATION_INSTANCE, ScheduleInfoResource.Page.class); } @Test - public void listTaskDefNamePlatformTest() { + void listTaskDefNamePlatformTest() { template.list(DEFAULT_DEFINITION_NAME, "foo"); Mockito.verify(restTemplate).getForObject(SCHEDULES_RELATION_INSTANCE + "?platform=foo", ScheduleInfoResource.Page.class); } @Test - public void getScheduleTest() { + void getScheduleTest() { template.getSchedule(DEFAULT_SCHEDULE_NAME); Mockito.verify(restTemplate).getForObject(SCHEDULES_RELATION + "/" + DEFAULT_SCHEDULE_NAME, ScheduleInfoResource.class); } @Test - public void getScheduleNullTest() { + void getScheduleNullTest() { template.getSchedule(DEFAULT_SCHEDULE_NAME, null); Mockito.verify(restTemplate).getForObject(SCHEDULES_RELATION + "/" + DEFAULT_SCHEDULE_NAME, ScheduleInfoResource.class); } @Test - public void getSchedulePlatformTest() { + void getSchedulePlatformTest() { template.getSchedule(DEFAULT_SCHEDULE_NAME, "foo"); Mockito.verify(restTemplate).getForObject(SCHEDULES_RELATION + "/" + DEFAULT_SCHEDULE_NAME + "?platform=foo", ScheduleInfoResource.class); diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/TaskTemplateTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/TaskTemplateTests.java index 8120e4344e..8d35a5e88e 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/TaskTemplateTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/TaskTemplateTests.java @@ -20,67 +20,60 @@ import java.util.Map; import java.util.Optional; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.hateoas.Link; import org.springframework.hateoas.RepresentationModel; import org.springframework.web.client.RestTemplate; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.mockito.Mockito.mock; /** * Test the {@link TaskTemplate} implementation of 
{@link TaskOperations}. * * @author Glenn Renfro + * @author Corneil du Plessis */ -public class TaskTemplateTests { +class TaskTemplateTests { private static final String CURRENT_TASK_EXECUTION_LINK = "tasks/executions/current"; private RestTemplate restTemplate; - @Before - public void setup() { + @BeforeEach + void setup() { restTemplate = mock(RestTemplate.class); } @Test - public void testOldDataFlow() { - validateExecutionLinkNotPresent("1.6.0"); + void invalidVersion() { + assertThatExceptionOfType(IllegalArgumentException.class) + .isThrownBy(() -> validateExecutionLinkPresent("2.11.5")); } @Test - public void testMinDataFlow() { - validateExecutionLinkPresent("1.7.0"); + void minDataFlow() { + validateExecutionLinkPresent("3.0.0"); } @Test - public void testFutureDataFlow() { - validateExecutionLinkPresent("1.8.0"); - validateExecutionLinkPresent("1.9.0"); - validateExecutionLinkPresent("2.0.0"); + void futureDataFlow() { + validateExecutionLinkPresent("3.0.0"); } private void validateExecutionLinkPresent(String dataFlowVersion) { TestResource testResource = new TestResource(); new TaskTemplate(this.restTemplate, testResource, dataFlowVersion); - Assert.assertTrue(testResource.isLinkRequested(CURRENT_TASK_EXECUTION_LINK)); + assertThat(testResource.isLinkRequested(CURRENT_TASK_EXECUTION_LINK)).isTrue(); } - private void validateExecutionLinkNotPresent(String version) { - TestResource testResource = new TestResource(); - new TaskTemplate(this.restTemplate, testResource, version); - Assert.assertFalse(testResource.isLinkRequested(CURRENT_TASK_EXECUTION_LINK)); - } - - public static class TestResource extends RepresentationModel { + public static class TestResource extends RepresentationModel<TestResource> { - private Map<String, Long> linksRequested = new HashMap<>(); + private final Map<String, Long> linksRequested = new HashMap<>(); + @Override public Optional<Link> getLink(String rel) { if (this.linksRequested.containsKey(rel)) { Long count = this.linksRequested.get(rel); @@ -90,16 +83,13 @@ public Optional<Link> getLink(String rel) { this.linksRequested.put(rel, 1L); } - return Optional.of(new Link("foo", "bar")); + return Optional.of(Link.of("foo", "bar")); } public boolean isLinkRequested(String linkName) { - boolean result = false; + boolean result = this.linksRequested.containsKey(linkName) && + this.linksRequested.get(linkName) > 1L; - if (this.linksRequested.containsKey(linkName) && - this.linksRequested.get(linkName) > 1L) { - result = true; - } return result; } diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/VersionUtilsTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/VersionUtilsTests.java index c43871a4d6..9cae0c7d94 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/VersionUtilsTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/VersionUtilsTests.java @@ -15,7 +15,7 @@ */ package org.springframework.cloud.dataflow.rest.client; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.cloud.dataflow.rest.client.support.VersionUtils; @@ -23,11 +23,12 @@ /** * @author Mark Pollack + * @author Corneil du Plessis */ -public class VersionUtilsTests { +class VersionUtilsTests { @Test - public void testNullAndBlank() { + void nullAndBlank() { String threePartVersion = VersionUtils.getThreePartVersion(null); assertThat(threePartVersion).isEmpty(); @@ -39,7 +40,7 
@@ public void testNullAndBlank() { } @Test - public void badFormat() { + void badFormat() { String threePartVersion = VersionUtils.getThreePartVersion("1.3"); assertThat(threePartVersion).isEmpty(); @@ -55,7 +56,7 @@ public void badFormat() { } @Test - public void testValid() { + void valid() { String threePartVersion = VersionUtils.getThreePartVersion("1.3.4"); assertThat(threePartVersion).isEqualTo("1.3.4"); diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientPropertiesTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientPropertiesTests.java index def6ed7f98..0b18fc7152 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientPropertiesTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientPropertiesTests.java @@ -28,12 +28,12 @@ import static org.assertj.core.api.Assertions.assertThat; -public class DataFlowClientPropertiesTests { +class DataFlowClientPropertiesTests { private final ApplicationContextRunner contextRunner = new ApplicationContextRunner(); @Test - public void testDefaults() { + void defaults() { this.contextRunner .withUserConfiguration(Config1.class) .run((context) -> { @@ -52,7 +52,7 @@ public void testDefaults() { } @Test - public void testBasicAuth() { + void basicAuth() { this.contextRunner .withInitializer(context -> { Map map = new HashMap<>(); @@ -70,7 +70,7 @@ public void testBasicAuth() { } @Test - public void testLegacyOauth() { + void legacyOauth() { this.contextRunner .withInitializer(context -> { Map map = new HashMap<>(); @@ -89,12 +89,12 @@ public void testLegacyOauth() { assertThat(properties.getAuthentication().getClientId()).isEqualTo("id1"); assertThat(properties.getAuthentication().getClientSecret()).isEqualTo("secret1"); assertThat(properties.getAuthentication().getTokenUri()).isEqualTo("uri1"); - assertThat(properties.getAuthentication().getScope()).containsExactly("s1", "s2"); + assertThat(properties.getAuthentication().getScope()).containsExactlyInAnyOrder("s1", "s2"); }); } @Test - public void testCommonSpringSecurity() { + void commonSpringSecurity() { this.contextRunner .withInitializer(context -> { Map map = new HashMap<>(); diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/dsl/StreamDslTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/dsl/StreamDslTests.java index 168b0980dd..ee753e014a 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/dsl/StreamDslTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/dsl/StreamDslTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2019 the original author or authors. + * Copyright 2016-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,13 +15,11 @@ */ package org.springframework.cloud.dataflow.rest.client.dsl; -import java.util.Arrays; import java.util.Collections; import java.util.Map; -import org.apache.commons.lang.StringUtils; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; import org.mockito.Mock; import org.mockito.MockitoAnnotations; @@ -42,6 +40,7 @@ import org.springframework.hateoas.PagedModel; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType; import static org.mockito.Mockito.anyBoolean; import static org.mockito.Mockito.anyMap; import static org.mockito.Mockito.anyString; @@ -56,9 +55,10 @@ /** * @author Vinicius Carvalho * @author Christian Tzolov + * @author Corneil du Plessis */ @SuppressWarnings("unchecked") -public class StreamDslTests { +class StreamDslTests { @Mock private DataFlowOperations client; @@ -69,21 +69,21 @@ public class StreamDslTests { @Mock private RuntimeOperations runtimeOperations; - private StreamApplication timeApplication = new StreamApplication("time"); + private final StreamApplication timeApplication = new StreamApplication("time"); - private StreamApplication filterApplication = new StreamApplication("filter"); + private final StreamApplication filterApplication = new StreamApplication("filter"); - private StreamApplication logApplication = new StreamApplication("log"); + private final StreamApplication logApplication = new StreamApplication("log"); - @Before - public void init() { + @BeforeEach + void init() { MockitoAnnotations.initMocks(this); when(client.streamOperations()).thenReturn(this.streamOperations); when(client.runtimeOperations()).thenReturn(this.runtimeOperations); } @Test - public void simpleDefinition() { + void simpleDefinition() { StreamApplication time = new StreamApplication("time"); StreamApplication log = new StreamApplication("log"); Stream stream = Stream.builder(client).name("foo").source(time).sink(log).create() @@ -92,7 +92,7 @@ public void simpleDefinition() { } @Test - public void definitionWithLabel() { + void definitionWithLabel() { StreamApplication time = new StreamApplication("time").label("tick"); StreamApplication log = new StreamApplication("log"); @@ -102,7 +102,7 @@ public void definitionWithLabel() { } @Test - public void definitionWithProcessor() { + void definitionWithProcessor() { StreamApplication time = new StreamApplication("time").label("tick"); StreamApplication filter = new StreamApplication("filter"); StreamApplication log = new StreamApplication("log"); @@ -112,7 +112,7 @@ public void definitionWithProcessor() { } @Test - public void definitionWithProperties() { + void definitionWithProperties() { StreamApplication time = new StreamApplication("time").label("tick") .addProperty("fixed-delay", 5000); StreamApplication log = new StreamApplication("log"); @@ -123,16 +123,16 @@ public void definitionWithProperties() { } @Test - public void definitionWithDeploymentProperties() { + void definitionWithDeploymentProperties() { StreamApplication time = new StreamApplication("time").label("tick") .addProperty("fixed-delay", "5000").addDeploymentProperty("count", 2); Map deploymentProperties = time.getDeploymentProperties(); - assertThat(deploymentProperties.get("deployer.tick.count")).isEqualTo(2); + assertThat(deploymentProperties).containsEntry("deployer.tick.count", 2); } @Test - public void 
definitionWithDeploymentPropertiesBuilder() { + void definitionWithDeploymentPropertiesBuilder() { StreamDefinitionResource resource = new StreamDefinitionResource("ticktock", "tick: time | log", "time | log", "demo stream"); resource.setStatus("deploying"); @@ -155,7 +155,7 @@ public void definitionWithDeploymentPropertiesBuilder() { } @Test - public void deployWithCreate() { + void deployWithCreate() { StreamDefinitionResource resource = new StreamDefinitionResource("ticktock", "time | log", "time | log", "demo stream"); resource.setStatus("deploying"); @@ -171,7 +171,7 @@ public void deployWithCreate() { } @Test - public void deployWithDefinition() { + void deployWithDefinition() { StreamDefinitionResource resource = new StreamDefinitionResource("ticktock", "time | log", "time | log", "demo stream"); resource.setStatus("deploying"); @@ -188,7 +188,7 @@ public void deployWithDefinition() { } @Test - public void getStatus() { + void getStatus() { StreamDefinitionResource resource = new StreamDefinitionResource("ticktock", "time | log", "time | log", "demo stream"); resource.setStatus("unknown"); @@ -210,7 +210,7 @@ public void getStatus() { } @Test - public void createStream() { + void createStream() { StreamDefinitionResource resource = new StreamDefinitionResource("ticktock", "time | log", "time | log", "demo stream"); resource.setStatus("deploying"); @@ -224,25 +224,27 @@ public void createStream() { } @Test - public void testDuplicateNameWithLabel() { + void duplicateNameWithLabel() { StreamApplication filter2 = new StreamApplication("filter").label("filter2"); Stream.builder(client).name("test").source(timeApplication) .processor(filterApplication).processor(filter2).sink(logApplication) .create(); verify(streamOperations, times(1)).createStream( - eq("test"), eq("time | filter | filter2: filter | log"), eq(StringUtils.EMPTY), + eq("test"), eq("time | filter | filter2: filter | log"), eq(""), eq(false)); } - @Test(expected = IllegalStateException.class) - public void testDuplicateNameNoLabel() { - Stream.builder(client).name("test").source(timeApplication) + @Test + void duplicateNameNoLabel() { + assertThatExceptionOfType(IllegalStateException.class).isThrownBy(() -> { + Stream.builder(client).name("test").source(timeApplication) .processor(filterApplication).processor(filterApplication) .sink(logApplication).create(); + }); } @Test - public void update() { + void update() { StreamDefinitionResource ticktockDefinition = new StreamDefinitionResource("ticktock", "time | log", "time | log", "demo stream"); ticktockDefinition.setStatus("deploying"); @@ -264,7 +266,7 @@ public void update() { } @Test - public void logs() { + void logs() { String streamLog = "Test stream log"; String appLog = "Test app log"; StreamDefinitionResource ticktockDefinition = new StreamDefinitionResource("ticktock", "time | log", @@ -278,12 +280,12 @@ public void logs() { AppStatusResource appStatusResource = new AppStatusResource("deploymentId", "deployed"); - appStatusResource.setInstances(new CollectionModel(Arrays.asList(new AppInstanceStatusResource("instanceId", "deployed", - Collections.singletonMap(StreamRuntimePropertyKeys.ATTRIBUTE_SKIPPER_APPLICATION_NAME, "log"))))); - streamStatusResource.setApplications(new CollectionModel<>(Arrays.asList(appStatusResource))); + appStatusResource.setInstances(CollectionModel.of(Collections.singletonList(new AppInstanceStatusResource("instanceId", "deployed", + Collections.singletonMap(StreamRuntimePropertyKeys.ATTRIBUTE_SKIPPER_APPLICATION_NAME, "log"))))); + 
streamStatusResource.setApplications(CollectionModel.of(Collections.singletonList(appStatusResource))); when(runtimeOperations.streamStatus(ticktockDefinition.getName())) - .thenReturn(new PagedModel(Arrays.asList(streamStatusResource), null)); + .thenReturn(PagedModel.of(Collections.singletonList(streamStatusResource), (PagedModel.PageMetadata) null)); Stream stream = Stream.builder(client).name(ticktockDefinition.getName()).description("demo stream") .definition(ticktockDefinition.getDslText()).create() @@ -301,7 +303,7 @@ public void logs() { } @Test - public void rollback() { + void rollback() { StreamDefinitionResource ticktockDefinition = new StreamDefinitionResource("ticktock", "time | log", "time | log", "demo stream"); ticktockDefinition.setStatus("deploying"); @@ -321,7 +323,7 @@ public void rollback() { } @Test - public void manifest() { + void manifest() { StreamDefinitionResource ticktockDefinition = new StreamDefinitionResource("ticktock", "time | log", "time | log", "demo stream"); ticktockDefinition.setStatus("deploying"); @@ -338,7 +340,7 @@ public void manifest() { } @Test - public void history() { + void history() { StreamDefinitionResource ticktockDefinition = new StreamDefinitionResource("ticktock", "time | log", "time | log", "demo stream"); ticktockDefinition.setStatus("deploying"); @@ -355,7 +357,7 @@ public void history() { } @Test - public void undeploy() { + void undeploy() { StreamDefinitionResource resource = new StreamDefinitionResource("ticktock", "time | log", "time | log", "demo stream"); resource.setStatus("deploying"); @@ -377,7 +379,7 @@ public void undeploy() { } @Test - public void destroy() { + void destroy() { StreamDefinitionResource resource = new StreamDefinitionResource("ticktock", "time | log", "time | log", "demo stream"); resource.setStatus("deploying"); @@ -398,7 +400,7 @@ public void destroy() { } @Test - public void scaleApplicationInstances() { + void scaleApplicationInstances() { StreamDefinitionResource resource = new StreamDefinitionResource("ticktock", "time | log", "time | log", "demo stream"); resource.setStatus("deploying"); diff --git a/spring-cloud-dataflow-rest-client/src/test/resources/JobExecutionJson.txt b/spring-cloud-dataflow-rest-client/src/test/resources/JobExecutionJson.txt index 747f110650..a24778d761 100644 --- a/spring-cloud-dataflow-rest-client/src/test/resources/JobExecutionJson.txt +++ b/spring-cloud-dataflow-rest-client/src/test/resources/JobExecutionJson.txt @@ -48,7 +48,6 @@ "failureExceptions": [ ], - "jobConfigurationName": null, "running": false, "jobId": 6, "stopping": false, @@ -117,7 +116,6 @@ "failureExceptions": [ ], - "jobConfigurationName": null, "running": false, "jobId": 5, "stopping": false, @@ -186,7 +184,6 @@ "failureExceptions": [ ], - "jobConfigurationName": null, "running": false, "jobId": 4, "stopping": false, @@ -225,42 +222,42 @@ "-spring.datasource.driverClassName": { "identifying": false, "value": "com.mysql.jdbc.Driver", - "type": "STRING" + "type": "java.lang.String" }, "-spring.datasource.password": { "identifying": false, "value": "password", - "type": "STRING" + "type": "java.lang.String" }, "-spring.cloud.task.name": { "identifying": false, "value": "foo", - "type": "STRING" + "type": "java.lang.String" }, "-spring.jmx.default-domain": { "identifying": false, "value": "foo.taskbatch", - "type": "STRING" + "type": "java.lang.String" }, "-dataflow.group-deployment-id": { "identifying": false, "value": "foo-1458075239963", - "type": "STRING" + "type": "java.lang.String" }, 
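Aside on the fixture hunks here: the change from "type": "STRING" to "type": "java.lang.String" tracks the Spring Batch 5 job-parameter model, where a parameter records the fully qualified class name of its value rather than the old ParameterType enum. A hedged sketch of building the equivalent parameter under Batch 5 (the parameter name and value are taken from the fixture entry above; the builder call is standard Batch 5 API):

    import org.springframework.batch.core.JobParameter;
    import org.springframework.batch.core.JobParameters;
    import org.springframework.batch.core.JobParametersBuilder;

    // Batch 5: JobParameter is generic over its value type; the type is
    // serialized as the class name, e.g. "java.lang.String"
    JobParameters params = new JobParametersBuilder()
            .addJobParameter("-spring.datasource.username",
                    new JobParameter<>("root", String.class, false)) // identifying = false
            .toJobParameters();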
"-spring.datasource.username": { "identifying": false, "value": "root", - "type": "STRING" + "type": "java.lang.String" }, "-server.port": { "identifying": false, "value": "25553", - "type": "STRING" + "type": "java.lang.String" }, "-spring.datasource.url": { "identifying": false, "value": "jdbc:mysql:\/\/localhost:3306\/practice", - "type": "STRING" + "type": "java.lang.String" } }, "empty": false @@ -294,7 +291,6 @@ "failureExceptions": [ ], - "jobConfigurationName": null, "running": false, "jobId": 3, "stopping": false, @@ -370,7 +366,6 @@ "failureExceptions": [ ], - "jobConfigurationName": null, "running": false, "jobId": 2, "stopping": false, @@ -409,42 +404,42 @@ "-spring.datasource.driverClassName": { "identifying": false, "value": "com.mysql.jdbc.Driver", - "type": "STRING" + "type": "java.lang.String" }, "-spring.datasource.password": { "identifying": false, "value": "password", - "type": "STRING" + "type": "java.lang.String" }, "-spring.cloud.task.name": { "identifying": false, "value": "foo", - "type": "STRING" + "type": "java.lang.String" }, "-spring.jmx.default-domain": { "identifying": false, "value": "foo.taskbatch", - "type": "STRING" + "type": "java.lang.String" }, "-dataflow.group-deployment-id": { "identifying": false, "value": "foo-1458074090919", - "type": "STRING" + "type": "java.lang.String" }, "-spring.datasource.username": { "identifying": false, "value": "root", - "type": "STRING" + "type": "java.lang.String" }, "-server.port": { "identifying": false, "value": "26244", - "type": "STRING" + "type": "java.lang.String" }, "-spring.datasource.url": { "identifying": false, "value": "jdbc:mysql:\/\/localhost:3306\/practice", - "type": "STRING" + "type": "java.lang.String" } }, "empty": false @@ -478,7 +473,6 @@ "failureExceptions": [ ], - "jobConfigurationName": null, "running": false, "jobId": 1, "stopping": false, diff --git a/spring-cloud-dataflow-rest-client/src/test/resources/SingleJobExecutionJson.txt b/spring-cloud-dataflow-rest-client/src/test/resources/SingleJobExecutionJson.txt index d12baab93a..8217b578a5 100644 --- a/spring-cloud-dataflow-rest-client/src/test/resources/SingleJobExecutionJson.txt +++ b/spring-cloud-dataflow-rest-client/src/test/resources/SingleJobExecutionJson.txt @@ -15,42 +15,42 @@ "-spring.datasource.driverClassName": { "identifying": false, "value": "com.mysql.jdbc.Driver", - "type": "STRING" + "type": "java.lang.String" }, "-spring.datasource.password": { "identifying": false, "value": "password", - "type": "STRING" + "type": "java.lang.String" }, "-spring.cloud.task.name": { "identifying": false, "value": "foo", - "type": "STRING" + "type": "java.lang.String" }, "-spring.jmx.default-domain": { "identifying": false, "value": "foo.taskbatch", - "type": "STRING" + "type": "java.lang.String" }, "-dataflow.group-deployment-id": { "identifying": false, "value": "foo-1458074090919", - "type": "STRING" + "type": "java.lang.String" }, "-spring.datasource.username": { "identifying": false, "value": "root", - "type": "STRING" + "type": "java.lang.String" }, "-server.port": { "identifying": false, "value": "26244", - "type": "STRING" + "type": "java.lang.String" }, "-spring.datasource.url": { "identifying": false, "value": "jdbc:mysql:\/\/localhost:3306\/practice", - "type": "STRING" + "type": "java.lang.String" } }, "empty": false @@ -106,42 +106,42 @@ "-spring.datasource.driverClassName": { "identifying": false, "value": "com.mysql.jdbc.Driver", - "type": "STRING" + "type": "java.lang.String" }, "-spring.datasource.password": { "identifying": 
false, "value": "password", - "type": "STRING" + "type": "java.lang.String" }, "-spring.cloud.task.name": { "identifying": false, "value": "foo", - "type": "STRING" + "type": "java.lang.String" }, "-spring.jmx.default-domain": { "identifying": false, "value": "foo.taskbatch", - "type": "STRING" + "type": "java.lang.String" }, "-dataflow.group-deployment-id": { "identifying": false, "value": "foo-1458074090919", - "type": "STRING" + "type": "java.lang.String" }, "-spring.datasource.username": { "identifying": false, "value": "root", - "type": "STRING" + "type": "java.lang.String" }, "-server.port": { "identifying": false, "value": "26244", - "type": "STRING" + "type": "java.lang.String" }, "-spring.datasource.url": { "identifying": false, "value": "jdbc:mysql:\/\/localhost:3306\/practice", - "type": "STRING" + "type": "java.lang.String" } }, "empty": false @@ -169,7 +169,6 @@ "failureExceptions": [ ], - "jobConfigurationName": null, "running": false, "jobId": 1, "stopping": false, diff --git a/spring-cloud-dataflow-rest-resource/pom.xml b/spring-cloud-dataflow-rest-resource/pom.xml index 1e9f2d5d5d..76707d2c66 100644 --- a/spring-cloud-dataflow-rest-resource/pom.xml +++ b/spring-cloud-dataflow-rest-resource/pom.xml @@ -1,13 +1,21 @@ - + 4.0.0 org.springframework.cloud spring-cloud-dataflow-parent - 2.8.0-SNAPSHOT + 3.0.0-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-rest-resource + spring-cloud-dataflow-rest-resource + Data Flow Rest Resource jar + + true + 3.4.1 + org.springframework.boot @@ -23,6 +31,7 @@ org.springframework.cloud spring-cloud-dataflow-core + ${project.version} org.springframework.hateoas @@ -37,8 +46,12 @@ spring-cloud-task-core - org.apache.httpcomponents - httpclient + org.apache.httpcomponents.client5 + httpclient5 + + + joda-time + joda-time com.fasterxml.jackson.core @@ -57,28 +70,71 @@ commons-io - junit - junit - test + org.codehaus.jettison + jettison - org.hamcrest - hamcrest-core + org.springframework.boot + spring-boot-starter-test test + - org.hamcrest - hamcrest-library + org.springframework.boot + spring-boot-starter-web test + - org.springframework.boot - spring-boot-starter-test + com.h2database + h2 test org.springframework.cloud spring-cloud-skipper + ${project.version} - + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.13.0 + + true + ${java.version} + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/StepExecutionHistory.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/StepExecutionHistory.java index b429c8b2f0..e29b180985 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/StepExecutionHistory.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/StepExecutionHistory.java @@ -16,11 +16,14 @@ package org.springframework.cloud.dataflow.rest.job; -import java.util.Date; +import java.time.Duration; +import java.time.LocalDateTime; import org.springframework.batch.core.StepExecution; /** + * Stores the cumulative information for a specific {@link StepExecution}'s history. 
+ * * @author Glenn Renfro */ public class StepExecutionHistory { @@ -57,9 +60,9 @@ public void append(StepExecution stepExecution) { // ignore unfinished executions return; } - Date startTime = stepExecution.getStartTime(); - Date endTime = stepExecution.getEndTime(); - long time = endTime.getTime() - startTime.getTime(); + LocalDateTime startTime = stepExecution.getStartTime(); + LocalDateTime endTime = stepExecution.getEndTime(); + long time = Duration.between(startTime, endTime).toMillis(); duration.append(time); if (stepExecution.getReadCount() > 0) { durationPerRead.append(time / stepExecution.getReadCount()); @@ -79,6 +82,11 @@ public String getStepName() { return stepName; } + /** + * Returns the number of {@link StepExecution}s that are being used for history calculations. + * The count is incremented each time a completed {@link StepExecution} is appended to this history. + * @return the number of {@link StepExecution}s. + */ public int getCount() { return count; } @@ -115,6 +123,10 @@ public CumulativeHistory getProcessSkipCount() { return processSkipCount; } + /** + * Stores the cumulative history for a specified {@link StepExecution}'s duration. + * @return {@link CumulativeHistory} for the duration of a specified {@link StepExecution}. + */ public CumulativeHistory getDuration() { return duration; } diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecution.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecution.java index 2c029d36da..6d2e79cc4d 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecution.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecution.java @@ -74,4 +74,14 @@ public boolean isTaskDefined() { public int getStepExecutionCount() { return stepExecutionCount; } + + @Override + public String toString() { + return "TaskJobExecution{" + + "taskId=" + taskId + + ", isTaskDefined=" + isTaskDefined + + ", jobExecution=" + jobExecution + + ", stepExecutionCount=" + stepExecutionCount + + '}'; + } } diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecutionRel.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecutionRel.java index f3f6749ff4..e8a8c0b837 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecutionRel.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecutionRel.java @@ -38,16 +38,7 @@ public class TaskJobExecutionRel { private final TaskManifest taskManifest; - /** - * Constructor that establishes the relationship between a {@link TaskExecution} and - * the Job Execution Ids of the jobs that were executed within it. - * - * @param taskExecution to be associated with the job execution ids. - * @param jobExecutionIds to be associated with the task execution.
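With StepExecutionHistory moving from java.util.Date to java.time.LocalDateTime, the elapsed-time subtraction becomes a Duration computation. A self-contained sketch of the equivalence, with illustrative values:

import java.time.Duration;
import java.time.LocalDateTime;

class DurationSketch {
	public static void main(String[] args) {
		LocalDateTime start = LocalDateTime.of(2024, 1, 15, 12, 0, 0);
		LocalDateTime end = start.plusSeconds(90);
		// Replaces endTime.getTime() - startTime.getTime() from the Date-based code.
		long millis = Duration.between(start, end).toMillis();
		System.out.println(millis); // 90000
	}
}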
- */ - public TaskJobExecutionRel(TaskExecution taskExecution, List jobExecutionIds) { - this(taskExecution, jobExecutionIds, null); - } + private final TaskJobExecution composedTaskJobExecution; /** * Constructor that establishes the relationship between a {@link TaskExecution} and @@ -55,10 +46,11 @@ public TaskJobExecutionRel(TaskExecution taskExecution, List jobExecutionI * * @param taskExecution to be associated with the job execution ids. * @param jobExecutionIds to be associated with the task execution. - * @param taskManifest to be associated with the task execution + * @param taskManifest to be associated with the task execution. + * @param composedTaskJobExecution to be associated with the task execution. */ - public TaskJobExecutionRel(TaskExecution taskExecution, List jobExecutionIds, TaskManifest taskManifest) { - Assert.notNull(taskExecution, "taskExecution must not be null");; + public TaskJobExecutionRel(TaskExecution taskExecution, List jobExecutionIds, TaskManifest taskManifest, TaskJobExecution composedTaskJobExecution) { + Assert.notNull(taskExecution, "taskExecution must not be null"); this.taskExecution = taskExecution; this.taskManifest = taskManifest; if (jobExecutionIds == null) { @@ -67,8 +59,10 @@ public TaskJobExecutionRel(TaskExecution taskExecution, List jobExecutionI else { this.jobExecutionIds = Collections.unmodifiableList(new ArrayList<>(jobExecutionIds)); } + this.composedTaskJobExecution = composedTaskJobExecution; } + /** * @return the taskExecution for this relationship. */ @@ -90,4 +84,8 @@ public List getJobExecutionIds() { public TaskManifest getTaskManifest() { return taskManifest; } + + public TaskJobExecution getComposedTaskJobExecution() { + return composedTaskJobExecution; + } } diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/AppRegistrationResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/AppRegistrationResource.java index 5c0da365bf..b44406510d 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/AppRegistrationResource.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/AppRegistrationResource.java @@ -16,7 +16,7 @@ package org.springframework.cloud.dataflow.rest.resource; -import java.util.HashSet; +import java.util.Set; import org.springframework.hateoas.PagedModel; import org.springframework.hateoas.RepresentationModel; @@ -28,6 +28,7 @@ * @author Mark Fisher * @author Patrick Peralta * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ public class AppRegistrationResource extends RepresentationModel { @@ -46,6 +47,10 @@ public class AppRegistrationResource extends RepresentationModel versions; + private Set versions; /** * The label name of the application. 
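TaskJobExecutionRel keeps its defensive-copy idiom for the job execution ids while gaining the composed-task execution field. The idiom in isolation, as a hypothetical standalone class:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

class DefensiveCopySketch {
	private final List<Long> jobExecutionIds;

	DefensiveCopySketch(List<Long> jobExecutionIds) {
		// Copy first, then wrap: later mutation of the caller's list cannot leak in,
		// and callers of the getter cannot mutate this object's state.
		this.jobExecutionIds = (jobExecutionIds == null)
				? Collections.emptyList()
				: Collections.unmodifiableList(new ArrayList<>(jobExecutionIds));
	}

	List<Long> getJobExecutionIds() {
		return jobExecutionIds;
	}
}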
@@ -73,7 +78,7 @@ protected AppRegistrationResource() { } public AppRegistrationResource(String name, String type, String uri) { - this(name, type, null, uri, false); + this(name, type, null, uri, null, false); } /** @@ -85,11 +90,12 @@ public AppRegistrationResource(String name, String type, String uri) { * @param uri uri for app resource * @param defaultVersion is this application selected to the be default version in DSL */ - public AppRegistrationResource(String name, String type, String version, String uri, Boolean defaultVersion) { + public AppRegistrationResource(String name, String type, String version, String uri, String metaDataUri, Boolean defaultVersion) { this.name = name; this.type = type; this.version = version; this.uri = uri; + this.metaDataUri = metaDataUri; this.defaultVersion = defaultVersion; } @@ -100,14 +106,16 @@ public AppRegistrationResource(String name, String type, String version, String * @param type app type * @param version app version * @param uri uri for app resource + * @param metaDataUri uri for app metadata * @param defaultVersion is this application selected to the be default version in DSL * @param versions all the registered versions of this application */ - public AppRegistrationResource(String name, String type, String version, String uri, Boolean defaultVersion, HashSet versions) { + public AppRegistrationResource(String name, String type, String version, String uri, String metaDataUri, Boolean defaultVersion, Set versions) { this.name = name; this.type = type; this.version = version; this.uri = uri; + this.metaDataUri = metaDataUri; this.defaultVersion = defaultVersion; this.versions = versions; } @@ -119,21 +127,22 @@ public AppRegistrationResource(String name, String type, String version, String * @param type app type * @param version app version * @param uri uri for app resource + * @param metaDataUri uri for app metadata * @param defaultVersion is this application selected to the be default version in DSL * @param versions all the registered versions of this application * @param label the label name of the application */ - public AppRegistrationResource(String name, String type, String version, String uri, Boolean defaultVersion, HashSet versions, String label) { + public AppRegistrationResource(String name, String type, String version, String uri, String metaDataUri, Boolean defaultVersion, Set versions, String label) { this.name = name; this.type = type; this.version = version; this.uri = uri; + this.metaDataUri = metaDataUri; this.defaultVersion = defaultVersion; this.versions = versions; this.label = label; } - /** * @return the name of the app */ @@ -172,7 +181,7 @@ public Boolean getDefaultVersion() { /** * @return all the available versions of the app */ - public HashSet getVersions() { + public Set getVersions() { return this.versions; } @@ -184,6 +193,10 @@ public void setLabel(String label) { this.label = label; } + public String getMetaDataUri() { + return metaDataUri; + } + /** * Dedicated subclass to workaround type erasure. 
*/ diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/AppStatusResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/AppStatusResource.java index 6ab5dd0626..ad20f272b0 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/AppStatusResource.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/AppStatusResource.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2019 the original author or authors. + * Copyright 2015-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,12 +16,11 @@ package org.springframework.cloud.dataflow.rest.resource; -import org.apache.commons.lang.StringUtils; - import org.springframework.cloud.dataflow.core.StreamRuntimePropertyKeys; import org.springframework.hateoas.CollectionModel; import org.springframework.hateoas.PagedModel; import org.springframework.hateoas.RepresentationModel; +import org.springframework.util.StringUtils; /** * REST representation of an app status. @@ -53,7 +52,7 @@ public String getName() { if (this.instances != null && this.instances.iterator().hasNext()) { AppInstanceStatusResource instance = this.instances.iterator().next(); return (instance != null && instance.getAttributes() != null && - !StringUtils.isEmpty(instance.getAttributes().get(StreamRuntimePropertyKeys.ATTRIBUTE_SKIPPER_APPLICATION_NAME))) ? + StringUtils.hasText(instance.getAttributes().get(StreamRuntimePropertyKeys.ATTRIBUTE_SKIPPER_APPLICATION_NAME))) ? instance.getAttributes().get(StreamRuntimePropertyKeys.ATTRIBUTE_SKIPPER_APPLICATION_NAME) : NO_INSTANCES; } diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/DetailedAppRegistrationResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/DetailedAppRegistrationResource.java index 18d2ece755..46d9d5dee3 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/DetailedAppRegistrationResource.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/DetailedAppRegistrationResource.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2016 the original author or authors. + * Copyright 2015-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,8 +17,10 @@ package org.springframework.cloud.dataflow.rest.resource; import java.util.ArrayList; +import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Set; import org.springframework.boot.configurationmetadata.ConfigurationMetadataProperty; @@ -55,6 +57,11 @@ public class DetailedAppRegistrationResource extends AppRegistrationResource { */ private final Set outboundPortNames = new HashSet<>(); + /** + * Option groups configured for the app. + */ + private final Map> optionGroups = new HashMap<>(); + /** * Default constructor for serialization frameworks. 
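AppStatusResource swaps commons-lang StringUtils.isEmpty for Spring's own StringUtils.hasText. The replacement is deliberately stricter, not a literal inversion: hasText also rejects whitespace-only values. A quick sketch:

import org.springframework.util.StringUtils;

class HasTextSketch {
	public static void main(String[] args) {
		System.out.println(StringUtils.hasText(null));  // false
		System.out.println(StringUtils.hasText("   ")); // false: blank is rejected too
		System.out.println(StringUtils.hasText("log")); // true
	}
}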
*/ @@ -70,8 +77,8 @@ protected DetailedAppRegistrationResource() { * @param coordinates Maven coordinates for the application artifact * @param isDefault is this the default app */ - public DetailedAppRegistrationResource(String name, String type, String version, String coordinates, Boolean isDefault) { - super(name, type, version, coordinates, isDefault); + public DetailedAppRegistrationResource(String name, String type, String version, String coordinates, Boolean isDefault) { + super(name, type, version, coordinates, null, isDefault); } /** @@ -82,7 +89,7 @@ public DetailedAppRegistrationResource(String name, String type, String version, * data */ public DetailedAppRegistrationResource(AppRegistrationResource resource) { - super(resource.getName(), resource.getType(), resource.getVersion(), resource.getUri(), resource.getDefaultVersion()); + super(resource.getName(), resource.getType(), resource.getVersion(), resource.getUri(), resource.getMetaDataUri(), resource.getDefaultVersion()); } /** @@ -150,6 +157,15 @@ public String getShortDescription() { return shortDescription; } + /** + * Return the option groups. + * + * @return the option groups + */ + public Map> getOptionGroups() { + return optionGroups; + } + /** * Set a description for this application. * diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionResource.java index cc5dd8038d..ea2807a041 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionResource.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionResource.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2019 the original author or authors. + * Copyright 2016-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
@@ -16,8 +16,10 @@ package org.springframework.cloud.dataflow.rest.resource; -import java.text.DateFormat; -import java.util.Date; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; import java.util.Properties; import java.util.TimeZone; @@ -29,7 +31,6 @@ import org.springframework.batch.support.PropertiesConverter; import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; import org.springframework.cloud.dataflow.rest.job.support.JobUtils; -import org.springframework.cloud.dataflow.rest.job.support.TimeUtils; import org.springframework.cloud.dataflow.rest.util.ArgumentSanitizer; import org.springframework.hateoas.PagedModel; import org.springframework.hateoas.RepresentationModel; @@ -41,16 +42,15 @@ * @author Glenn Renfro * @author Gunnar Hillert * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ public class JobExecutionResource extends RepresentationModel { private static final String LINE_SEPARATOR = System.getProperty("line.separator"); - private DateFormat dateFormat = TimeUtils.getDefaultDateFormat(); + private DateTimeFormatter dateFormat = DateTimeFormatter.ISO_LOCAL_DATE; - private DateFormat timeFormat = TimeUtils.getDefaultTimeFormat(); - - private DateFormat durationFormat = TimeUtils.getDefaultDurationFormat(); + private DateTimeFormatter timeFormat = DateTimeFormatter.ISO_LOCAL_TIME; private Long executionId; @@ -104,7 +104,7 @@ public JobExecutionResource(TaskJobExecution taskJobExecution, TimeZone timeZone this.executionId = jobExecution.getId(); this.jobId = jobExecution.getJobId(); this.stepExecutionCount = taskJobExecution.getStepExecutionCount(); - this.jobParameters =converter.getProperties(jobExecution.getJobParameters()); + this.jobParameters = converter.getProperties(jobExecution.getJobParameters()); this.jobParametersString = fromJobParameters( this.argumentSanitizer.sanitizeJobParameters(jobExecution.getJobParameters())); this.defined = taskJobExecution.isTaskDefined(); @@ -114,23 +114,19 @@ public JobExecutionResource(TaskJobExecution taskJobExecution, TimeZone timeZone this.restartable = JobUtils.isJobExecutionRestartable(jobExecution); this.abandonable = JobUtils.isJobExecutionAbandonable(jobExecution); this.stoppable = JobUtils.isJobExecutionStoppable(jobExecution); - } - else { + } else { this.name = "?"; } - // Duration is always in GMT - durationFormat.setTimeZone(TimeUtils.getDefaultTimeZone()); - // The others can be localized - timeFormat.setTimeZone(timeZone); - dateFormat.setTimeZone(timeZone); if (jobExecution.getStartTime() != null) { - this.startDate = dateFormat.format(jobExecution.getStartTime()); - this.startTime = timeFormat.format(jobExecution.getStartTime()); - Date endTime = jobExecution.getEndTime() != null ? jobExecution.getEndTime() : new Date(); - this.duration = durationFormat.format(new Date(endTime.getTime() - jobExecution.getStartTime().getTime())); + // We assume the startTime is date time from current timezone. + // if the timezone provided is different from the current we have to assume we need a representation in that timezone. + this.startDate = dateFormat.format(ZonedDateTime.of(jobExecution.getStartTime(), TimeZone.getDefault().toZoneId()).withZoneSameInstant(timeZone.toZoneId())); + this.startTime = timeFormat.format(ZonedDateTime.of(jobExecution.getStartTime(), TimeZone.getDefault().toZoneId()).withZoneSameInstant(timeZone.toZoneId())); + // We assume start time and end time are from current timezone. 
+ LocalDateTime endTime = jobExecution.getEndTime() != null ? jobExecution.getEndTime() : LocalDateTime.now(); + this.duration = String.valueOf(Duration.between(jobExecution.getStartTime(), endTime)); } - } public TimeZone getTimeZone() { diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionThinResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionThinResource.java index 6fb8f7b2fe..3a0ad85929 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionThinResource.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionThinResource.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2019 the original author or authors. + * Copyright 2018-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,8 +16,10 @@ package org.springframework.cloud.dataflow.rest.resource; -import java.text.DateFormat; -import java.util.Date; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; +import java.time.temporal.ChronoUnit; import java.util.Properties; import java.util.TimeZone; @@ -40,6 +42,7 @@ * A HATEOAS representation of a JobExecution without the StepExecutions. * * @author Glenn Renfro + * @author Corneil du Plessis * * @since 2.0 */ @@ -47,12 +50,6 @@ public class JobExecutionThinResource extends RepresentationModel { + private long executionId; + + public LaunchResponseResource() { + } + + public LaunchResponseResource(long executionId) { + this.executionId = executionId; + } + + public long getExecutionId() { + return executionId; + } + + public void setExecutionId(long executionId) { + this.executionId = executionId; + } + +} diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/StepExecutionResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/StepExecutionResource.java index 3058f81999..b392fe94a7 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/StepExecutionResource.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/StepExecutionResource.java @@ -56,7 +56,7 @@ private StepExecutionResource() { this.stepExecution = null; this.jobExecutionId = null; this.stepType = null; - } + } /** * @return The jobExecutionId, which will never be null diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResource.java index 0d91ce1513..3241eae6e8 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResource.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResource.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2020 the original author or authors. + * Copyright 2016-2021 the original author or authors. 
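JobExecutionResource now renders the start date and time by interpreting the stored LocalDateTime in the server's default zone and re-expressing the same instant in the caller's requested zone. A sketch of that conversion with illustrative values:

import java.time.LocalDateTime;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.TimeZone;

class TimeZoneSketch {
	public static void main(String[] args) {
		LocalDateTime start = LocalDateTime.of(2024, 1, 15, 12, 0);
		TimeZone target = TimeZone.getTimeZone("America/New_York");
		// Attach the server's default zone, then shift to the requested zone
		// while keeping the underlying instant unchanged.
		ZonedDateTime shifted = ZonedDateTime.of(start, TimeZone.getDefault().toZoneId())
				.withZoneSameInstant(target.toZoneId());
		System.out.println(DateTimeFormatter.ISO_LOCAL_DATE.format(shifted));
		System.out.println(DateTimeFormatter.ISO_LOCAL_TIME.format(shifted));
	}
}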
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,19 +16,21 @@ package org.springframework.cloud.dataflow.rest.resource; +import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Collections; -import java.util.Date; import java.util.List; import java.util.Map; import org.springframework.batch.core.JobExecution; import org.springframework.cloud.dataflow.core.TaskManifest; +import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; import org.springframework.cloud.dataflow.rest.job.TaskJobExecutionRel; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.hateoas.PagedModel; import org.springframework.hateoas.RepresentationModel; import org.springframework.util.Assert; +import org.springframework.util.StringUtils; /** * A HATEOAS representation of a TaskExecution. @@ -36,6 +38,7 @@ * @author Glenn Renfro * @author Gunnar Hillert * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ public class TaskExecutionResource extends RepresentationModel { @@ -57,12 +60,12 @@ public class TaskExecutionResource extends RepresentationModel(); } @@ -136,6 +143,9 @@ public TaskExecutionResource(TaskJobExecutionRel taskJobExecutionRel) { this.endTime = taskJobExecutionRel.getTaskExecution().getEndTime(); this.errorMessage = taskJobExecutionRel.getTaskExecution().getErrorMessage(); this.externalExecutionId = taskJobExecutionRel.getTaskExecution().getExternalExecutionId(); + if(taskJobExecutionRel.getTaskManifest() != null) { + this.platformName = taskJobExecutionRel.getTaskManifest().getPlatformName(); + } if (taskJobExecutionRel.getJobExecutionIds() == null) { this.jobExecutionIds = Collections.emptyList(); } @@ -151,6 +161,9 @@ public TaskExecutionResource(TaskJobExecutionRel taskJobExecutionRel) { if(taskJobExecutionRel.getTaskManifest() != null) { this.platformName = taskJobExecutionRel.getTaskManifest().getPlatformName(); } + this.composedTaskJobExecutionStatus = (taskJobExecutionRel.getComposedTaskJobExecution() != null) ? + taskJobExecutionRel.getComposedTaskJobExecution().getJobExecution().getExitStatus().getExitCode() : + null; } /** @@ -158,8 +171,9 @@ public TaskExecutionResource(TaskJobExecutionRel taskJobExecutionRel) { * {@link TaskExecution}. * * @param taskExecution contains the {@link TaskExecution} + * @param composedTaskJobExecution the optional composed task execution. */ - public TaskExecutionResource(TaskExecution taskExecution) { + public TaskExecutionResource(TaskExecution taskExecution, TaskJobExecution composedTaskJobExecution) { Assert.notNull(taskExecution, "taskExecution must not be null"); this.executionId = taskExecution.getExecutionId(); this.exitCode = taskExecution.getExitCode(); @@ -170,6 +184,9 @@ public TaskExecutionResource(TaskExecution taskExecution) { this.endTime = taskExecution.getEndTime(); this.errorMessage = taskExecution.getErrorMessage(); this.externalExecutionId = taskExecution.getExternalExecutionId(); + this.composedTaskJobExecutionStatus = (composedTaskJobExecution != null) ? + composedTaskJobExecution.getJobExecution().getExitStatus().getExitCode() : + null; } /** @@ -178,8 +195,9 @@ public TaskExecutionResource(TaskExecution taskExecution) { * * @param taskExecution contains the {@link TaskExecution} * @param taskManifest contains the (@link TaskManifest} + * @param composedTaskJobExecution The optional composed task execution. 
*/ - public TaskExecutionResource(TaskExecution taskExecution, TaskManifest taskManifest) { + public TaskExecutionResource(TaskExecution taskExecution, TaskManifest taskManifest, TaskJobExecution composedTaskJobExecution) { Assert.notNull(taskExecution, "taskExecution must not be null"); Assert.notNull(taskManifest, "taskManifest must not be null"); this.executionId = taskExecution.getExecutionId(); @@ -194,6 +212,9 @@ public TaskExecutionResource(TaskExecution taskExecution, TaskManifest taskManif this.resourceUrl = taskManifest.getTaskDeploymentRequest().getResource().toString(); this.appProperties = taskManifest.getTaskDeploymentRequest().getDefinition().getProperties(); this.deploymentProperties = taskManifest.getTaskDeploymentRequest().getDeploymentProperties(); + this.composedTaskJobExecutionStatus = (composedTaskJobExecution != null) ? + composedTaskJobExecution.getJobExecution().getExitStatus().getExitCode() : + null; } public long getExecutionId() { @@ -212,11 +233,11 @@ public String getTaskName() { return taskName; } - public Date getStartTime() { + public LocalDateTime getStartTime() { return startTime; } - public Date getEndTime() { + public LocalDateTime getEndTime() { return endTime; } @@ -264,6 +285,10 @@ public void setPlatformName(String platformName) { this.platformName = platformName; } + public void setTaskExecutionStatus(String taskExecutionStatus) { + this.taskExecutionStatus = taskExecutionStatus; + } + /** * Returns the calculated status of this {@link TaskExecution}. * @@ -279,6 +304,9 @@ public void setPlatformName(String platformName) { * @return TaskExecutionStatus, never null */ public TaskExecutionStatus getTaskExecutionStatus() { + if (StringUtils.hasText(this.taskExecutionStatus)) { + return TaskExecutionStatus.valueOf(this.taskExecutionStatus); + } if (this.startTime == null) { return TaskExecutionStatus.UNKNOWN; } @@ -286,6 +314,13 @@ public TaskExecutionStatus getTaskExecutionStatus() { return TaskExecutionStatus.RUNNING; } else { + if (this.composedTaskJobExecutionStatus != null) { + return (this.composedTaskJobExecutionStatus.equals("ABANDONED") || + this.composedTaskJobExecutionStatus.equals("FAILED") || + this.composedTaskJobExecutionStatus.equals("STOPPED")) ? + TaskExecutionStatus.ERROR : TaskExecutionStatus.COMPLETE; + } + return (this.exitCode == null) ? TaskExecutionStatus.RUNNING : ((this.exitCode == 0) ? TaskExecutionStatus.COMPLETE : TaskExecutionStatus.ERROR); } diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionThinResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionThinResource.java new file mode 100644 index 0000000000..f7598bede8 --- /dev/null +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionThinResource.java @@ -0,0 +1,187 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
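The status derivation above gives the composed-task job's exit status priority over the numeric exit code. A condensed, hypothetical restatement of the rule, with names local to the sketch:

class TaskStatusSketch {
	enum TaskExecutionStatus { UNKNOWN, RUNNING, COMPLETE, ERROR }

	// Mirrors the resource logic: a composed task is in error when its backing
	// job execution ended ABANDONED, FAILED or STOPPED; otherwise it is complete.
	static TaskExecutionStatus derive(String ctrExitStatus, Integer exitCode) {
		if (ctrExitStatus != null) {
			return switch (ctrExitStatus) {
				case "ABANDONED", "FAILED", "STOPPED" -> TaskExecutionStatus.ERROR;
				default -> TaskExecutionStatus.COMPLETE;
			};
		}
		if (exitCode == null) {
			return TaskExecutionStatus.RUNNING;
		}
		return exitCode == 0 ? TaskExecutionStatus.COMPLETE : TaskExecutionStatus.ERROR;
	}

	public static void main(String[] args) {
		System.out.println(derive("FAILED", 0)); // ERROR: composed status wins
		System.out.println(derive(null, 0));     // COMPLETE
	}
}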
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.rest.resource; + +import java.time.LocalDateTime; + +import org.springframework.cloud.dataflow.core.ThinTaskExecution; +import org.springframework.hateoas.PagedModel; +import org.springframework.hateoas.RepresentationModel; + + +/** + * This resource is a match for AggregateTaskExecution and should satisfy UI paging. + * @author Corneil du Plessis + */ +public class TaskExecutionThinResource extends RepresentationModel { + /** + * The unique id associated with the task execution. + */ + private long executionId; + + /** + * The parent task execution id. + */ + private Long parentExecutionId; + + /** + * The recorded exit code for the task. + */ + private Integer exitCode; + + /** + * User defined name for the task. + */ + private String taskName; + + /** + * Time of when the task was started. + */ + private LocalDateTime startTime; + + /** + * Timestamp of when the task was completed/terminated. + */ + private LocalDateTime endTime; + + /** + * Message returned from the task or stacktrace. + */ + private String exitMessage; + + private String externalExecutionId; + + + private String errorMessage; + + private String composedTaskJobExecutionStatus; + + public TaskExecutionThinResource() { + } + + public TaskExecutionThinResource(ThinTaskExecution taskExecution) { + this.executionId = taskExecution.getExecutionId(); + this.taskName = taskExecution.getTaskName(); + this.externalExecutionId = taskExecution.getExternalExecutionId(); + this.parentExecutionId =taskExecution.getParentExecutionId(); + this.startTime = taskExecution.getStartTime(); + this.endTime = taskExecution.getEndTime(); + this.exitCode = taskExecution.getExitCode(); + this.exitMessage = taskExecution.getExitMessage(); + this.errorMessage = taskExecution.getErrorMessage(); + this.composedTaskJobExecutionStatus = taskExecution.getCtrTaskStatus(); + } + + public long getExecutionId() { + return executionId; + } + + public void setExecutionId(long executionId) { + this.executionId = executionId; + } + + public Long getParentExecutionId() { + return parentExecutionId; + } + + public void setParentExecutionId(Long parentExecutionId) { + this.parentExecutionId = parentExecutionId; + } + + public Integer getExitCode() { + return exitCode; + } + + public void setExitCode(Integer exitCode) { + this.exitCode = exitCode; + } + + public String getTaskName() { + return taskName; + } + + public void setTaskName(String taskName) { + this.taskName = taskName; + } + + public LocalDateTime getStartTime() { + return startTime; + } + + public void setStartTime(LocalDateTime startTime) { + this.startTime = startTime; + } + + public LocalDateTime getEndTime() { + return endTime; + } + + public void setEndTime(LocalDateTime endTime) { + this.endTime = endTime; + } + + public String getExitMessage() { + return exitMessage; + } + + public void setExitMessage(String exitMessage) { + this.exitMessage = exitMessage; + } + + public String getExternalExecutionId() { + return externalExecutionId; + } + + public void setExternalExecutionId(String externalExecutionId) { + this.externalExecutionId = externalExecutionId; + } + + public String getErrorMessage() { + return errorMessage; + } + + public void setErrorMessage(String errorMessage) { + this.errorMessage = errorMessage; + } + + public String getComposedTaskJobExecutionStatus() { + return composedTaskJobExecutionStatus; + } + + public void 
setComposedTaskJobExecutionStatus(String composedTaskJobExecutionStatus) { + this.composedTaskJobExecutionStatus = composedTaskJobExecutionStatus; + } + + public TaskExecutionStatus getTaskExecutionStatus() { + if (this.startTime == null) { + return TaskExecutionStatus.UNKNOWN; + } + if (this.endTime == null) { + return TaskExecutionStatus.RUNNING; + } + if (this.composedTaskJobExecutionStatus != null) { + return (this.composedTaskJobExecutionStatus.equals("ABANDONED") || + this.composedTaskJobExecutionStatus.equals("FAILED") || + this.composedTaskJobExecutionStatus.equals("STOPPED")) ? + TaskExecutionStatus.ERROR : TaskExecutionStatus.COMPLETE; + } + return (this.exitCode == null) ? TaskExecutionStatus.RUNNING : + ((this.exitCode == 0) ? TaskExecutionStatus.COMPLETE : TaskExecutionStatus.ERROR); + } + + public static class Page extends PagedModel { + } +} diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/about/AboutResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/about/AboutResource.java index 9c1133e982..188518951c 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/about/AboutResource.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/about/AboutResource.java @@ -16,12 +16,16 @@ package org.springframework.cloud.dataflow.rest.resource.about; +import java.util.HashMap; +import java.util.Map; + import org.springframework.hateoas.RepresentationModel; /** * Provides meta-information about the Spring Cloud Data Flow server. * * @author Gunnar Hillert + * @author Felipe Gutierrez */ public class AboutResource extends RepresentationModel { @@ -35,6 +39,8 @@ public class AboutResource extends RepresentationModel { private MonitoringDashboardInfo monitoringDashboardInfo = new MonitoringDashboardInfo(); + private Map gitAndBuildInfo = new HashMap<>(); + /** * Default constructor for serialization frameworks. */ @@ -80,4 +86,12 @@ public MonitoringDashboardInfo getMonitoringDashboardInfo() { public void setMonitoringDashboardInfo(MonitoringDashboardInfo monitoringDashboardInfo) { this.monitoringDashboardInfo = monitoringDashboardInfo; } + + public Map getGitAndBuildInfo() { + return gitAndBuildInfo; + } + + public void setGitAndBuildInfo(Map gitAndBuildInfo) { + this.gitAndBuildInfo = gitAndBuildInfo; + } } diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/about/MonitoringDashboardInfo.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/about/MonitoringDashboardInfo.java index e748fb062e..d7cc9ad2f0 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/about/MonitoringDashboardInfo.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/about/MonitoringDashboardInfo.java @@ -16,6 +16,8 @@ package org.springframework.cloud.dataflow.rest.resource.about; +import java.util.Locale; + import org.springframework.util.StringUtils; /** @@ -76,7 +78,7 @@ public void setDashboardType(MonitoringDashboardType dashboardType) { } public String getSource() { - return StringUtils.isEmpty(source) ? source : source.toLowerCase(); + return StringUtils.isEmpty(source) ? 
source : source.toLowerCase(Locale.ROOT); } public void setSource(String source) { diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/support/ExitStatusJacksonMixIn.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/ExitStatusJacksonMixIn.java similarity index 94% rename from spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/support/ExitStatusJacksonMixIn.java rename to spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/ExitStatusJacksonMixIn.java index aeb48cc1f3..af22f79223 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/support/ExitStatusJacksonMixIn.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/ExitStatusJacksonMixIn.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.springframework.cloud.dataflow.rest.client.support; +package org.springframework.cloud.dataflow.rest.support.jackson; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/Jackson2DataflowModule.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/Jackson2DataflowModule.java new file mode 100644 index 0000000000..4349bd96c6 --- /dev/null +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/Jackson2DataflowModule.java @@ -0,0 +1,50 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.rest.support.jackson; + +import com.fasterxml.jackson.core.Version; +import com.fasterxml.jackson.databind.module.SimpleModule; + +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobInstance; +import org.springframework.batch.core.JobParameter; +import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.StepExecution; +import org.springframework.batch.item.ExecutionContext; +import org.springframework.cloud.dataflow.rest.job.StepExecutionHistory; + +/** + * Jackson 2 module to handle dataflow related instances like batch. 
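getSource() now lower-cases with an explicit locale. The default-locale overload is locale-sensitive and famously misbehaves for dotted/dotless i under a Turkish default locale; Locale.ROOT keeps the result stable. A small demonstration:

import java.util.Locale;

class LocaleSketch {
	public static void main(String[] args) {
		// Under a Turkish locale, uppercase 'I' lower-cases to dotless 'ı'.
		System.out.println("INFLUXDB".toLowerCase(Locale.forLanguageTag("tr"))); // ınfluxdb
		System.out.println("INFLUXDB".toLowerCase(Locale.ROOT));                 // influxdb
	}
}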
+ * + * @author Janne Valkealahti + */ +public class Jackson2DataflowModule extends SimpleModule { + + public Jackson2DataflowModule() { + super("spring-cloud-dataflow-module", new Version(1, 0, 0, null, "org.springframework.cloud", "spring-cloud-dataflow")); + + setMixInAnnotation(JobExecution.class, JobExecutionJacksonMixIn.class); + setMixInAnnotation(JobParameters.class, JobParametersJacksonMixIn.class); + setMixInAnnotation(JobParameter.class, JobParameterJacksonMixIn.class); + setMixInAnnotation(JobInstance.class, JobInstanceJacksonMixIn.class); + setMixInAnnotation(ExitStatus.class, ExitStatusJacksonMixIn.class); + setMixInAnnotation(StepExecution.class, StepExecutionJacksonMixIn.class); + setMixInAnnotation(ExecutionContext.class, ExecutionContextJacksonMixIn.class); + setMixInAnnotation(StepExecutionHistory.class, StepExecutionHistoryJacksonMixIn.class); + } +} diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/support/JobExecutionJacksonMixIn.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/JobExecutionJacksonMixIn.java similarity index 94% rename from spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/support/JobExecutionJacksonMixIn.java rename to spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/JobExecutionJacksonMixIn.java index e7be99014f..023f0d16dd 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/support/JobExecutionJacksonMixIn.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/JobExecutionJacksonMixIn.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.springframework.cloud.dataflow.rest.client.support; +package org.springframework.cloud.dataflow.rest.support.jackson; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/support/JobInstanceJacksonMixIn.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/JobInstanceJacksonMixIn.java similarity index 94% rename from spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/support/JobInstanceJacksonMixIn.java rename to spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/JobInstanceJacksonMixIn.java index 6e7993aa95..5170d1ba7f 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/support/JobInstanceJacksonMixIn.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/JobInstanceJacksonMixIn.java @@ -14,7 +14,7 @@ * limitations under the License. 
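Jackson2DataflowModule centralizes the batch mix-ins that previously lived in the rest client. The mix-in mechanism itself, reduced to a self-contained sketch with a hypothetical Account class:

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;

class MixInSketch {
	static class Account {
		public String user = "root";
		public String password = "secret";
	}

	// A mix-in attaches Jackson annotations to a class you cannot (or do not want to) edit.
	@JsonIgnoreProperties({"password"})
	abstract static class AccountMixIn {
	}

	public static void main(String[] args) throws Exception {
		SimpleModule module = new SimpleModule();
		module.setMixInAnnotation(Account.class, AccountMixIn.class);
		ObjectMapper mapper = new ObjectMapper().registerModule(module);
		System.out.println(mapper.writeValueAsString(new Account())); // {"user":"root"}
	}
}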
*/ -package org.springframework.cloud.dataflow.rest.client.support; +package org.springframework.cloud.dataflow.rest.support.jackson; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/support/JobParameterJacksonDeserializer.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/JobParameterJacksonDeserializer.java similarity index 56% rename from spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/support/JobParameterJacksonDeserializer.java rename to spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/JobParameterJacksonDeserializer.java index 25e20dd300..4e82edf565 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/support/JobParameterJacksonDeserializer.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/JobParameterJacksonDeserializer.java @@ -14,17 +14,15 @@ * limitations under the License. */ -package org.springframework.cloud.dataflow.rest.client.support; +package org.springframework.cloud.dataflow.rest.support.jackson; import java.io.IOException; import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.ObjectCodec; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonDeserializer; import com.fasterxml.jackson.databind.JsonNode; -import org.joda.time.DateTime; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -42,38 +40,30 @@ public class JobParameterJacksonDeserializer extends JsonDeserializer> getIdentifyingParameters(); } diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/support/StepExecutionHistoryJacksonMixIn.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/StepExecutionHistoryJacksonMixIn.java similarity index 85% rename from spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/support/StepExecutionHistoryJacksonMixIn.java rename to spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/StepExecutionHistoryJacksonMixIn.java index 1e65150a8a..31aedd841c 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/support/StepExecutionHistoryJacksonMixIn.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/StepExecutionHistoryJacksonMixIn.java @@ -14,10 +14,9 @@ * limitations under the License. 
*/ -package org.springframework.cloud.dataflow.rest.client.support; +package org.springframework.cloud.dataflow.rest.support.jackson; import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; /** @@ -28,7 +27,6 @@ * @since 1.0 */ -@JsonIgnoreProperties({ "count", "durationPerRead" }) public abstract class StepExecutionHistoryJacksonMixIn { @JsonCreator diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/StepExecutionJacksonMixIn.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/StepExecutionJacksonMixIn.java index b9e3a1129a..d71b9c70b7 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/StepExecutionJacksonMixIn.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/support/jackson/StepExecutionJacksonMixIn.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2019 the original author or authors. + * Copyright 2016-2020 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,21 +16,36 @@ package org.springframework.cloud.dataflow.rest.support.jackson; -import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty.Access; -import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.StepExecution; /** - * Jackson MixIn for {@link StepExecution} serialization. This MixIn excludes the - * {@link JobExecution} from being serialized. This is due to the fact that it would cause - * a {@link StackOverflowError} due to a circular reference. + * Jackson MixIn for {@link StepExecution} de-serialization. 
* * @author Gunnar Hillert * @since 1.0 */ +@JsonIgnoreProperties({ "jobExecution" }) public abstract class StepExecutionJacksonMixIn { - @JsonIgnore - abstract JobExecution getJobExecution(); + @JsonCreator + StepExecutionJacksonMixIn(@JsonProperty("stepName") String stepName) { + } + + @JsonProperty(access = Access.READ_ONLY) + abstract int getSkipCount(); + + @JsonProperty(access = Access.READ_ONLY) + abstract String getSummary(); + + @JsonProperty(access = Access.READ_ONLY) + abstract JobParameters getJobParameters(); + + @JsonProperty(access = Access.READ_ONLY) + abstract Long getJobExecutionId(); } diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/ArgumentSanitizer.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/ArgumentSanitizer.java index a320fc7b06..4ee53de6ac 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/ArgumentSanitizer.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/ArgumentSanitizer.java @@ -17,18 +17,27 @@ package org.springframework.cloud.dataflow.rest.util; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.regex.Pattern; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import org.springframework.batch.core.JobParameter; import org.springframework.batch.core.JobParameters; import org.springframework.cloud.dataflow.core.DefinitionUtils; import org.springframework.cloud.dataflow.core.TaskDefinition; import org.springframework.cloud.dataflow.core.dsl.TaskParser; import org.springframework.cloud.dataflow.core.dsl.graph.Graph; +import org.springframework.http.HttpHeaders; import org.springframework.util.CollectionUtils; import org.springframework.util.StringUtils; @@ -38,17 +47,26 @@ * @author Glenn Renfro * @author Gunnar Hillert * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ public class ArgumentSanitizer { + private final static Logger logger = LoggerFactory.getLogger(ArgumentSanitizer.class); - private static final String[] REGEX_PARTS = { "*", "$", "^", "+" }; + private static final String[] REGEX_PARTS = {"*", "$", "^", "+"}; private static final String REDACTION_STRING = "******"; - private static final String[] KEYS_TO_SANITIZE = { "username", "password", "secret", "key", "token", ".*credentials.*", - "vcap_services", "url" }; + private static final String[] KEYS_TO_SANITIZE = {"username", "password", "secret", "key", "token", ".*credentials.*", + "vcap_services", "url"}; + + private final static TypeReference> mapTypeReference = new TypeReference<>() { + }; + + private final ObjectMapper yamlMapper = new ObjectMapper(new YAMLFactory()); - private Pattern[] keysToSanitize; + private final ObjectMapper jsonMapper = new ObjectMapper(); + + private final Pattern[] keysToSanitize; public ArgumentSanitizer() { this.keysToSanitize = new Pattern[KEYS_TO_SANITIZE.length]; @@ -80,6 +98,10 @@ private boolean isRegex(String value) { * @return the argument with a potentially sanitized value */ public String sanitize(String argument) { + // Oracle handles an empty string as a 
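The reworked StepExecution mix-in pairs a @JsonCreator constructor with READ_ONLY accessors, so derived values are serialized but never consumed when reading JSON back in. The pattern standalone, with a hypothetical Step class:

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonProperty.Access;
import com.fasterxml.jackson.databind.ObjectMapper;

class ReadOnlySketch {
	static class Step {
		private final String stepName;

		@JsonCreator
		Step(@JsonProperty("stepName") String stepName) {
			this.stepName = stepName;
		}

		public String getStepName() {
			return stepName;
		}

		// Serialized on the way out, ignored on the way back in.
		@JsonProperty(access = Access.READ_ONLY)
		public String getSummary() {
			return "step=" + stepName;
		}
	}

	public static void main(String[] args) throws Exception {
		ObjectMapper mapper = new ObjectMapper();
		String json = mapper.writeValueAsString(new Step("load"));
		System.out.println(json); // e.g. {"stepName":"load","summary":"step=load"}
		System.out.println(mapper.readValue(json, Step.class).getStepName()); // load
	}
}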
null. + if (argument == null) { + return ""; + } int indexOfFirstEqual = argument.indexOf("="); if (indexOfFirstEqual == -1) { return argument; @@ -95,7 +117,7 @@ public String sanitize(String argument) { /** * Replaces a potential secure value with "******". * - * @param key to check for sensitive words. + * @param key to check for sensitive words. * @param value the argument to cleanse. * @return the argument with a potentially sanitized value */ @@ -118,13 +140,12 @@ public String sanitize(String key, String value) { * @return the sanitized job parameters */ public JobParameters sanitizeJobParameters(JobParameters jobParameters) { - Map newJobParameters = new HashMap<>(); - jobParameters.getParameters().forEach( (key, jobParameter) -> { + Map> newJobParameters = new HashMap<>(); + jobParameters.getParameters().forEach((key, jobParameter) -> { String updatedKey = !jobParameter.isIdentifying() ? "-" + key : key; - if (jobParameter.getType().equals(JobParameter.ParameterType.STRING)) { - newJobParameters.put(updatedKey, new JobParameter(this.sanitize(key, jobParameter.toString()))); - } - else { + if (jobParameter.getType().isAssignableFrom(String.class)) { + newJobParameters.put(updatedKey, new JobParameter<>(this.sanitize(key, jobParameter.toString()), String.class)); + } else { newJobParameters.put(updatedKey, jobParameter); } }); @@ -138,17 +159,15 @@ public JobParameters sanitizeJobParameters(JobParameters jobParameters) { * @return Task definition text that has sensitive data redacted. */ public String sanitizeTaskDsl(TaskDefinition taskDefinition) { - if(StringUtils.isEmpty(taskDefinition.getDslText())) { + if (!StringUtils.hasText(taskDefinition.getDslText())) { return taskDefinition.getDslText(); } TaskParser taskParser = new TaskParser(taskDefinition.getTaskName(), taskDefinition.getDslText(), true, true); Graph graph = taskParser.parse().toGraph(); - graph.getNodes().stream().forEach(node -> { + graph.getNodes().forEach(node -> { if (node.properties != null) { - node.properties.keySet().stream().forEach(key -> { - node.properties.put(key, - DefinitionUtils.autoQuotes(sanitize(key, node.properties.get(key)))); - }); + node.properties.keySet().forEach(key -> node.properties.put(key, + DefinitionUtils.autoQuotes(sanitize(key, node.properties.get(key))))); } }); return graph.toDSLText(); @@ -157,13 +176,14 @@ public String sanitizeTaskDsl(TaskDefinition taskDefinition) { /** * For all sensitive properties (e.g. key names containing words like password, secret, * key, token) replace the value with '*****' string + * * @param properties to be sanitized * @return sanitized properties */ public Map sanitizeProperties(Map properties) { if (!CollectionUtils.isEmpty(properties)) { final Map sanitizedProperties = new LinkedHashMap<>(properties.size()); - for (Map.Entry property : properties.entrySet()) { + for (Map.Entry property : properties.entrySet()) { sanitizedProperties.put(property.getKey(), this.sanitize(property.getKey(), property.getValue())); } return sanitizedProperties; @@ -174,6 +194,7 @@ public Map sanitizeProperties(Map properties) { /** * For all sensitive arguments (e.g. 
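sanitize(String) splits each argument on the first '=' only, so values that themselves contain '=' survive intact before the key is matched against the sensitive-word patterns. A condensed sketch of that logic, with the pattern list abbreviated:

import java.util.regex.Pattern;

class RedactionSketch {
	private static final Pattern SENSITIVE =
			Pattern.compile(".*(password|secret|token|key).*", Pattern.CASE_INSENSITIVE);

	static String sanitize(String argument) {
		int indexOfFirstEqual = argument.indexOf('=');
		if (indexOfFirstEqual == -1) {
			return argument;
		}
		String key = argument.substring(0, indexOfFirstEqual);
		String value = argument.substring(indexOfFirstEqual + 1);
		return SENSITIVE.matcher(key).matches() ? key + "=******" : key + "=" + value;
	}

	public static void main(String[] args) {
		System.out.println(sanitize("--spring.datasource.password=p@ss=word")); // redacted
		System.out.println(sanitize("--server.port=25553"));                    // untouched
	}
}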
@@ -174,6 +194,7 @@ public Map<String, String> sanitizeProperties(Map<String, String> properties) {
 	/**
 	 * For all sensitive arguments (e.g. key names containing words like password, secret,
 	 * key, token) replace the value with '*****' string
+	 *
 	 * @param arguments to be sanitized
 	 * @return sanitized arguments
 	 */
@@ -187,4 +208,99 @@ public List<String> sanitizeArguments(List<String> arguments) {
 		}
 		return arguments;
 	}
+
+	public HttpHeaders sanitizeHeaders(HttpHeaders headers) {
+		HttpHeaders result = new HttpHeaders();
+		for (Map.Entry<String, List<String>> entry : headers.entrySet()) {
+			List<String> values = entry.getValue();
+			for (String value : values) {
+				result.add(entry.getKey(), sanitize(entry.getKey(), value));
+			}
+		}
+		return result;
+	}
+
+	/**
+	 * Will replace sensitive string value in the Map with '*****'
+	 *
+	 * @param input to be sanitized
+	 * @return the sanitized map.
+	 */
+	@SuppressWarnings("unchecked")
+	public Map<String, Object> sanitizeMap(Map<String, Object> input) {
+		Map<String, Object> result = new HashMap<>();
+		for (Map.Entry<String, Object> entry : input.entrySet()) {
+			if (entry.getValue() instanceof String) {
+				result.put(entry.getKey(), sanitize(entry.getKey(), (String) entry.getValue()));
+			} else if (entry.getValue() instanceof Map) {
+				Map<String, Object> map = (Map<String, Object>) entry.getValue();
+				result.put(entry.getKey(), sanitizeMap(map));
+			} else {
+				result.put(entry.getKey(), entry.getValue());
+			}
+		}
+		return result;
+	}
+
+	/**
+	 * Will replace the sensitive string fields with '*****'
+	 *
+	 * @param input to be sanitized
+	 * @return The sanitized JSON string
+	 * @throws JsonProcessingException from mapper.
+	 */
+	public String sanitizeJsonString(String input) throws JsonProcessingException {
+		if (input == null) {
+			return null;
+		}
+		Map<String, Object> data = jsonMapper.readValue(input, mapTypeReference);
+		return jsonMapper.writeValueAsString(sanitizeMap(data));
+	}
+
+	/**
+	 * Will replace the sensitive string fields with '*****'
+	 *
+	 * @param input to be sanitized
+	 * @return The sanitized YAML string
+	 * @throws JsonProcessingException from mapper
+	 */
+	public String sanitizeYamlString(String input) throws JsonProcessingException {
+		if (input == null) {
+			return null;
+		}
+		Map<String, Object> data = yamlMapper.readValue(input, mapTypeReference);
+		return yamlMapper.writeValueAsString(sanitizeMap(data));
+	}
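Note (not part of the patch): a sketch of the recursive map redaction added above; the nested structure is illustrative.

    Map<String, Object> input = new HashMap<>();
    input.put("username", "admin");
    input.put("spring", Map.of("datasource", Map.of("password", "s3cr3t")));
    // Nested maps are walked recursively; "username" and the nested "password" are redacted.
    Map<String, Object> clean = new ArgumentSanitizer().sanitizeMap(input);

    // The string variants parse into a Map, sanitize it, and re-serialize:
    String json = new ArgumentSanitizer()
            .sanitizeJsonString("{\"token\":\"abc\",\"region\":\"us-east-1\"}");
    // {"token":"******","region":"us-east-1"}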
+
+	/**
+	 * Will determine the type of data and treat as JSON or YAML to sanitize sensitive values.
+	 *
+	 * @param input to be sanitized
+	 * @return the sanitized string
+	 */
+	@SuppressWarnings("StringConcatenationArgumentToLogCall")
+	public String sanitizeJsonOrYamlString(String input) {
+		if (input == null) {
+			return null;
+		}
+		try { // Try parsing as JSON
+			return sanitizeJsonString(input);
+		} catch (Throwable x) {
+			logger.trace("Cannot parse as JSON:" + x);
+		}
+		try {
+			return sanitizeYamlString(input);
+		} catch (Throwable x) {
+			logger.trace("Cannot parse as YAML:" + x);
+		}
+		if (input.contains("\n")) {
+			//noinspection DataFlowIssue
+			return StringUtils.collectionToDelimitedString(sanitizeArguments(Arrays.asList(StringUtils.split(input, "\n"))), "\n");
+		}
+		if (input.contains("--")) {
+			//noinspection DataFlowIssue
+			return StringUtils.collectionToDelimitedString(sanitizeArguments(Arrays.asList(StringUtils.split(input, "--"))), "--");
+		}
+		return sanitize(input);
+	}
 }
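Note (not part of the patch): the method above cascades through formats; a sketch of the three branches, assuming parse failures behave as described (a non-mapping YAML scalar also throws and falls through).

    ArgumentSanitizer sanitizer = new ArgumentSanitizer();
    sanitizer.sanitizeJsonOrYamlString("{\"password\": \"a\"}");       // handled as JSON
    sanitizer.sanitizeJsonOrYamlString("password: a");                 // JSON fails, handled as YAML
    sanitizer.sanitizeJsonOrYamlString("--password=a --debug=true");   // falls back to "--" splitting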
diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtils.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtils.java
index e9ba376a78..6d5f3823f4 100644
--- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtils.java
+++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtils.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2017-2021 the original author or authors.
+ * Copyright 2017-2022 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -33,11 +33,14 @@
 import java.util.stream.Collectors;

 import org.apache.commons.io.FilenameUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 import org.springframework.beans.factory.config.YamlPropertiesFactoryBean;
 import org.springframework.core.io.FileSystemResource;
 import org.springframework.util.StringUtils;

+
 /**
  * Provides utility methods for formatting and parsing deployment properties.
  *
@@ -47,9 +50,10 @@
  * @author Christian Tzolov
  * @author Gunnar Hillert
  * @author Ilayaperumal Gopinathan
+ * @author Glenn Renfro
  */
 public final class DeploymentPropertiesUtils {
-
+	private static final Logger logger = LoggerFactory.getLogger(DeploymentPropertiesUtils.class);
 	/**
 	 * Pattern used for parsing a String of command-line arguments.
 	 */
@@ -57,7 +61,7 @@ public final class DeploymentPropertiesUtils {
 			.compile("(\\s(?=" + "([^\\\"']*[\\\"'][^\\\"']*[\\\"'])*[^\\\"']*$))");

-	private static final String[] DEPLOYMENT_PROPERTIES_PREFIX ={"deployer", "app", "version", "scheduler", "spring.cloud.dataflow.task"};
+	private static final String[] DEPLOYMENT_PROPERTIES_PREFIX ={"deployer", "app", "version", "spring.cloud.dataflow.task"};

 	private DeploymentPropertiesUtils() {
 		// prevent instantiation
@@ -85,6 +89,7 @@ public static Map<String, String> parse(String s) {
 		for (String pair : pairs) {
 			addKeyValuePairAsProperty(pair, deploymentProperties);
 		}
+		logger.debug("parse:{}={}", s, deploymentProperties);
 		return deploymentProperties;
 	}
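Note (not part of the patch): a sketch of the parse() folding behaviour, mirroring the assertions in DeploymentPropertiesUtilsTests further down; the property names are illustrative.

    Map<String, String> props = DeploymentPropertiesUtils.parse(
            "app.time.format=YYYY, deployer.log.memory=1024m, app.time.zone=UTC,extra");
    // Comma-separated segments without a recognised 'app.'/'deployer.'/'version.' prefix
    // are folded into the previous value: app.time.zone -> "UTC,extra"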
@@ -108,23 +113,25 @@ public static List<String> parseParamList(String s, String delimiter) {
 		// get raw candidates as simple comma split
 		String[] candidates = StringUtils.delimitedListToStringArray(s, delimiter);
 		for (int i = 0; i < candidates.length; i++) {
-			if (i > 0 && !candidates[i].contains("=") || (i > 0 && candidates[i].contains("=") && !startsWithDeploymentPropertyPrefix(candidates[i]))) {
-				// we don't have '=' so this has to be latter parts of
-				// a comma delimited value, append it to previously added
-				// key/value pair.
-				// we skip first as we would not have anything to append to. this
-				// would happen if dep prop string is malformed and first given
-				// key/value pair is not actually a key/value.
-				pairs.set(pairs.size() - 1, pairs.get(pairs.size() - 1) + delimiter + candidates[i]);
-			}
-			else {
-				// we have a key/value pair having '=', or malformed first pair
-				if (!startsWithDeploymentPropertyPrefix(candidates[i])) {
-					throw new IllegalArgumentException(
-							"Only deployment property keys starting with 'app.' or 'scheduler' or 'deployer.' or 'version.'" +
-									" allowed.");
+			String candidate = candidates[i];
+			if(StringUtils.hasText(candidate)) {
+				if (i > 0 && !candidate.contains("=") || (i > 0 && candidate.contains("=") && !startsWithDeploymentPropertyPrefix(candidate))) {
+					// we don't have '=' so this has to be latter parts of
+					// a comma delimited value, append it to previously added
+					// key/value pair.
+					// we skip first as we would not have anything to append to. this
+					// would happen if dep prop string is malformed and first given
+					// key/value pair is not actually a key/value.
+					pairs.set(pairs.size() - 1, pairs.get(pairs.size() - 1) + delimiter + candidate);
+				} else {
+					// we have a key/value pair having '=', or malformed first pair
+					if (!startsWithDeploymentPropertyPrefix(candidate)) {
+						throw new IllegalArgumentException(
+								"Only deployment property keys starting with 'app.' or 'deployer.' or 'version.'" +
+										" allowed. Not " + candidate);
+					}
+					pairs.add(candidate);
 				}
-				pairs.add(candidates[i]);
 			}
 		}
@@ -154,7 +161,9 @@ public static List<String> parseArgumentList(String s, String delimiter) {
 		for (int i = 0; i < candidates.length; i++) {
 			int elementsInQuotesIndex = findEndToken(candidates, i) +1;
 			if (elementsInQuotesIndex > -1) {
-				pairs.add(candidates[i]);
+				if(!candidates[i].equals("")) {
+					pairs.add(candidates[i]);
+				}
 				i++;
 				for (; i < elementsInQuotesIndex; i++) {
 					pairs.set(pairs.size() - 1, pairs.get(pairs.size() - 1) + delimiter + candidates[i]);
@@ -174,10 +183,22 @@
 			}
 			else {
 				// we have a key/value pair having '=', or malformed first pair
-				pairs.add(candidates[i]);
+				if(!candidates[i].equals("")) {
+					int endToken = findEndToken(candidates, i);
+					if(endToken > -1) {
+						pairs.add(candidates[i] + " " + candidates[endToken]);
+						i = endToken;
+					}
+					else {
+						pairs.add(candidates[i]);
+					}
+				}
 			}
 		}
-	}
+		for(int i = 0; i < pairs.size(); i++) {
+			pairs.set(i, StringUtils.trimTrailingWhitespace(pairs.get(i)));
+		}
+	}
 	return pairs;
 }
@@ -265,9 +286,9 @@ public static void validateDeploymentProperties(Map<String, String> properties) {
 		for (Entry<String, String> property : properties.entrySet()) {
 			String key = property.getKey();
 			if (!key.startsWith("app.") && !key.startsWith("deployer.")
-					&& !key.startsWith("scheduler.") && !key.startsWith("version.")) {
+					&& !key.startsWith("version.")) {
 				throw new IllegalArgumentException(
-						"Only deployment property keys starting with 'app.', 'deployer.' or, 'scheduler.' allowed, got '" + key + "'");
+						"Only deployment property keys starting with 'app.' or 'deployer.' allowed, got '" + key + "'");
 			}
 		}
 	}
@@ -313,9 +334,9 @@ public static Map<String, String> extractAndQualifyDeployerProperties(Map<String, String> input, String appName) {
 		Map<String, String> result = new TreeMap<>(input).entrySet().stream()
 				.filter(kv -> kv.getKey().startsWith(wildcardPrefix) || kv.getKey().startsWith(appPrefix))
 				.collect(Collectors.toMap(kv -> kv.getKey().startsWith(wildcardPrefix)
 						? "spring.cloud.deployer." + kv.getKey().substring(wildcardLength)
-						: "spring.cloud.deployer." + kv.getKey().substring(appLength), kv -> kv.getValue(),
+						: "spring.cloud.deployer." + kv.getKey().substring(appLength), Entry::getValue,
 						(fromWildcard, fromApp) -> fromApp));
-
+		logger.debug("extractAndQualifyDeployerProperties:{}", result);
 		return result;
 	}
@@ -340,15 +361,16 @@ public static Map<String, String> qualifyDeployerProperties(Map<String, String> input, String appName) {
 				.filter(kv -> kv.getKey().startsWith(wildcardPrefix) || kv.getKey().startsWith(appPrefix))
 				.collect(Collectors.toMap(kv -> kv.getKey().startsWith(wildcardPrefix)
 						? "spring.cloud.deployer." + kv.getKey().substring(wildcardLength)
-						: "spring.cloud.deployer." + kv.getKey().substring(appLength), kv -> kv.getValue(),
+						: "spring.cloud.deployer." + kv.getKey().substring(appLength), Entry::getValue,
 						(fromWildcard, fromApp) -> fromApp));

 		Map<String, String> resultApp = new TreeMap<>(input).entrySet().stream()
 				.filter(kv -> !kv.getKey().startsWith(wildcardPrefix) && !kv.getKey().startsWith(appPrefix))
-				.collect(Collectors.toMap(kv -> kv.getKey(), kv -> kv.getValue(),
+				.collect(Collectors.toMap(Entry::getKey, Entry::getValue,
 						(fromWildcard, fromApp) -> fromApp));

 		resultDeployer.putAll(resultApp);
+		logger.debug("qualifyDeployerProperties:{}", resultDeployer);
 		return resultDeployer;
 	}
@@ -419,8 +441,8 @@ public static List<String> removeQuoting(List<String> params) {
 			}
 			start = regexMatcher.start();
 		}
-		if (param != null && param.length() > 0) {
-			String p = removeQuoting(param.substring(start, param.length()).trim());
+		if (param != null && !param.isEmpty()) {
+			String p = removeQuoting(param.substring(start).trim());
 			if (StringUtils.hasText(p)) {
 				paramsToUse.add(p);
 			}
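Note (not part of the patch): a sketch of parseArgumentList() with quoted values, mirroring parseArgumentTestsWithQuotes further down; the trailing-whitespace trimming is new in this patch.

    List<String> args = DeploymentPropertiesUtils.parseArgumentList("a=\"b c\" e=f g=h", " ");
    // ["a=\"b c\"", "e=f", "g=h"] -- a quoted value may contain the delimiter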
diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/HttpClientConfigurer.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/HttpClientConfigurer.java
index f3a8eba709..f8eb05a8da 100644
--- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/HttpClientConfigurer.java
+++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/HttpClientConfigurer.java
@@ -17,21 +17,29 @@

 import java.net.URI;

-import org.apache.http.HttpHost;
-import org.apache.http.HttpRequestInterceptor;
-import org.apache.http.auth.AuthScope;
-import org.apache.http.auth.UsernamePasswordCredentials;
-import org.apache.http.client.CredentialsProvider;
-import org.apache.http.conn.ssl.NoopHostnameVerifier;
-import org.apache.http.impl.client.BasicCredentialsProvider;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClientBuilder;
-import org.apache.http.impl.client.ProxyAuthenticationStrategy;
+import org.apache.hc.client5.http.auth.AuthScope;
+import org.apache.hc.client5.http.auth.CredentialsProvider;
+import org.apache.hc.client5.http.auth.UsernamePasswordCredentials;
+import org.apache.hc.client5.http.impl.DefaultAuthenticationStrategy;
+import org.apache.hc.client5.http.impl.auth.BasicCredentialsProvider;
+import org.apache.hc.client5.http.impl.auth.SystemDefaultCredentialsProvider;
+import org.apache.hc.client5.http.impl.classic.CloseableHttpClient;
+import org.apache.hc.client5.http.impl.classic.HttpClientBuilder;
+import org.apache.hc.client5.http.impl.io.BasicHttpClientConnectionManager;
+import org.apache.hc.client5.http.socket.ConnectionSocketFactory;
+import org.apache.hc.client5.http.socket.PlainConnectionSocketFactory;
+import org.apache.hc.client5.http.ssl.NoopHostnameVerifier;
+import org.apache.hc.client5.http.ssl.SSLConnectionSocketFactory;
+import org.apache.hc.core5.http.HttpHost;
+import org.apache.hc.core5.http.HttpRequestInterceptor;
+import org.apache.hc.core5.http.config.Registry;
+import org.apache.hc.core5.http.config.RegistryBuilder;

 import org.springframework.http.client.ClientHttpRequestFactory;
 import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
 import org.springframework.util.Assert;

+
 /**
  * Utility for configuring a {@link CloseableHttpClient}. This class allows for
  * chained method invocation. If both basic auth credentials and a target host
@@ -60,12 +68,18 @@ public static HttpClientConfigurer create(URI targetHost) {
 	protected HttpClientConfigurer(URI targetHost) {
 		httpClientBuilder = HttpClientBuilder.create();
-		this.targetHost = new HttpHost(targetHost.getHost(), targetHost.getPort(), targetHost.getScheme());
+		this.targetHost = new HttpHost(targetHost.getScheme(), targetHost.getHost(), targetHost.getPort());
 	}

 	public HttpClientConfigurer basicAuthCredentials(String username, String password) {
 		final CredentialsProvider credentialsProvider = this.getOrInitializeCredentialsProvider();
-		credentialsProvider.setCredentials(new AuthScope(this.targetHost), new UsernamePasswordCredentials(username, password));
+		if(credentialsProvider instanceof BasicCredentialsProvider basicCredentialsProvider) {
+			basicCredentialsProvider.setCredentials(new AuthScope(this.targetHost),
+					new UsernamePasswordCredentials(username, password.toCharArray()));
+		} else if (credentialsProvider instanceof SystemDefaultCredentialsProvider systemDefaultCredProvider) {
+			systemDefaultCredProvider.setCredentials(new AuthScope(this.targetHost),
+					new UsernamePasswordCredentials(username, password.toCharArray()));
+		}
 		httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
 		useBasicAuth = true;
 		return this;
@@ -94,14 +108,18 @@ public HttpClientConfigurer withProxyCredentials(URI proxyUri, String proxyUsern
 		Assert.hasText(proxyUri.getScheme(), "The scheme component of the proxyUri must not be empty.");

 		httpClientBuilder
-				.setProxy(new HttpHost(proxyUri.getHost(), proxyUri.getPort(), proxyUri.getScheme()));
+				.setProxy(new HttpHost(proxyUri.getScheme(), proxyUri.getHost(), proxyUri.getPort()));
 		if (proxyUsername !=null && proxyPassword != null) {
 			final CredentialsProvider credentialsProvider = this.getOrInitializeCredentialsProvider();
-			credentialsProvider.setCredentials(
-					new AuthScope(proxyUri.getHost(), proxyUri.getPort()),
-					new UsernamePasswordCredentials(proxyUsername, proxyPassword));
+			if(credentialsProvider instanceof BasicCredentialsProvider basicCredentialsProvider) {
+				basicCredentialsProvider.setCredentials(new AuthScope(proxyUri.getHost(), proxyUri.getPort()),
+						new UsernamePasswordCredentials(proxyUsername, proxyPassword.toCharArray()));
+			} else if (credentialsProvider instanceof SystemDefaultCredentialsProvider systemDefaultCredProvider) {
+				systemDefaultCredProvider.setCredentials(new AuthScope(proxyUri.getHost(), proxyUri.getPort()),
+						new UsernamePasswordCredentials(proxyUsername, proxyPassword.toCharArray()));
+			}
 			httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider)
-					.setProxyAuthenticationStrategy(new ProxyAuthenticationStrategy());
+					.setProxyAuthenticationStrategy(new DefaultAuthenticationStrategy());
 		}
 		return this;
 	}
@@ -113,8 +131,14 @@ public HttpClientConfigurer withProxyCredentials(URI proxyUri, String proxyUsern
 	 * @return a reference to {@code this} to enable chained method invocation
 	 */
 	public HttpClientConfigurer skipTlsCertificateVerification() {
-		httpClientBuilder.setSSLContext(HttpUtils.buildCertificateIgnoringSslContext());
-		httpClientBuilder.setSSLHostnameVerifier(new NoopHostnameVerifier());
+		ConnectionSocketFactory sslsf = new SSLConnectionSocketFactory(HttpUtils.buildCertificateIgnoringSslContext(), NoopHostnameVerifier.INSTANCE);
+		Registry<ConnectionSocketFactory> socketFactoryRegistry =
+				RegistryBuilder.<ConnectionSocketFactory> create()
+						.register("https", sslsf)
+						.register("http", new PlainConnectionSocketFactory())
+						.build();
+		final BasicHttpClientConnectionManager connectionManager = new BasicHttpClientConnectionManager(socketFactoryRegistry);
+		httpClientBuilder.setConnectionManager(connectionManager);
 		return this;
 	}
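Note (not part of the patch): a typical chained use of the configurer after this HttpClient 4-to-5 migration; the host name is hypothetical, and buildHttpClient() is the existing terminal call also used by HttpClientTest further down.

    CloseableHttpClient client = HttpClientConfigurer
            .create(URI.create("https://dataflow.example.com/"))  // hypothetical target host
            .basicAuthCredentials("user", "secret")
            .skipTlsCertificateVerification()  // installs the trust-all socket-factory registry from above
            .buildHttpClient();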
@@ -128,7 +152,7 @@ public HttpClientConfigurer skipTlsCertificateVerification(boolean skipTlsCertif
 	}

 	public HttpClientConfigurer addInterceptor(HttpRequestInterceptor interceptor) {
-		httpClientBuilder.addInterceptorLast(interceptor);
+		httpClientBuilder.addRequestInterceptorLast(interceptor);
 		return this;
 	}
diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/HttpUtils.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/HttpUtils.java
index 28c6a4ccd2..0569d43206 100644
--- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/HttpUtils.java
+++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/HttpUtils.java
@@ -21,11 +21,12 @@

 import javax.net.ssl.SSLContext;

-import org.apache.http.client.HttpClient;
-import org.apache.http.ssl.SSLContexts;
+import org.apache.hc.core5.ssl.SSLContexts;
+
+
 /**
- * Provides utilities for the Apache {@link HttpClient}, used to make REST calls
+ * Provides utilities for the Apache {@code HttpClient}, used to make REST calls
  *
  * @author Gunnar Hillert
  */
diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/PreemptiveBasicAuthHttpComponentsClientHttpRequestFactory.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/PreemptiveBasicAuthHttpComponentsClientHttpRequestFactory.java
index 86fa469a29..4a5a257511 100644
--- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/PreemptiveBasicAuthHttpComponentsClientHttpRequestFactory.java
+++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/PreemptiveBasicAuthHttpComponentsClientHttpRequestFactory.java
@@ -17,14 +17,14 @@

 import java.net.URI;

-import org.apache.http.HttpHost;
-import org.apache.http.client.AuthCache;
-import org.apache.http.client.HttpClient;
-import org.apache.http.client.protocol.HttpClientContext;
-import org.apache.http.impl.auth.BasicScheme;
-import org.apache.http.impl.client.BasicAuthCache;
-import org.apache.http.protocol.BasicHttpContext;
-import org.apache.http.protocol.HttpContext;
+import org.apache.hc.client5.http.auth.AuthCache;
+import org.apache.hc.client5.http.classic.HttpClient;
+import org.apache.hc.client5.http.impl.auth.BasicAuthCache;
+import org.apache.hc.client5.http.impl.auth.BasicScheme;
+import org.apache.hc.client5.http.protocol.HttpClientContext;
+import org.apache.hc.core5.http.HttpHost;
+import org.apache.hc.core5.http.protocol.BasicHttpContext;
+import org.apache.hc.core5.http.protocol.HttpContext;

 import org.springframework.http.HttpMethod;
 import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/ResourceBasedAuthorizationInterceptor.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/ResourceBasedAuthorizationInterceptor.java
index 04c7b8bb6c..e93be0e202 100644
---
a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/ResourceBasedAuthorizationInterceptor.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/ResourceBasedAuthorizationInterceptor.java @@ -18,11 +18,12 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; -import org.apache.http.HttpException; -import org.apache.http.HttpHeaders; -import org.apache.http.HttpRequest; -import org.apache.http.HttpRequestInterceptor; -import org.apache.http.protocol.HttpContext; +import org.apache.hc.core5.http.EntityDetails; +import org.apache.hc.core5.http.HttpException; +import org.apache.hc.core5.http.HttpHeaders; +import org.apache.hc.core5.http.HttpRequest; +import org.apache.hc.core5.http.HttpRequestInterceptor; +import org.apache.hc.core5.http.protocol.HttpContext; import org.springframework.core.io.Resource; import org.springframework.util.StreamUtils; @@ -42,7 +43,7 @@ public ResourceBasedAuthorizationInterceptor(CheckableResource resource) { } @Override - public void process(HttpRequest httpRequest, HttpContext httpContext) throws HttpException, IOException { + public void process(HttpRequest httpRequest, EntityDetails entityDetails, HttpContext httpContext) throws HttpException, IOException { final String credentials = StreamUtils.copyToString(resource.getInputStream(), StandardCharsets.UTF_8).trim(); resource.check(); httpRequest.addHeader(HttpHeaders.AUTHORIZATION, credentials); diff --git a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/job/support/JobUtilsTests.java b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/job/support/JobUtilsTests.java index 9343739c88..e1f08b5af4 100644 --- a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/job/support/JobUtilsTests.java +++ b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/job/support/JobUtilsTests.java @@ -15,30 +15,29 @@ */ package org.springframework.cloud.dataflow.rest.job.support; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.JobExecution; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; /** * @author Gunnar Hillert + * @author Corneil du Plessis * @since 1.0 */ -public class JobUtilsTests { +class JobUtilsTests { /** * Test method for * {@link JobUtils#isJobExecutionRestartable(org.springframework.batch.core.JobExecution)}. */ @Test - public void testIsJobExecutionRestartable() { + void isJobExecutionRestartable() { final JobExecution jobExecution = new JobExecution(1L); - assertFalse(JobUtils.isJobExecutionRestartable(jobExecution)); + assertThat(JobUtils.isJobExecutionRestartable(jobExecution)).isFalse(); } /** @@ -46,9 +45,9 @@ public void testIsJobExecutionRestartable() { * {@link JobUtils#isJobExecutionAbandonable(org.springframework.batch.core.JobExecution)}. 
*/ @Test - public void testIsJobExecutionAbandonable() { + void isJobExecutionAbandonable() { final JobExecution jobExecution = new JobExecution(1L); - assertFalse(JobUtils.isJobExecutionAbandonable(jobExecution)); + assertThat(JobUtils.isJobExecutionAbandonable(jobExecution)).isFalse(); } /** @@ -56,46 +55,46 @@ public void testIsJobExecutionAbandonable() { * {@link JobUtils#isJobExecutionStoppable(org.springframework.batch.core.JobExecution)}. */ @Test - public void testIsJobExecutionStoppable() { + void isJobExecutionStoppable() { final JobExecution jobExecution = new JobExecution(1L); - assertTrue(JobUtils.isJobExecutionStoppable(jobExecution)); + assertThat(JobUtils.isJobExecutionStoppable(jobExecution)).isTrue(); } @Test - public void testIsJobExecutionRestartableWithNullJobExecution() { + void isJobExecutionRestartableWithNullJobExecution() { try { JobUtils.isJobExecutionRestartable(null); fail("Expected an IllegalArgumentException to be thrown."); } catch (IllegalArgumentException e) { - assertEquals("The provided jobExecution must not be null.", e.getMessage()); + assertThat(e.getMessage()).isEqualTo("The provided jobExecution must not be null."); } } @Test - public void testIsJobExecutionAbandonableWithNullJobExecution() { + void isJobExecutionAbandonableWithNullJobExecution() { try { JobUtils.isJobExecutionAbandonable(null); fail("Expected an IllegalArgumentException to be thrown."); } catch (IllegalArgumentException e) { - assertEquals("The provided jobExecution must not be null.", e.getMessage()); + assertThat(e.getMessage()).isEqualTo("The provided jobExecution must not be null."); } } @Test - public void testIsJobExecutionStoppableWithNullJobExecution() { + void isJobExecutionStoppableWithNullJobExecution() { try { JobUtils.isJobExecutionStoppable(null); fail("Expected an IllegalArgumentException to be thrown."); } catch (IllegalArgumentException e) { - assertEquals("The provided jobExecution must not be null.", e.getMessage()); + assertThat(e.getMessage()).isEqualTo("The provided jobExecution must not be null."); } } @Test - public void testIsJobExecutionRestartableWithNullBatchStatus() { + void isJobExecutionRestartableWithNullBatchStatus() { try { final JobExecution jobExecution = new JobExecution(1L); jobExecution.setStatus(null); @@ -103,12 +102,12 @@ public void testIsJobExecutionRestartableWithNullBatchStatus() { fail("Expected an IllegalArgumentException to be thrown."); } catch (IllegalArgumentException e) { - assertEquals("The BatchStatus of the provided jobExecution must not be null.", e.getMessage()); + assertThat(e.getMessage()).isEqualTo("The BatchStatus of the provided jobExecution must not be null."); } } @Test - public void testIsJobExecutionAbandonableWithNullBatchStatus() { + void isJobExecutionAbandonableWithNullBatchStatus() { try { final JobExecution jobExecution = new JobExecution(1L); jobExecution.setStatus(null); @@ -116,12 +115,12 @@ public void testIsJobExecutionAbandonableWithNullBatchStatus() { fail("Expected an IllegalArgumentException to be thrown."); } catch (IllegalArgumentException e) { - assertEquals("The BatchStatus of the provided jobExecution must not be null.", e.getMessage()); + assertThat(e.getMessage()).isEqualTo("The BatchStatus of the provided jobExecution must not be null."); } } @Test - public void testIsJobExecutionStoppableWithNullBatchStatus() { + void isJobExecutionStoppableWithNullBatchStatus() { try { final JobExecution jobExecution = new JobExecution(1L); jobExecution.setStatus(null); @@ -129,50 +128,50 @@ public void 
testIsJobExecutionStoppableWithNullBatchStatus() { fail("Expected an IllegalArgumentException to be thrown."); } catch (IllegalArgumentException e) { - assertEquals("The BatchStatus of the provided jobExecution must not be null.", e.getMessage()); + assertThat(e.getMessage()).isEqualTo("The BatchStatus of the provided jobExecution must not be null."); } } @Test - public void testIsJobExecutionStoppableWithCompletedBatchStatus() { + void isJobExecutionStoppableWithCompletedBatchStatus() { final JobExecution jobExecution = new JobExecution(1L); jobExecution.setStatus(BatchStatus.COMPLETED); - assertFalse(JobUtils.isJobExecutionStoppable(jobExecution)); + assertThat(JobUtils.isJobExecutionStoppable(jobExecution)).isFalse(); } @Test - public void testIsJobExecutionRestartableWithCompletedBatchStatus() { + void isJobExecutionRestartableWithCompletedBatchStatus() { final JobExecution jobExecution = new JobExecution(1L); jobExecution.setStatus(BatchStatus.COMPLETED); - assertFalse(JobUtils.isJobExecutionRestartable(jobExecution)); + assertThat(JobUtils.isJobExecutionRestartable(jobExecution)).isFalse(); } @Test - public void testIsJobExecutionAbandonableWithCompletedBatchStatus() { + void isJobExecutionAbandonableWithCompletedBatchStatus() { final JobExecution jobExecution = new JobExecution(1L); jobExecution.setStatus(BatchStatus.COMPLETED); - assertFalse(JobUtils.isJobExecutionAbandonable(jobExecution)); + assertThat(JobUtils.isJobExecutionAbandonable(jobExecution)).isFalse(); } @Test - public void testIsJobExecutionStoppableWithFailedBatchStatus() { + void isJobExecutionStoppableWithFailedBatchStatus() { final JobExecution jobExecution = new JobExecution(1L); jobExecution.setStatus(BatchStatus.FAILED); - assertFalse(JobUtils.isJobExecutionStoppable(jobExecution)); + assertThat(JobUtils.isJobExecutionStoppable(jobExecution)).isFalse(); } @Test - public void testIsJobExecutionRestartableWithFailedBatchStatus() { + void isJobExecutionRestartableWithFailedBatchStatus() { final JobExecution jobExecution = new JobExecution(1L); jobExecution.setStatus(BatchStatus.FAILED); - assertTrue(JobUtils.isJobExecutionRestartable(jobExecution)); + assertThat(JobUtils.isJobExecutionRestartable(jobExecution)).isTrue(); } @Test - public void testIsJobExecutionAbandonableWithFailedBatchStatus() { + void isJobExecutionAbandonableWithFailedBatchStatus() { final JobExecution jobExecution = new JobExecution(1L); jobExecution.setStatus(BatchStatus.FAILED); - assertTrue(JobUtils.isJobExecutionAbandonable(jobExecution)); + assertThat(JobUtils.isJobExecutionAbandonable(jobExecution)).isTrue(); } } diff --git a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/DeploymentStateResourceTests.java b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/DeploymentStateResourceTests.java index ab155b01af..98b2ea449e 100644 --- a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/DeploymentStateResourceTests.java +++ b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/DeploymentStateResourceTests.java @@ -20,18 +20,18 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.jayway.jsonpath.DocumentContext; import com.jayway.jsonpath.JsonPath; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; +import static 
org.junit.jupiter.api.Assertions.assertEquals; /** * @author Gunnar Hillert + * @author Corneil du Plessis */ -public class DeploymentStateResourceTests { +class DeploymentStateResourceTests { @Test - public void testSerializationOfSingleStepExecution() throws JsonProcessingException { + void serializationOfSingleStepExecution() throws JsonProcessingException { final ObjectMapper objectMapper = new ObjectMapper(); @@ -40,9 +40,9 @@ public void testSerializationOfSingleStepExecution() throws JsonProcessingExcept final DocumentContext documentContext = JsonPath.parse(result); - assertThat(documentContext.read("$.key"), is("deployed")); - assertThat(documentContext.read("$.displayName"), is("Deployed")); - assertThat(documentContext.read("$.description"), is("The stream has been successfully deployed")); + assertEquals("deployed", documentContext.read("$.key")); + assertEquals("Deployed", documentContext.read("$.displayName")); + assertEquals("The stream has been successfully deployed", documentContext.read("$.description")); } diff --git a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/HttpClientTest.java b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/HttpClientTest.java index 65c5b15963..a324df53c5 100644 --- a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/HttpClientTest.java +++ b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/HttpClientTest.java @@ -1,5 +1,5 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -19,28 +19,57 @@ import java.io.IOException; import java.net.URI; -import org.apache.http.HttpHeaders; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.impl.client.CloseableHttpClient; -import org.assertj.core.api.Assertions; -import org.junit.Test; +import org.apache.hc.client5.http.classic.methods.HttpGet; +import org.apache.hc.core5.http.HttpHeaders; +import org.apache.hc.core5.http.io.HttpClientResponseHandler; +import org.junit.jupiter.api.Test; +import org.springframework.boot.SpringBootConfiguration; +import org.springframework.boot.autoconfigure.EnableAutoConfiguration; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.boot.test.web.server.LocalServerPort; import org.springframework.cloud.dataflow.rest.util.CheckableResource; import org.springframework.cloud.dataflow.rest.util.HttpClientConfigurer; import org.springframework.cloud.dataflow.rest.util.ResourceBasedAuthorizationInterceptor; import org.springframework.core.io.ByteArrayResource; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RestController; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType; /** * @author Mike Heath + * @author Corneil du Plessis */ -public class HttpClientTest { - - static final class TestException extends IOException { - TestException() { - super("It broke"); +@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, classes = HttpClientTest.HttpClientTestApp.class) +class HttpClientTest { + + @LocalServerPort + private int port; + + @Test + void resourceBasedAuthorizationHeader() throws Exception { + var credentials = "Super Secret Credentials"; + var resource = new ByteArrayCheckableResource(credentials.getBytes(), null); + var targetHost = new URI("http://localhost/" + port); + try (var client = HttpClientConfigurer.create(targetHost) + .addInterceptor(new ResourceBasedAuthorizationInterceptor(resource)) + .addInterceptor((request, entityDetails, context) -> { + var authorization = request.getFirstHeader(HttpHeaders.AUTHORIZATION).getValue(); + assertThat(authorization).isEqualTo(credentials); + // Throw an exception to short-circuit making an HTTP request + throw new Passed(); + }) + .buildHttpClient()) { + assertThatExceptionOfType(Passed.class).isThrownBy(() -> client.execute(new HttpGet(targetHost), getNoOpResponseHandler())); } } + private HttpClientResponseHandler getNoOpResponseHandler() { + return response -> "noOp"; + } + static final class ByteArrayCheckableResource extends ByteArrayResource implements CheckableResource { private final IOException exception; @@ -57,48 +86,22 @@ public void check() throws IOException { } } - @Test(expected = Passed.class) - public void resourceBasedAuthorizationHeader() throws Exception { - final String credentials = "Super Secret Credentials"; - - final CheckableResource resource = new ByteArrayCheckableResource(credentials.getBytes(), null); - - final URI targetHost = new URI("http://test.com/"); - try (final CloseableHttpClient client = HttpClientConfigurer.create(targetHost) - .addInterceptor(new ResourceBasedAuthorizationInterceptor(resource)) - .addInterceptor((request, context) -> { - final String authorization = request.getFirstHeader(HttpHeaders.AUTHORIZATION).getValue(); - Assertions.assertThat(authorization).isEqualTo(credentials); - - // Throw an exception to short-circuit making an HTTP 
request - throw new Passed(); - }) - .buildHttpClient()) { - client.execute(new HttpGet(targetHost)); - } - } - static final class Passed extends RuntimeException { } - @Test(expected = TestException.class) - public void resourceBasedAuthorizationHeaderResourceCheck() throws Exception { - final String credentials = "Super Secret Credentials"; + @EnableAutoConfiguration + @SpringBootConfiguration + static class HttpClientTestApp { - final CheckableResource resource = new ByteArrayCheckableResource(credentials.getBytes(), new TestException()); + @RestController + static class TestController { - final URI targetHost = new URI("http://test.com/"); - try (final CloseableHttpClient client = HttpClientConfigurer.create(targetHost) - .addInterceptor(new ResourceBasedAuthorizationInterceptor(resource)) - .addInterceptor((request, context) -> { - final String authorization = request.getFirstHeader(HttpHeaders.AUTHORIZATION).getValue(); - Assertions.assertThat(authorization).isEqualTo(credentials); + @GetMapping("/") + public String home() { + return "Hello World"; + } - // Throw an exception to short-circuit making an HTTP request - throw new Passed(); - }) - .buildHttpClient()) { - client.execute(new HttpGet(targetHost)); } + } } diff --git a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResourceTests.java b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResourceTests.java index 14e2c5a11e..48444744b3 100644 --- a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResourceTests.java +++ b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResourceTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,21 +16,24 @@ package org.springframework.cloud.dataflow.rest.resource; +import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Collections; -import java.util.Date; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobParameters; import org.springframework.cloud.dataflow.core.TaskManifest; +import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; import org.springframework.cloud.dataflow.rest.job.TaskJobExecutionRel; import org.springframework.cloud.deployer.spi.core.AppDefinition; import org.springframework.cloud.deployer.spi.core.AppDeploymentRequest; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.core.io.UrlResource; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.assertj.core.api.Assertions.assertThat; /** * Provides tests for the {@link TaskExecutionResourceTests} class. 
@@ -38,69 +41,109 @@ * @author Gunnar Hillert * @author Ilayaperumal Gopinathan * @author Glenn Renfro + * @author Corneil du Plessis */ -public class TaskExecutionResourceTests { +class TaskExecutionResourceTests { @Test - public void testTaskExecutionStatusWithNoTaskExecutionSet() { + void taskExecutionStatusWithNoTaskExecutionSet() { final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(); - assertEquals(TaskExecutionStatus.UNKNOWN, taskExecutionResource.getTaskExecutionStatus()); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.UNKNOWN); } @Test - public void testTaskExecutionStatusWithNoStartTime() { + void taskExecutionStatusWithNoStartTime() { final TaskExecution taskExecution = new TaskExecution(); - final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution); - assertEquals(TaskExecutionStatus.UNKNOWN, taskExecutionResource.getTaskExecutionStatus()); + final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, null); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.UNKNOWN); } @Test - public void testTaskExecutionStatusWithRunningTaskExecution() { + void taskExecutionStatusWithRunningTaskExecution() { final TaskExecution taskExecution = new TaskExecution(); - taskExecution.setStartTime(new Date()); - final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution); - assertEquals(TaskExecutionStatus.RUNNING, taskExecutionResource.getTaskExecutionStatus()); - assertNull(taskExecutionResource.getExitCode()); + taskExecution.setStartTime(LocalDateTime.now()); + final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, null); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.RUNNING); + assertThat(taskExecutionResource.getExitCode()).isNull(); } @Test - public void testTaskExecutionStatusWithSuccessfulTaskExecution() { + void taskExecutionStatusWithSuccessfulTaskExecution() { + final TaskExecution taskExecution = getDefaultTaskExecution(); + final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, null); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.COMPLETE); + } + + @Test + void ctrExecutionStatusWithSuccessfulJobExecution() { + final TaskExecution taskExecution = getDefaultTaskExecution(); + JobExecution jobExecution = new JobExecution(1L); + jobExecution.setExitStatus(ExitStatus.COMPLETED); + TaskJobExecution taskJobExecution = new TaskJobExecution(taskExecution.getExecutionId(), jobExecution, true); + final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, taskJobExecution); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.COMPLETE); + } + + @Test + void ctrExecutionStatusWithFailedJobExecution() { final TaskExecution taskExecution = new TaskExecution(); - taskExecution.setStartTime(new Date()); - taskExecution.setEndTime(new Date()); + taskExecution.setStartTime(LocalDateTime.now()); + taskExecution.setEndTime(LocalDateTime.now()); taskExecution.setExitCode(0); - final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution); - assertEquals(TaskExecutionStatus.COMPLETE, taskExecutionResource.getTaskExecutionStatus()); + JobExecution jobExecution = new JobExecution(1L); + jobExecution.setExitStatus(ExitStatus.FAILED); + 
TaskJobExecution taskJobExecution = new TaskJobExecution(taskExecution.getExecutionId(), jobExecution, true); + final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, taskJobExecution); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.ERROR); } @Test - public void testTaskExecutionStatusWithFailedTaskExecution() { + void taskExecutionStatusWithFailedTaskExecution() { final TaskExecution taskExecution = new TaskExecution(); - taskExecution.setStartTime(new Date()); - taskExecution.setEndTime(new Date()); + taskExecution.setStartTime(LocalDateTime.now()); + taskExecution.setEndTime(LocalDateTime.now()); taskExecution.setExitCode(123); - final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution); - assertEquals(TaskExecutionStatus.ERROR, taskExecutionResource.getTaskExecutionStatus()); + final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, null); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.ERROR); } @Test - public void testTaskExecutionForTaskExecutionRel() throws Exception{ - final TaskExecution taskExecution = new TaskExecution(); - taskExecution.setStartTime(new Date()); - taskExecution.setEndTime(new Date()); - taskExecution.setExitCode(0); + void taskExecutionForTaskExecutionRel() throws Exception { + + TaskExecution taskExecution = getDefaultTaskExecution(); TaskManifest taskManifest = new TaskManifest(); taskManifest.setPlatformName("testplatform"); taskManifest.setTaskDeploymentRequest(new AppDeploymentRequest(new AppDefinition("testapp", Collections.emptyMap()), new UrlResource("http://foo/"))); - TaskJobExecutionRel taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, new ArrayList<>(), taskManifest); + TaskJobExecutionRel taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, new ArrayList<>(), taskManifest, null); TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskJobExecutionRel); - assertEquals("testplatform", taskExecutionResource.getPlatformName()); - assertEquals(TaskExecutionStatus.COMPLETE, taskExecutionResource.getTaskExecutionStatus()); - taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, new ArrayList<>()); + assertThat(taskExecutionResource.getPlatformName()).isEqualTo("testplatform"); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.COMPLETE); + taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, new ArrayList<>(), null, null); + taskExecutionResource = new TaskExecutionResource(taskJobExecutionRel); + assertThat(taskExecutionResource.getPlatformName()).isNull(); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.COMPLETE); + JobExecution jobExecution = new JobExecution(1L, new JobParameters()); + jobExecution.setExitStatus(ExitStatus.FAILED); + + TaskJobExecution ctrTaskJobExecution = new TaskJobExecution(1, jobExecution, true); + taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, new ArrayList<>(), null, ctrTaskJobExecution); + taskExecutionResource = new TaskExecutionResource(taskJobExecutionRel); + assertThat(taskExecutionResource.getPlatformName()).isNull(); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.ERROR); + jobExecution.setExitStatus(ExitStatus.COMPLETED); + ctrTaskJobExecution = new TaskJobExecution(1, jobExecution, true); + taskJobExecutionRel = new 
TaskJobExecutionRel(taskExecution, new ArrayList<>(), null, ctrTaskJobExecution); taskExecutionResource = new TaskExecutionResource(taskJobExecutionRel); - assertNull(taskExecutionResource.getPlatformName()); - assertEquals(TaskExecutionStatus.COMPLETE, taskExecutionResource.getTaskExecutionStatus()); + assertThat(taskExecutionResource.getPlatformName()).isNull(); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.COMPLETE); } + private TaskExecution getDefaultTaskExecution() { + final TaskExecution taskExecution = new TaskExecution(); + taskExecution.setStartTime(LocalDateTime.now()); + taskExecution.setEndTime(LocalDateTime.now()); + taskExecution.setExitCode(0); + return taskExecution; + } } diff --git a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/support/jackson/JobParameterJacksonDeserializerTests.java b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/support/jackson/JobParameterJacksonDeserializerTests.java new file mode 100644 index 0000000000..9c6fc9131c --- /dev/null +++ b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/support/jackson/JobParameterJacksonDeserializerTests.java @@ -0,0 +1,64 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.cloud.dataflow.rest.support.jackson; + +import java.io.ByteArrayInputStream; +import java.io.IOException; + +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.json.UTF8StreamJsonParser; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.junit.jupiter.api.Test; + +import org.springframework.batch.core.JobParameter; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType; + +class JobParameterJacksonDeserializerTests { + + @Test + void validJobParameter() throws IOException { + JobParameterJacksonDeserializer jobParameterJacksonDeserializer = new JobParameterJacksonDeserializer(); + String json = "{\"value\":\"BAR\",\"type\":\"java.lang.String\",\"identifying\":true}"; + JobParameter jobParameter = jobParameterJacksonDeserializer.deserialize(getJsonParser(json), null); + assertThat(jobParameter.getType()).isEqualTo(String.class); + assertThat(jobParameter.getValue()).isEqualTo("BAR"); + assertThat(jobParameter.isIdentifying()).isTrue(); + } + + @Test + void inValidJobParameter() throws IOException { + JobParameterJacksonDeserializer jobParameterJacksonDeserializer = new JobParameterJacksonDeserializer(); + String json = "{\"value\":\"BAR\",\"type\":\"java.lang.FOO\",\"identifying\":true}"; + assertThatExceptionOfType(IllegalArgumentException.class) + .isThrownBy(() -> { + jobParameterJacksonDeserializer.deserialize(getJsonParser(json), null); + }) + .withMessage("JobParameter type java.lang.FOO is not supported by DataFlow"); + } + + private JsonParser getJsonParser(String json) throws IOException { + JsonFactory factory = new JsonFactory(); + byte[] jsonData = json.getBytes(); + ByteArrayInputStream inputStream = new ByteArrayInputStream(jsonData); + UTF8StreamJsonParser jsonParser = (UTF8StreamJsonParser) factory.createParser(inputStream); + jsonParser.setCodec(new ObjectMapper()); + return jsonParser; + } +} diff --git a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/support/jackson/StepExecutionJacksonMixInTests.java b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/support/jackson/StepExecutionJacksonMixInTests.java index 7cab42c7bd..3c2ac22646 100644 --- a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/support/jackson/StepExecutionJacksonMixInTests.java +++ b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/support/jackson/StepExecutionJacksonMixInTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2019 the original author or authors. + * Copyright 2016-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -19,22 +19,24 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.ObjectMapper; -import org.junit.Test; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.StepExecution; import org.springframework.batch.item.ExecutionContext; -import static org.hamcrest.CoreMatchers.containsString; -import static org.hamcrest.CoreMatchers.not; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType; /** * Tests that the {@link ExecutionContextJacksonMixIn} works as expected. * * @author Gunnar Hillert + * @author Corneil du Plessis */ -public class StepExecutionJacksonMixInTests { +class StepExecutionJacksonMixInTests { /** * Assert that without using the {@link ExecutionContextJacksonMixIn} Jackson does not @@ -42,15 +44,16 @@ public class StepExecutionJacksonMixInTests { * * @throws JsonProcessingException if a Json generation error occurs. */ - @Test(expected = JsonMappingException.class) - public void testSerializationOfSingleStepExecutionWithoutMixin() throws JsonProcessingException { - + @Test + void serializationOfSingleStepExecutionWithoutMixin() throws JsonProcessingException { + assertThatExceptionOfType(JsonMappingException.class).isThrownBy(() -> { final ObjectMapper objectMapper = new ObjectMapper(); final StepExecution stepExecution = getStepExecution(); final String result = objectMapper.writeValueAsString(stepExecution); - assertThat(result, containsString("\"executionContext\":{\"dirty\":true,\"empty\":false}")); + assertThat(result).contains("\"executionContext\":{\"dirty\":true,\"empty\":false}"); + }); } /** @@ -60,9 +63,10 @@ public void testSerializationOfSingleStepExecutionWithoutMixin() throws JsonProc * @throws JsonProcessingException if a Json generation error occurs. 
*/ @Test - public void testSerializationOfSingleStepExecution() throws JsonProcessingException { + void serializationOfSingleStepExecution() throws JsonProcessingException { final ObjectMapper objectMapper = new ObjectMapper(); + objectMapper.registerModule(new JavaTimeModule()); objectMapper.addMixIn(StepExecution.class, StepExecutionJacksonMixIn.class); objectMapper.addMixIn(ExecutionContext.class, ExecutionContextJacksonMixIn.class); @@ -70,19 +74,19 @@ public void testSerializationOfSingleStepExecution() throws JsonProcessingExcept final StepExecution stepExecution = getStepExecution(); final String result = objectMapper.writeValueAsString(stepExecution); - assertThat(result, not(containsString("\"executionContext\":{\"dirty\":true,\"empty\":false}"))); - assertThat(result, containsString("\"executionContext\":{\"dirty\":true,\"empty\":false,\"values\":[{")); + assertThat(result).doesNotContain("\"executionContext\":{\"dirty\":true,\"empty\":false}"); + assertThat(result).contains("\"executionContext\":{\"dirty\":true,\"empty\":false,\"values\":[{"); - assertThat(result, containsString("{\"counter\":1234}")); - assertThat(result, containsString("{\"myDouble\":1.123456}")); - assertThat(result, containsString("{\"Josh\":4444444444}")); - assertThat(result, containsString("{\"awesomeString\":\"Yep\"}")); - assertThat(result, containsString("{\"hello\":\"world\"")); - assertThat(result, containsString("{\"counter2\":9999}")); + assertThat(result).contains("{\"counter\":1234}"); + assertThat(result).contains("{\"myDouble\":1.123456}"); + assertThat(result).contains("{\"Josh\":4444444444}"); + assertThat(result).contains("{\"awesomeString\":\"Yep\"}"); + assertThat(result).contains("{\"hello\":\"world\""); + assertThat(result).contains("{\"counter2\":9999}"); } private StepExecution getStepExecution() { - JobExecution jobExecution = new JobExecution(1L, null, "hi"); + JobExecution jobExecution = new JobExecution(1L, new JobParameters()); final StepExecution stepExecution = new StepExecution("step1", jobExecution); jobExecution.createStepExecution("step1"); final ExecutionContext executionContext = stepExecution.getExecutionContext(); diff --git a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtilsTests.java b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtilsTests.java index 06fc3b4cc4..8edbf5674c 100644 --- a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtilsTests.java +++ b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtilsTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2020 the original author or authors. + * Copyright 2016-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -25,18 +25,13 @@
 import java.util.List;
 import java.util.Map;

-import org.junit.Test;
+import org.junit.jupiter.api.Test;

 import org.springframework.util.FileCopyUtils;

-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.CoreMatchers.not;
-import static org.hamcrest.Matchers.containsInAnyOrder;
-import static org.hamcrest.Matchers.hasEntry;
-import static org.hamcrest.Matchers.hasKey;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.entry;
+import static org.assertj.core.api.Assertions.fail;

 /**
  * Tests for {@link DeploymentPropertiesUtils}.
@@ -44,130 +39,187 @@
  * @author Janne Valkealahti
  * @author Christian Tzolov
  * @author Ilayaperumal Gopinathan
+ * @author Glenn Renfro
+ * @author Corneil du Plessis
  */
-public class DeploymentPropertiesUtilsTests {
+class DeploymentPropertiesUtilsTests {

 	private static void assertArrays(String[] left, String[] right) {
 		ArrayList<String> params = new ArrayList<>(Arrays.asList(left));
-		assertThat(DeploymentPropertiesUtils.removeQuoting(params), containsInAnyOrder(right));
+		assertThat(DeploymentPropertiesUtils.removeQuoting(params)).contains(right);
 	}

 	@Test
-	public void testDeploymentPropertiesParsing() {
+	void deploymentPropertiesParsing() {
 		Map<String, String> props = DeploymentPropertiesUtils.parse("app.foo.bar=v, app.foo.wizz=v2 , deployer.foo"
-				+ ".pot=fern, app.other.key = value , deployer.other.cow = meww, scheduler.other.key = baz");
-		assertThat(props, hasEntry("app.foo.bar", "v"));
-		assertThat(props, hasEntry("app.other.key", "value"));
-		assertThat(props, hasEntry("app.foo.wizz", "v2"));
-		assertThat(props, hasEntry("deployer.foo.pot", "fern"));
-		assertThat(props, hasEntry("deployer.other.cow", "meww"));
-		assertThat(props, hasEntry("scheduler.other.key", "baz"));
+				+ ".pot=fern, app.other.key = value , deployer.other.cow = meww, deployer.other.key = baz");
+		assertThat(props.entrySet()).contains(entry("app.foo.bar", "v"));
+		assertThat(props.entrySet()).contains(entry("app.other.key", "value"));
+		assertThat(props.entrySet()).contains(entry("app.foo.wizz", "v2"));
+		assertThat(props.entrySet()).contains(entry("deployer.foo.pot", "fern"));
+		assertThat(props.entrySet()).contains(entry("deployer.other.cow", "meww"));
+		assertThat(props.entrySet()).contains(entry("deployer.other.key", "baz"));

 		props = DeploymentPropertiesUtils.parse("app.f=v");
-		assertThat(props, hasEntry("app.f", "v"));
+		assertThat(props.entrySet()).contains(entry("app.f", "v"));

 		props = DeploymentPropertiesUtils.parse("app.foo1=bar1,app.foo2=bar2,app.foo3=bar3,xxx3");
-		assertThat(props, hasEntry("app.foo1", "bar1"));
-		assertThat(props, hasEntry("app.foo2", "bar2"));
-		assertThat(props, hasEntry("app.foo3", "bar3,xxx3"));
+		assertThat(props.entrySet()).contains(entry("app.foo1", "bar1"));
+		assertThat(props.entrySet()).contains(entry("app.foo2", "bar2"));
+		assertThat(props.entrySet()).contains(entry("app.foo3", "bar3,xxx3"));

 		props = DeploymentPropertiesUtils.parse("deployer.foo1 = bar1 , app.foo2= bar2, deployer.foo3 = bar3,xxx3");
-		assertThat(props, hasEntry("deployer.foo1", "bar1"));
-		assertThat(props, hasEntry("app.foo2", "bar2"));
-		assertThat(props, hasEntry("deployer.foo3", "bar3,xxx3"));
+		assertThat(props.entrySet()).contains(entry("deployer.foo1", "bar1"));
+		assertThat(props.entrySet()).contains(entry("app.foo2", "bar2"));
+		assertThat(props.entrySet()).contains(entry("deployer.foo3", "bar3,xxx3"));

 		props = DeploymentPropertiesUtils.parse("app.*.count=1");
-		assertThat(props, hasEntry("app.*.count", "1"));
+		assertThat(props.entrySet()).contains(entry("app.*.count", "1"));

 		props = DeploymentPropertiesUtils.parse("app.*.my-count=1");
-		assertThat(props, hasEntry("app.*.my-count", "1"));
+		assertThat(props.entrySet()).contains(entry("app.*.my-count", "1"));

 		props = DeploymentPropertiesUtils.parse("app.transform.producer.partitionKeyExpression=fakeExpression('xxx')");
-		assertThat(props, hasEntry("app.transform.producer.partitionKeyExpression", "fakeExpression('xxx')"));
+		assertThat(props.entrySet()).contains(entry("app.transform.producer.partitionKeyExpression", "fakeExpression('xxx')"));

 		try {
 			DeploymentPropertiesUtils.parse("invalidkeyvalue");
 			fail("Illegal Argument Exception expected.");
 		}
 		catch (Exception e) {
-			assertTrue(e.getMessage().equals("Only deployment property keys starting with 'app.' or 'scheduler' or 'deployer.' or 'version.' allowed."));
+			assertThat(e.getMessage()).isEqualTo("Only deployment property keys starting with 'app.' or 'deployer.' or 'version.' allowed. Not invalidkeyvalue");
 		}

 		props = DeploymentPropertiesUtils.parse("deployer.foo=bar,invalidkeyvalue2");
-		assertThat(props.size(), is(1));
-		assertThat(props, hasEntry("deployer.foo", "bar,invalidkeyvalue2"));
+		assertThat(props).hasSize(1);
+		assertThat(props.entrySet()).contains(entry("deployer.foo", "bar,invalidkeyvalue2"));

 		props = DeploymentPropertiesUtils.parse("app.foo.bar1=jee1,jee2,jee3,deployer.foo.bar2=jee4,jee5,jee6");
-		assertThat(props, hasEntry("app.foo.bar1", "jee1,jee2,jee3"));
-		assertThat(props, hasEntry("deployer.foo.bar2", "jee4,jee5,jee6"));
+		assertThat(props.entrySet()).contains(entry("app.foo.bar1", "jee1,jee2,jee3"));
+		assertThat(props.entrySet()).contains(entry("deployer.foo.bar2", "jee4,jee5,jee6"));

 		props = DeploymentPropertiesUtils.parse("app.foo.bar1=xxx=1,app.foo.bar2=xxx=2");
-		assertThat(props, hasEntry("app.foo.bar1", "xxx=1"));
-		assertThat(props, hasEntry("app.foo.bar2", "xxx=2"));
+		assertThat(props.entrySet()).contains(entry("app.foo.bar1", "xxx=1"));
+		assertThat(props.entrySet()).contains(entry("app.foo.bar2", "xxx=2"));

 		props = DeploymentPropertiesUtils.parse("app.foo.bar1=xxx=1,test=value,app.foo.bar2=xxx=2");
-		assertThat(props, hasEntry("app.foo.bar1", "xxx=1,test=value"));
-		assertThat(props, hasEntry("app.foo.bar2", "xxx=2"));
+		assertThat(props.entrySet()).contains(entry("app.foo.bar1", "xxx=1,test=value"));
+		assertThat(props.entrySet()).contains(entry("app.foo.bar2", "xxx=2"));
 	}

 	@Test
-	public void testDeploymentPropertiesParsing2() {
+	void deploymentPropertiesParsing2() {
 		List<String> props = DeploymentPropertiesUtils.parseParamList("app.foo.bar=v, app.foo.wizz=v2 , deployer.foo"
 				+ ".pot=fern, app.other.key = value , deployer.other.cow = meww,special=koza=boza,more", ",");
-		assertTrue(props.contains("app.foo.bar=v"));
-		assertTrue(props.contains(" app.other.key = value "));
-		assertTrue(props.contains(" app.foo.wizz=v2 "));
-		assertTrue(props.contains(" deployer.foo.pot=fern"));
-		assertTrue(props.contains(" deployer.other.cow = meww,special=koza=boza,more"));
+		assertThat(props)
+				.contains("app.foo.bar=v")
+				.contains(" app.other.key = value ")
+				.contains(" app.foo.wizz=v2 ")
+				.contains(" deployer.foo.pot=fern")
+				.contains(" deployer.other.cow = meww,special=koza=boza,more");

 		try {
 			DeploymentPropertiesUtils.parseParamList("a=b", " ");
 			fail("Illegal Argument Exception expected.");
 		}
 		catch (Exception e) {
-			assertTrue(e.getMessage().equals("Only deployment property keys starting with 'app.' or 'scheduler' or 'deployer.' or 'version.' allowed."));
+			assertThat(e.getMessage()).isEqualTo("Only deployment property keys starting with 'app.' or 'deployer.' or 'version.' allowed. Not a=b");
 		}

 		props = DeploymentPropertiesUtils.parseArgumentList("a=b c=d", " ");

-		assertTrue(props.contains("c=d"));
-		assertTrue(props.contains("a=b"));
+		assertThat(props)
+				.contains("c=d")
+				.contains("a=b");
+
+		props = DeploymentPropertiesUtils.parseArgumentList("a=b c=d ", " ");
+
+		assertThat(props)
+				.contains("a=b")
+				.contains("c=d");

 		props = DeploymentPropertiesUtils.parseArgumentList("foo1=bar1 foo2=bar2 foo3=bar3 xxx3", " ");
-		assertTrue(props.contains("foo1=bar1"));
-		assertTrue(props.contains("foo2=bar2"));
-		assertTrue(props.contains("foo3=bar3 xxx3"));
+		assertThat(props)
+				.contains("foo1=bar1")
+				.contains("foo2=bar2")
+				.contains("foo3=bar3 xxx3");
 	}

 	@Test
-	public void parseArgumentTestsWithQuotes() {
+	void parseArgumentTestsWithQuotes() {

 		List<String> props = DeploymentPropertiesUtils.parseArgumentList("a=\"b c\" e=f g=h", " ");
-		assertTrue(props.contains("a=\"b c\""));
-		assertTrue(props.contains("e=f"));
-		assertTrue(props.contains("g=h"));
+		assertThat(props)
+				.contains("a=\"b c\"")
+				.contains("e=f")
+				.contains("g=h");
 		props = DeploymentPropertiesUtils.parseArgumentList("--composedTaskArguments=\"1.timestamp.format=YYYY "
 				+ "--timestamp.timestamp.format=MM --foo=bar bar=\"bazzz buzz\"\" "
 				+ "a=b c=d --foo=bar", " ");
-		assertTrue(props.contains("--composedTaskArguments=\"1.timestamp.format=YYYY "
-				+ "--timestamp.timestamp.format=MM --foo=bar bar=\"bazzz buzz\"\""));
-		assertTrue(props.contains("a=b"));
-		assertTrue(props.contains("c=d"));
-		assertTrue(props.contains("--foo=bar"));
+		assertThat(props)
+				.contains("--composedTaskArguments=\"1.timestamp.format=YYYY "
+						+ "--timestamp.timestamp.format=MM --foo=bar bar=\"bazzz buzz\"\"")
+				.contains("a=b")
+				.contains("c=d")
+				.contains("--foo=bar");
+	}
+
+	@Test
+	void parseArgumentTestsWithMultipleQuotes() {
+
+		List<String> props = DeploymentPropertiesUtils.parseArgumentList("arg2=\"Argument 2\" arg3=val3", " ");
+		assertThat(props)
+				.contains("arg2=\"Argument 2\"")
+				.contains("arg3=val3");
+
+		props = DeploymentPropertiesUtils.parseArgumentList("arg0=val0 arg1=val1 arg2=\"Argument 2\" arg3=val3", " ");
+		assertThat(props)
+				.contains("arg0=val0")
+				.contains("arg1=val1")
+				.contains("arg2=\"Argument 2\"")
+				.contains("arg3=val3");
+
+		props = DeploymentPropertiesUtils.parseArgumentList("-arg1=val1 arg2=\"Argument 2\" arg3=val3", " ");
+		assertThat(props)
+				.contains("-arg1=val1")
+				.contains("arg2=\"Argument 2\"")
+				.contains("arg3=val3");
+
+		props = DeploymentPropertiesUtils.parseArgumentList("-arg1=val1 arg2=\"Argument 2\" arg3=val3 arg4=\"Argument 4\"", " ");
+		assertThat(props)
+				.contains("-arg1=val1")
+				.contains("arg2=\"Argument 2\"")
+				.contains("arg3=val3")
+				.contains("arg4=\"Argument 4\"");
+
+		props = DeploymentPropertiesUtils.parseArgumentList("-arg1=val1 arg2=\"Argument 2\" arg3=\"val3\" arg4=\"Argument 4\"", " ");
+		assertThat(props)
+				.contains("-arg1=val1")
+				.contains("arg2=\"Argument 2\"")
+				.contains("arg3=\"val3\"")
+				.contains("arg4=\"Argument 4\"");
+
+		props = DeploymentPropertiesUtils.parseArgumentList("-arg1=\"val1\" arg2=\"Argument 2\" arg3=\"val3\" arg4=\"Argument 4\"", " ");
+		assertThat(props)
+				.contains("-arg1=\"val1\"")
+				.contains("arg2=\"Argument 2\"")
+				.contains("arg3=\"val3\"")
+
.contains("arg4=\"Argument 4\""); + } @Test - public void testLongDeploymentPropertyValues() { + void longDeploymentPropertyValues() { Map props = DeploymentPropertiesUtils .parse("app.foo.bar=FoooooooooooooooooooooBar,app.foo" + ".bar2=FoooooooooooooooooooooBar"); - assertThat(props, hasEntry("app.foo.bar", "FoooooooooooooooooooooBar")); + assertThat(props.entrySet()).contains(entry("app.foo.bar", "FoooooooooooooooooooooBar")); props = DeploymentPropertiesUtils.parse("app.foo.bar=FooooooooooooooooooooooooooooooooooooooooooooooooooooBar"); - assertThat(props, hasEntry("app.foo.bar", "FooooooooooooooooooooooooooooooooooooooooooooooooooooBar")); + assertThat(props.entrySet()).contains(entry("app.foo.bar", "FooooooooooooooooooooooooooooooooooooooooooooooooooooBar")); } @Test - public void testDeployerProperties() { + void deployerProperties() { Map props = new LinkedHashMap<>(); props.put("app.myapp.foo", "bar"); props.put("deployer.myapp.count", "2"); @@ -177,14 +229,14 @@ public void testDeployerProperties() { props.put("deployer.myapp.precedence", "app"); Map result = DeploymentPropertiesUtils.extractAndQualifyDeployerProperties(props, "myapp"); - assertThat(result, hasEntry("spring.cloud.deployer.count", "2")); - assertThat(result, hasEntry("spring.cloud.deployer.foo", "bar")); - assertThat(result, hasEntry("spring.cloud.deployer.precedence", "app")); - assertThat(result, not(hasKey("app.myapp.foo"))); + assertThat(result.entrySet()).contains(entry("spring.cloud.deployer.count", "2")); + assertThat(result.entrySet()).contains(entry("spring.cloud.deployer.foo", "bar")); + assertThat(result.entrySet()).contains(entry("spring.cloud.deployer.precedence", "app")); + assertThat(result.keySet()).doesNotContain("app.myapp.foo"); } @Test - public void testDeployerPropertiesWithApp() { + void deployerPropertiesWithApp() { Map props = new LinkedHashMap<>(); props.put("app.myapp.foo", "bar"); props.put("deployer.myapp.count", "2"); @@ -194,14 +246,14 @@ public void testDeployerPropertiesWithApp() { props.put("deployer.myapp.precedence", "app"); Map result = DeploymentPropertiesUtils.qualifyDeployerProperties(props, "myapp"); - assertThat(result, hasEntry("spring.cloud.deployer.count", "2")); - assertThat(result, hasEntry("spring.cloud.deployer.foo", "bar")); - assertThat(result, hasEntry("spring.cloud.deployer.precedence", "app")); - assertThat(result, hasKey("app.myapp.foo")); + assertThat(result.entrySet()).contains(entry("spring.cloud.deployer.count", "2")); + assertThat(result.entrySet()).contains(entry("spring.cloud.deployer.foo", "bar")); + assertThat(result.entrySet()).contains(entry("spring.cloud.deployer.precedence", "app")); + assertThat(result).containsKey("app.myapp.foo"); } @Test - public void testCommandLineParamsParsing() { + void commandLineParamsParsing() { assertArrays(new String[] { "--format=x,y,z" }, new String[] { "--format=x,y,z" }); assertArrays(new String[] { "--format=yyyy-MM-dd" }, new String[] { "--format=yyyy-MM-dd" }); assertArrays(new String[] { "'--format=yyyy-MM-dd HH:mm:ss.SSS'" }, @@ -220,16 +272,18 @@ public void testCommandLineParamsParsing() { } @Test - public void testParseDeploymentProperties() throws IOException { + void parseDeploymentProperties() throws IOException { File file = Files.createTempFile(null, ".yaml").toFile(); FileCopyUtils.copy("app.foo1:\n bar1: spam".getBytes(), file); Map props = DeploymentPropertiesUtils.parseDeploymentProperties("app.foo2=bar2", file, 0); - assertThat(props.size(), is(1)); - assertThat(props.get("app.foo2"), is("bar2")); 
+ assertThat(props) + .hasSize(1) + .containsEntry("app.foo2", "bar2"); props = DeploymentPropertiesUtils.parseDeploymentProperties("foo2=bar2", file, 1); - assertThat(props.size(), is(1)); - assertThat(props.get("app.foo1.bar1"), is("spam")); + assertThat(props) + .hasSize(1) + .containsEntry("app.foo1.bar1", "spam"); } } diff --git a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/util/HttpClientConfigurerTests.java b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/util/HttpClientConfigurerTests.java index 5f6b8599ac..1160ef89cb 100644 --- a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/util/HttpClientConfigurerTests.java +++ b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/util/HttpClientConfigurerTests.java @@ -18,27 +18,27 @@ import java.lang.reflect.Field; import java.net.URI; -import org.apache.http.auth.AuthScope; -import org.apache.http.client.CredentialsProvider; -import org.apache.http.client.HttpClient; -import org.junit.Assert; -import org.junit.Test; +import org.apache.hc.client5.http.auth.AuthScope; +import org.apache.hc.client5.http.auth.CredentialsProvider; +import org.junit.jupiter.api.Test; import org.springframework.util.ReflectionUtils; -import static org.junit.Assert.fail; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; /** * @author Gunnar Hillert + * @author Corneil du Plessis * @since 1.4 */ -public class HttpClientConfigurerTests { +class HttpClientConfigurerTests { /** - * Basic test ensuring that the {@link HttpClient} is built successfully. + * Basic test ensuring that the {@code HttpClient} is built successfully. */ @Test - public void testThatHttpClientWithProxyIsCreated() throws Exception { + void thatHttpClientWithProxyIsCreated() throws Exception { final URI targetHost = new URI("http://test.com/"); final HttpClientConfigurer builder = HttpClientConfigurer.create(targetHost); @@ -47,11 +47,11 @@ public void testThatHttpClientWithProxyIsCreated() throws Exception { } /** - * Basic test ensuring that the {@link HttpClient} is built successfully with + * Basic test ensuring that the {@code HttpClient} is built successfully with * null username and password. */ @Test - public void testThatHttpClientWithProxyIsCreatedWithNullUsernameAndPassword() throws Exception { + void thatHttpClientWithProxyIsCreatedWithNullUsernameAndPassword() throws Exception { final URI targetHost = new URI("http://test.com/"); final HttpClientConfigurer builder = HttpClientConfigurer.create(targetHost); builder.withProxyCredentials(URI.create("https://spring.io/"), null, null); @@ -63,14 +63,14 @@ public void testThatHttpClientWithProxyIsCreatedWithNullUsernameAndPassword() th * Uri is not set. 
*/ @Test - public void testHttpClientWithProxyCreationWithMissingScheme() throws Exception { + void httpClientWithProxyCreationWithMissingScheme() throws Exception { final URI targetHost = new URI("http://test.com/"); final HttpClientConfigurer builder = HttpClientConfigurer.create(targetHost); try { builder.withProxyCredentials(URI.create("spring"), "spring", "cloud"); } catch (IllegalArgumentException e) { - Assert.assertEquals("The scheme component of the proxyUri must not be empty.", e.getMessage()); + assertThat(e.getMessage()).isEqualTo("The scheme component of the proxyUri must not be empty."); return; } fail("Expected an IllegalArgumentException to be thrown."); @@ -81,14 +81,14 @@ public void testHttpClientWithProxyCreationWithMissingScheme() throws Exception * Uri is null. */ @Test - public void testHttpClientWithNullProxyUri() throws Exception { + void httpClientWithNullProxyUri() throws Exception { final URI targetHost = new URI("http://test.com/"); final HttpClientConfigurer builder = HttpClientConfigurer.create(targetHost); try { builder.withProxyCredentials(null, null, null); } catch (IllegalArgumentException e) { - Assert.assertEquals("The proxyUri must not be null.", e.getMessage()); + assertThat(e.getMessage()).isEqualTo("The proxyUri must not be null."); return; } fail("Expected an IllegalArgumentException to be thrown."); @@ -98,7 +98,7 @@ public void testHttpClientWithNullProxyUri() throws Exception { * Test ensuring that the {@link AuthScope} is set for the target host. */ @Test - public void testThatHttpClientWithProxyIsCreatedAndHasCorrectCredentialsProviders() throws Exception { + void thatHttpClientWithProxyIsCreatedAndHasCorrectCredentialsProviders() throws Exception { final URI targetHost = new URI("http://test.com/"); final HttpClientConfigurer builder = HttpClientConfigurer.create(targetHost); builder.basicAuthCredentials("foo", "password"); @@ -107,15 +107,15 @@ public void testThatHttpClientWithProxyIsCreatedAndHasCorrectCredentialsProvider final Field credentialsProviderField = ReflectionUtils.findField(HttpClientConfigurer.class, "credentialsProvider"); ReflectionUtils.makeAccessible(credentialsProviderField); CredentialsProvider credentialsProvider = (CredentialsProvider) credentialsProviderField.get(builder); - Assert.assertNotNull(credentialsProvider.getCredentials(new AuthScope("test.com", 80))); - Assert.assertNull(credentialsProvider.getCredentials(new AuthScope("spring.io", 80))); + assertThat(credentialsProvider.getCredentials(new AuthScope("test.com", 80), null)).isNotNull(); + assertThat(credentialsProvider.getCredentials(new AuthScope("spring.io", 80), null)).isNull(); } /** * Test ensuring that the {@link AuthScope} is set for the target host and the proxy server. 
*/ @Test - public void testThatHttpClientWithProxyIsCreatedAndHasCorrectCredentialsProviders2() throws Exception { + void thatHttpClientWithProxyIsCreatedAndHasCorrectCredentialsProviders2() throws Exception { final URI targetHost = new URI("http://test.com/"); final HttpClientConfigurer builder = HttpClientConfigurer.create(targetHost); builder.basicAuthCredentials("foo", "password"); @@ -124,7 +124,7 @@ public void testThatHttpClientWithProxyIsCreatedAndHasCorrectCredentialsProvider final Field credentialsProviderField = ReflectionUtils.findField(HttpClientConfigurer.class, "credentialsProvider"); ReflectionUtils.makeAccessible(credentialsProviderField); CredentialsProvider credentialsProvider = (CredentialsProvider) credentialsProviderField.get(builder); - Assert.assertNotNull(credentialsProvider.getCredentials(new AuthScope("test.com", 80))); - Assert.assertNotNull(credentialsProvider.getCredentials(new AuthScope("spring.io", 80))); + assertThat(credentialsProvider.getCredentials(new AuthScope("test.com", 80), null)).isNotNull(); + assertThat(credentialsProvider.getCredentials(new AuthScope("spring.io", 80), null)).isNotNull(); } } diff --git a/spring-cloud-dataflow-server-core/pom.xml b/spring-cloud-dataflow-server-core/pom.xml index 9355a7f4d2..6f2a45dd84 100644 --- a/spring-cloud-dataflow-server-core/pom.xml +++ b/spring-cloud-dataflow-server-core/pom.xml @@ -1,13 +1,22 @@ - + 4.0.0 org.springframework.cloud spring-cloud-dataflow-parent - 2.8.0-SNAPSHOT + 3.0.0-SNAPSHOT + ../spring-cloud-dataflow-parent spring-cloud-dataflow-server-core + spring-cloud-dataflow-server-core + Data Flow Server Core + jar + + true + 3.4.1 + io.micrometer @@ -25,10 +34,31 @@ io.micrometer.prometheus prometheus-rsocket-spring - + + jakarta.persistence + jakarta.persistence-api + + + jakarta.servlet + jakarta.servlet-api + + + jakarta.validation + jakarta.validation-api + + + org.hibernate.orm + hibernate-micrometer + org.springframework.cloud spring-cloud-dataflow-common-flyway + ${dataflow.version} + + + org.springframework.cloud + spring-cloud-dataflow-common-persistence + ${dataflow.version} com.jayway.jsonpath @@ -39,6 +69,10 @@ com.zaxxer HikariCP + + org.springdoc + springdoc-openapi-starter-webmvc-ui + org.springframework.boot spring-boot-starter-data-jpa @@ -46,14 +80,17 @@ org.springframework.cloud spring-cloud-dataflow-configuration-metadata + ${project.version} org.springframework.cloud spring-cloud-dataflow-completion + ${project.version} org.springframework.cloud spring-cloud-dataflow-core + ${project.version} org.springframework.cloud @@ -62,10 +99,12 @@ org.springframework.cloud spring-cloud-dataflow-rest-resource + ${project.version} org.springframework.cloud spring-cloud-dataflow-registry + ${project.version} org.springframework.cloud @@ -85,13 +124,8 @@ org.springframework.cloud - spring-cloud-starter-common-security-config-web - - - org.codehaus.jackson - jackson-mapper-asl - - + spring-cloud-common-security-config-web + ${project.version} org.springframework.boot @@ -120,9 +154,10 @@ - org.springframework.boot - spring-boot-starter-test - test + com.h2database + h2 + true + provided org.springframework.cloud @@ -131,20 +166,11 @@ org.springframework.cloud spring-cloud-task-batch - test - - - org.skyscreamer - jsonassert - test - - - com.h2database - h2 org.mariadb.jdbc mariadb-java-client + [3.1.2,) org.postgresql @@ -161,10 +187,6 @@ - - org.springframework.cloud - spring-cloud-starter-config - org.springframework.boot spring-boot-configuration-processor @@ -173,14 +195,17 @@ 
org.springframework.cloud spring-cloud-skipper-client + ${project.version} org.springframework.cloud spring-cloud-skipper + ${project.version} org.springframework.cloud spring-cloud-dataflow-audit + ${project.version} compile @@ -188,6 +213,47 @@ jsr305 provided + + org.springframework.boot + spring-boot-starter-test + test + + + org.springframework.batch + spring-batch-test + test + + + org.awaitility + awaitility + test + + + org.hamcrest + hamcrest-junit + 2.0.0.0 + test + + + org.testcontainers + junit-jupiter + test + + + org.testcontainers + mariadb + test + + + org.testcontainers + postgresql + test + + + org.hibernate.orm + hibernate-ant + test + @@ -196,6 +262,7 @@ true META-INF/spring.factories + META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports banner.txt META-INF/dataflow-server-defaults.yml META-INF/application-stream-common-properties-defaults.yml @@ -206,5 +273,50 @@ + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + javadoc + + jar + + package + + + + + org.apache.maven.plugins + maven-source-plugin + 3.3.0 + + + source + + jar + + package + + + + + org.springframework.boot + spring-boot-maven-plugin + + + + build-info + + + + + + io.github.git-commit-id + git-commit-id-maven-plugin + + diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/TaskValidationController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/TaskValidationController.java index 1342d23d26..e7fe9424d5 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/TaskValidationController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/TaskValidationController.java @@ -26,9 +26,9 @@ import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; import org.springframework.util.Assert; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; @@ -67,10 +67,10 @@ public TaskValidationController(TaskValidationService taskValidationService) { * @param name name of the task definition * @return The status for the apps in a task definition. */ - @RequestMapping(value = "/{name}", method = RequestMethod.GET) + @GetMapping("/{name}") @ResponseStatus(HttpStatus.OK) public TaskAppStatusResource validate( - @PathVariable("name") String name) { + @PathVariable String name) { ValidationStatus result = this.taskValidationService.validateTask(name); return new Assembler().toModel(result); } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/AllInOneExecutionContextSerializer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/AllInOneExecutionContextSerializer.java new file mode 100644 index 0000000000..67fad89761 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/AllInOneExecutionContextSerializer.java @@ -0,0 +1,70 @@ +/* + * Copyright 2023 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.batch; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.ObjectInputStream; +import java.util.Base64; +import java.util.HashMap; +import java.util.Map; + +import org.apache.commons.io.IOUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.batch.core.repository.dao.Jackson2ExecutionContextStringSerializer; + +/** + * Implements the same logic as used in Batch 5.x. + * @author Corneil du Plessis + */ +public class AllInOneExecutionContextSerializer extends Jackson2ExecutionContextStringSerializer { + private final static Logger logger = LoggerFactory.getLogger(AllInOneExecutionContextSerializer.class); + @SuppressWarnings({"unchecked", "NullableProblems"}) + @Override + public Map<String, Object> deserialize(InputStream inputStream) throws IOException { + ByteArrayOutputStream buffer = new ByteArrayOutputStream(); + IOUtils.copy(inputStream, buffer); + Map<String, Object> result = new HashMap<>(); + // Try Jackson + try { + return super.deserialize(new ByteArrayInputStream(buffer.toByteArray())); + } catch (Throwable x) { + result.put("context.deserialize.error.jackson", x.toString()); + } + InputStream decodingStream = new ByteArrayInputStream(buffer.toByteArray()); + try { + // Try decode base64 + decodingStream = Base64.getDecoder().wrap(decodingStream); + } catch (Throwable x) { + // Use original input for java deserialization + decodingStream = new ByteArrayInputStream(buffer.toByteArray()); + result.put("context.deserialize.error.base64.decode", x.toString()); + } + try { + ObjectInputStream objectInputStream = new ObjectInputStream(decodingStream); + return (Map<String, Object>) objectInputStream.readObject(); + } catch (Throwable x) { + result.put("context.deserialize.error.java.deserialization", x.toString()); + } + // They may have a custom serializer or custom classes. + logger.warn("deserialization failed:{}", result); + return result; + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/DataflowSqlPagingQueryProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/DataflowSqlPagingQueryProvider.java new file mode 100644 index 0000000000..b5a1e91097 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/DataflowSqlPagingQueryProvider.java @@ -0,0 +1,40 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
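A sketch (not part of the patch) of what the fallback chain in `AllInOneExecutionContextSerializer` above handles: contexts written by Batch 5's Jackson serializer succeed on the first attempt via `super.deserialize(..)`, while rows written by the legacy Base64-wrapped Java serialization format fall through to the second and third attempts:

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectOutputStream;
import java.util.Base64;
import java.util.HashMap;
import java.util.Map;

import org.springframework.cloud.dataflow.server.batch.AllInOneExecutionContextSerializer;

class AllInOneSerializerSketch {
	public static void main(String[] args) throws Exception {
		// Emulate a legacy row: Java-serialize the context, then Base64-encode it.
		Map<String, Object> context = new HashMap<>();
		context.put("counter", 1234);
		ByteArrayOutputStream bytes = new ByteArrayOutputStream();
		try (ObjectOutputStream out = new ObjectOutputStream(Base64.getEncoder().wrap(bytes))) {
			out.writeObject(context);
		}
		// The Jackson attempt fails, then the Base64 + Java-deserialization attempt succeeds.
		AllInOneExecutionContextSerializer serializer = new AllInOneExecutionContextSerializer();
		Map<String, Object> restored = serializer.deserialize(new ByteArrayInputStream(bytes.toByteArray()));
		System.out.println(restored.get("counter")); // 1234
	}
}
```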
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.batch; + +import org.springframework.batch.item.database.PagingQueryProvider; + +/** + * @author Thomas Risberg + * @author Michael Minella + * @author Corneil du Plessis + */ +public interface DataflowSqlPagingQueryProvider extends PagingQueryProvider { + + /** + * + * Generate the query that will provide the jump to item query. The itemIndex provided could be in the middle of + * the page and together with the page size it will be used to calculate the last index of the preceding page + * to be able to retrieve the sort key for this row. + * + * @param itemIndex the index for the next item to be read + * @param pageSize number of rows to read for each page + * @return the generated query + */ + String generateJumpToItemQuery(int itemIndex, int pageSize); + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableJobExecutionDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableJobExecutionDao.java index fdb84fd86c..49a811d01c 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableJobExecutionDao.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableJobExecutionDao.java @@ -1,5 +1,5 @@ /* - * Copyright 2006-2014 the original author or authors. + * Copyright 2006-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
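Before the hunks below: every paging method in this DAO follows the same three-query pattern built on the provider defined above. A schematic composite of the code that follows (simplified, not additional API; `DataflowSqlPagingQueryProvider` is the patch's interface, the other types are Spring Batch and Spring JDBC):

```java
import java.util.List;

import org.springframework.batch.core.JobExecution;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;

class PagingPatternSketch {
	List<JobExecution> page(JdbcTemplate jdbc, DataflowSqlPagingQueryProvider provider,
			RowMapper<JobExecution> mapper, int start, int pageSize) {
		if (start <= 0) {
			// No keyset yet: LIMIT/TOP-style first-page query.
			return jdbc.query(provider.generateFirstPageQuery(pageSize), mapper);
		}
		// Fetch the sort key of the last row of the page preceding 'start' ...
		Long startAfter = jdbc.queryForObject(provider.generateJumpToItemQuery(start, pageSize), Long.class);
		// ... then page forward from that key.
		return jdbc.query(provider.generateRemainingPagesQuery(pageSize), mapper, startAfter);
	}
}
```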
@@ -17,59 +17,76 @@ import java.sql.ResultSet; import java.sql.SQLException; +import java.sql.Timestamp; +import java.time.LocalDateTime; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import javax.sql.DataSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.JobInstance; +import org.springframework.batch.core.JobParameter; import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.converter.StringToLocalDateTimeConverter; import org.springframework.batch.core.repository.dao.JdbcJobExecutionDao; import org.springframework.batch.item.database.Order; -import org.springframework.batch.item.database.PagingQueryProvider; -import org.springframework.batch.item.database.support.SqlPagingQueryProviderFactoryBean; +import org.springframework.cloud.dataflow.core.database.support.DatabaseType; +import org.springframework.cloud.dataflow.server.batch.support.SqlPagingQueryProviderFactoryBean; +import org.springframework.cloud.dataflow.server.repository.support.SchemaUtilities; +import org.springframework.core.convert.support.ConfigurableConversionService; +import org.springframework.core.convert.support.DefaultConversionService; +import org.springframework.dao.EmptyResultDataAccessException; import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.jdbc.core.JdbcOperations; import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.RowCallbackHandler; import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.support.MetaDataAccessException; import org.springframework.jdbc.support.incrementer.AbstractDataFieldMaxValueIncrementer; import org.springframework.util.Assert; +import org.springframework.util.StringUtils; /** * @author Dave Syer * @author Michael Minella * @author Glenn Renfro + * @author Corneil du Plessis * */ public class JdbcSearchableJobExecutionDao extends JdbcJobExecutionDao implements SearchableJobExecutionDao { + private static final String FIND_PARAMS_FROM_ID = "SELECT JOB_EXECUTION_ID, PARAMETER_NAME, PARAMETER_TYPE, PARAMETER_VALUE, IDENTIFYING FROM %PREFIX%JOB_EXECUTION_PARAMS WHERE JOB_EXECUTION_ID = ?"; + private static final String GET_COUNT = "SELECT COUNT(1) from %PREFIX%JOB_EXECUTION"; - private static final String GET_COUNT_BY_JOB_NAME = "SELECT COUNT(1) from %PREFIX%JOB_EXECUTION E, %PREFIX%JOB_INSTANCE I " - + "where E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID and I.JOB_NAME=?"; + private static final String GET_COUNT_BY_JOB_NAME = "SELECT COUNT(1) from %PREFIX%JOB_EXECUTION E " + + "JOIN %PREFIX%JOB_INSTANCE I ON E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID where I.JOB_NAME=?"; - private static final String GET_COUNT_BY_STATUS = "SELECT COUNT(1) from %PREFIX%JOB_EXECUTION E, %PREFIX%JOB_INSTANCE I " - + "where E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID and E.STATUS = ?"; + private static final String GET_COUNT_BY_STATUS = "SELECT COUNT(1) from %PREFIX%JOB_EXECUTION E " + + "JOIN %PREFIX%JOB_INSTANCE I ON E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID where E.STATUS = ?"; - private static final String GET_COUNT_BY_JOB_NAME_AND_STATUS = "SELECT COUNT(1) from %PREFIX%JOB_EXECUTION E, %PREFIX%JOB_INSTANCE I " - + "where 
E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID and I.JOB_NAME=? AND E.STATUS = ?"; + private static final String GET_COUNT_BY_JOB_NAME_AND_STATUS = "SELECT COUNT(1) from %PREFIX%JOB_EXECUTION E " + + "JOIN %PREFIX%JOB_INSTANCE I ON E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID where I.JOB_NAME=? AND E.STATUS = ?"; private static final String FIELDS = "E.JOB_EXECUTION_ID, E.START_TIME, E.END_TIME, E.STATUS, E.EXIT_CODE, E.EXIT_MESSAGE, " + "E.CREATE_TIME, E.LAST_UPDATED, E.VERSION, I.JOB_INSTANCE_ID, I.JOB_NAME"; - private static final String FIELDS_WITH_STEP_COUNT = FIELDS + - ", (SELECT COUNT(*) FROM %PREFIX%STEP_EXECUTION S WHERE S.JOB_EXECUTION_ID = E.JOB_EXECUTION_ID) as STEP_COUNT"; - + private static final String FIELDS_WITH_STEP_COUNT = FIELDS + + ", (SELECT COUNT(*) FROM %PREFIX%STEP_EXECUTION S WHERE S.JOB_EXECUTION_ID = E.JOB_EXECUTION_ID) as STEP_COUNT"; private static final String GET_RUNNING_EXECUTIONS = "SELECT " + FIELDS - + " from %PREFIX%JOB_EXECUTION E, %PREFIX%JOB_INSTANCE I " - + "where E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID and E.END_TIME is NULL"; + + " from %PREFIX%JOB_EXECUTION E JOIN %PREFIX%JOB_INSTANCE I ON E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID where E.END_TIME is NULL"; private static final String NAME_FILTER = "I.JOB_NAME LIKE ?"; @@ -81,31 +98,59 @@ public class JdbcSearchableJobExecutionDao extends JdbcJobExecutionDao implement private static final String NAME_AND_STATUS_FILTER = "I.JOB_NAME LIKE ? AND E.STATUS = ?"; - private static final String TASK_EXECUTION_ID_FILTER = - "B.JOB_EXECUTION_ID = E.JOB_EXECUTION_ID AND B.TASK_EXECUTION_ID = ?"; + private static final String TASK_EXECUTION_ID_FILTER = "B.JOB_EXECUTION_ID = E.JOB_EXECUTION_ID AND B.TASK_EXECUTION_ID = ?"; + + private static final String FIND_JOB_EXECUTIONS = "SELECT JOB_EXECUTION_ID, START_TIME, END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, CREATE_TIME, LAST_UPDATED, VERSION" + + " from %PREFIX%JOB_EXECUTION where JOB_INSTANCE_ID = ? order by JOB_EXECUTION_ID desc"; + + private static final String GET_LAST_EXECUTION = "SELECT JOB_EXECUTION_ID, START_TIME, END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, CREATE_TIME, LAST_UPDATED, VERSION" + + " from %PREFIX%JOB_EXECUTION E where JOB_INSTANCE_ID = ? and JOB_EXECUTION_ID in (SELECT max(JOB_EXECUTION_ID) from %PREFIX%JOB_EXECUTION E2 where E2.JOB_INSTANCE_ID = ?)"; + + private static final String GET_RUNNING_EXECUTIONS_BY_JOB_NAME = "SELECT E.JOB_EXECUTION_ID, E.START_TIME, E.END_TIME, E.STATUS, E.EXIT_CODE, E.EXIT_MESSAGE, E.CREATE_TIME, E.LAST_UPDATED, E.VERSION, " + + "E.JOB_INSTANCE_ID from %PREFIX%JOB_EXECUTION E, %PREFIX%JOB_INSTANCE I where E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID and I.JOB_NAME=? 
and E.START_TIME is not NULL and E.END_TIME is NULL order by E.JOB_EXECUTION_ID desc"; + + private static final String GET_EXECUTION_BY_ID = "SELECT JOB_EXECUTION_ID, START_TIME, END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, CREATE_TIME, LAST_UPDATED, VERSION" + + " from %PREFIX%JOB_EXECUTION where JOB_EXECUTION_ID = ?"; + + private static final String FROM_CLAUSE_TASK_TASK_BATCH = "%TASK_PREFIX%TASK_BATCH B"; - private static final String FROM_CLAUSE_TASK_TASK_BATCH = "TASK_TASK_BATCH B"; + private static final String GET_JOB_EXECUTIONS_BY_TASK_IDS = "SELECT JOB_EXECUTION_ID, TASK_EXECUTION_ID from %TASK_PREFIX%TASK_BATCH WHERE TASK_EXECUTION_ID in (?)"; - private PagingQueryProvider allExecutionsPagingQueryProvider; + private static final Logger logger = LoggerFactory.getLogger(JdbcSearchableJobExecutionDao.class); - private PagingQueryProvider byJobNamePagingQueryProvider; - private PagingQueryProvider byStatusPagingQueryProvider; + private DataflowSqlPagingQueryProvider allExecutionsPagingQueryProvider; - private PagingQueryProvider byJobNameAndStatusPagingQueryProvider; + private DataflowSqlPagingQueryProvider byJobNamePagingQueryProvider; - private PagingQueryProvider byJobNameWithStepCountPagingQueryProvider; + private DataflowSqlPagingQueryProvider byStatusPagingQueryProvider; - private PagingQueryProvider executionsWithStepCountPagingQueryProvider; + private DataflowSqlPagingQueryProvider byJobNameAndStatusPagingQueryProvider; - private PagingQueryProvider byDateRangeWithStepCountPagingQueryProvider; - private PagingQueryProvider byJobInstanceIdWithStepCountPagingQueryProvider; + private DataflowSqlPagingQueryProvider byJobNameWithStepCountPagingQueryProvider; - private PagingQueryProvider byTaskExecutionIdWithStepCountPagingQueryProvider; + private DataflowSqlPagingQueryProvider executionsWithStepCountPagingQueryProvider; + + private DataflowSqlPagingQueryProvider byDateRangeWithStepCountPagingQueryProvider; + + private DataflowSqlPagingQueryProvider byJobInstanceIdWithStepCountPagingQueryProvider; + + private DataflowSqlPagingQueryProvider byTaskExecutionIdWithStepCountPagingQueryProvider; + + private final ConfigurableConversionService conversionService; private DataSource dataSource; + private String taskTablePrefix; + + private DatabaseType databaseType; + + public JdbcSearchableJobExecutionDao() { + conversionService = new DefaultConversionService(); + conversionService.addConverter(new StringToLocalDateTimeConverter()); + } + /** * @param dataSource the dataSource to set */ @@ -113,14 +158,16 @@ public void setDataSource(DataSource dataSource) { this.dataSource = dataSource; } + public void setTaskTablePrefix(String taskTablePrefix) { + this.taskTablePrefix = taskTablePrefix; + } + /** * @see JdbcJobExecutionDao#afterPropertiesSet() */ @Override public void afterPropertiesSet() throws Exception { - Assert.state(dataSource != null, "DataSource must be provided"); - if (getJdbcTemplate() == null) { setJdbcTemplate(new JdbcTemplate(dataSource)); } @@ -130,61 +177,109 @@ protected long getNextKey() { return 0; } }); - allExecutionsPagingQueryProvider = getPagingQueryProvider(); executionsWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, null, null); byJobNamePagingQueryProvider = getPagingQueryProvider(NAME_FILTER); byStatusPagingQueryProvider = getPagingQueryProvider(STATUS_FILTER); byJobNameAndStatusPagingQueryProvider = getPagingQueryProvider(NAME_AND_STATUS_FILTER); byJobNameWithStepCountPagingQueryProvider = 
getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, null, NAME_FILTER); - byDateRangeWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, null, - DATE_RANGE_FILTER); - byJobInstanceIdWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, null, - JOB_INSTANCE_ID_FILTER); + byDateRangeWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, null, DATE_RANGE_FILTER); + byJobInstanceIdWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, null, JOB_INSTANCE_ID_FILTER); byTaskExecutionIdWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, - FROM_CLAUSE_TASK_TASK_BATCH, TASK_EXECUTION_ID_FILTER); - + getTaskQuery(FROM_CLAUSE_TASK_TASK_BATCH), TASK_EXECUTION_ID_FILTER); + databaseType = getDatabaseType(); super.afterPropertiesSet(); + } + + protected String getTaskQuery(String base) { + return StringUtils.replace(base, "%TASK_PREFIX%", taskTablePrefix); + } + @Override + public List<JobExecution> findJobExecutions(JobInstance job) { + Assert.notNull(job, "Job cannot be null."); + Assert.notNull(job.getId(), "Job Id cannot be null."); + + String sqlQuery = FIND_JOB_EXECUTIONS; + return getJdbcTemplate().query(getQuery(sqlQuery), new JobExecutionRowMapper(job), job.getId()); + + } + @Override + public JobExecution getLastJobExecution(JobInstance jobInstance) { + Long id = jobInstance.getId(); + String sqlQuery = GET_LAST_EXECUTION; + List<JobExecution> executions = getJdbcTemplate().query(getQuery(sqlQuery), + new JobExecutionRowMapper(jobInstance), id, id); + + Assert.state(executions.size() <= 1, "There must be at most one latest job execution"); + + if (executions.isEmpty()) { + return null; + } + else { + return executions.get(0); + } + } + + @Override + public Set<JobExecution> findRunningJobExecutions(String jobName) { + Set<JobExecution> result = new HashSet<>(); + String sqlQuery = GET_RUNNING_EXECUTIONS_BY_JOB_NAME; + // collect the mapped rows; without addAll the method would always return an empty set + result.addAll(getJdbcTemplate().query(getQuery(sqlQuery), new JobExecutionRowMapper(), jobName)); + + return result; + } + + @Override + public JobExecution getJobExecution(Long executionId) { + try { + String sqlQuery = GET_EXECUTION_BY_ID; + return getJdbcTemplate().queryForObject(getQuery(sqlQuery), new JobExecutionRowMapper(), + executionId); + } + catch (EmptyResultDataAccessException e) { + return null; + } } /** - * @return a {@link PagingQueryProvider} for all job executions + * @return a {@link DataflowSqlPagingQueryProvider} for all job executions * @throws Exception if page provider is not created. */ - private PagingQueryProvider getPagingQueryProvider() throws Exception { + private DataflowSqlPagingQueryProvider getPagingQueryProvider() throws Exception { return getPagingQueryProvider(null); } + /** - * @return a {@link PagingQueryProvider} for all job executions with the - * provided where clause + * @return a {@link DataflowSqlPagingQueryProvider} for all job executions with the provided + * where clause * @throws Exception if page provider is not created. */ - private PagingQueryProvider getPagingQueryProvider(String whereClause) throws Exception { + private DataflowSqlPagingQueryProvider getPagingQueryProvider(String whereClause) throws Exception { return getPagingQueryProvider(null, whereClause); } + /** - * @return a {@link PagingQueryProvider} with a where clause to narrow the - * query + * @return a {@link DataflowSqlPagingQueryProvider} with a where clause to narrow the query * @throws Exception if page provider is not created.
*/ - private PagingQueryProvider getPagingQueryProvider(String fromClause, String whereClause) throws Exception { + private DataflowSqlPagingQueryProvider getPagingQueryProvider(String fromClause, String whereClause) throws Exception { return getPagingQueryProvider(null, fromClause, whereClause); } /** - * @return a {@link PagingQueryProvider} with a where clause to narrow the - * query + * @return a {@link DataflowSqlPagingQueryProvider} with a where clause to narrow the query * @throws Exception if page provider is not created. */ - private PagingQueryProvider getPagingQueryProvider(String fields, String fromClause, String whereClause) throws Exception { + private DataflowSqlPagingQueryProvider getPagingQueryProvider(String fields, String fromClause, String whereClause) + throws Exception { SqlPagingQueryProviderFactoryBean factory = new SqlPagingQueryProviderFactoryBean(); factory.setDataSource(dataSource); fromClause = "%PREFIX%JOB_EXECUTION E, %PREFIX%JOB_INSTANCE I" + (fromClause == null ? "" : ", " + fromClause); factory.setFromClause(getQuery(fromClause)); - if(fields == null) { + if (fields == null) { fields = FIELDS; } factory.setSelectClause(getQuery(fields)); @@ -226,15 +321,16 @@ public int countJobExecutions(BatchStatus status) { */ @Override public int countJobExecutions(String jobName, BatchStatus status) { - return getJdbcTemplate().queryForObject(getQuery(GET_COUNT_BY_JOB_NAME_AND_STATUS), Integer.class, jobName, status.name()); + return getJdbcTemplate().queryForObject(getQuery(GET_COUNT_BY_JOB_NAME_AND_STATUS), Integer.class, jobName, + status.name()); } /** * @see SearchableJobExecutionDao#getJobExecutionsWithStepCount(Date, Date, int, int) */ @Override - public List getJobExecutionsWithStepCount(Date fromDate, - Date toDate, int start, int count) { + public List getJobExecutionsWithStepCount(Date fromDate, Date toDate, int start, + int count) { if (start <= 0) { return getJdbcTemplate().query(byDateRangeWithStepCountPagingQueryProvider.generateFirstPageQuery(count), @@ -244,7 +340,8 @@ public List getJobExecutionsWithStepCount(Date fromDa Long startAfterValue = getJdbcTemplate().queryForObject( byDateRangeWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, fromDate, toDate); - return getJdbcTemplate().query(byDateRangeWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count), + return getJdbcTemplate().query( + byDateRangeWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count), new JobExecutionStepCountRowMapper(), fromDate, toDate, startAfterValue); } catch (IncorrectResultSizeDataAccessException e) { @@ -253,17 +350,19 @@ public List getJobExecutionsWithStepCount(Date fromDa } @Override - public List getJobExecutionsWithStepCountFilteredByJobInstanceId( - int jobInstanceId, int start, int count) { + public List getJobExecutionsWithStepCountFilteredByJobInstanceId(int jobInstanceId, + int start, int count) { if (start <= 0) { - return getJdbcTemplate().query(byJobInstanceIdWithStepCountPagingQueryProvider.generateFirstPageQuery(count), + return getJdbcTemplate().query( + byJobInstanceIdWithStepCountPagingQueryProvider.generateFirstPageQuery(count), new JobExecutionStepCountRowMapper(), jobInstanceId); } try { Long startAfterValue = getJdbcTemplate().queryForObject( byJobInstanceIdWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, jobInstanceId); - return getJdbcTemplate().query(byJobInstanceIdWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count), + return 
getJdbcTemplate().query( + byJobInstanceIdWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count), new JobExecutionStepCountRowMapper(), jobInstanceId, startAfterValue); } catch (IncorrectResultSizeDataAccessException e) { @@ -272,17 +371,19 @@ public List getJobExecutionsWithStepCountFilteredByJo } @Override - public List getJobExecutionsWithStepCountFilteredByTaskExecutionId( - int taskExecutionId, int start, int count) { + public List getJobExecutionsWithStepCountFilteredByTaskExecutionId(int taskExecutionId, + int start, int count) { if (start <= 0) { - return getJdbcTemplate().query(byTaskExecutionIdWithStepCountPagingQueryProvider.generateFirstPageQuery(count), - new JobExecutionStepCountRowMapper(), taskExecutionId); + return getJdbcTemplate().query(SchemaUtilities.getQuery( + byTaskExecutionIdWithStepCountPagingQueryProvider.generateFirstPageQuery(count), + this.getTablePrefix()), new JobExecutionStepCountRowMapper(), taskExecutionId); } try { Long startAfterValue = getJdbcTemplate().queryForObject( byTaskExecutionIdWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, taskExecutionId); - return getJdbcTemplate().query(byTaskExecutionIdWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count), + return getJdbcTemplate().query( + byTaskExecutionIdWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count), new JobExecutionStepCountRowMapper(), taskExecutionId, startAfterValue); } catch (IncorrectResultSizeDataAccessException e) { @@ -295,7 +396,7 @@ public List getJobExecutionsWithStepCountFilteredByTa */ @Override public Collection getRunningJobExecutions() { - return getJdbcTemplate().query(getQuery(GET_RUNNING_EXECUTIONS), new JobExecutionRowMapper()); + return getJdbcTemplate().query(getQuery(GET_RUNNING_EXECUTIONS), new SearchableJobExecutionRowMapper()); } /** @@ -303,52 +404,53 @@ public Collection getRunningJobExecutions() { */ @Override public List getJobExecutions(String jobName, BatchStatus status, int start, int count) { - if (start <= 0) { - return getJdbcTemplate().query(byJobNameAndStatusPagingQueryProvider.generateFirstPageQuery(count), - new JobExecutionRowMapper(), jobName, status.name()); - } - try { - Long startAfterValue = getJdbcTemplate().queryForObject( - byJobNameAndStatusPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, jobName, status.name()); - return getJdbcTemplate().query(byJobNameAndStatusPagingQueryProvider.generateRemainingPagesQuery(count), - new JobExecutionRowMapper(), jobName, status.name(), startAfterValue); - } - catch (IncorrectResultSizeDataAccessException e) { - return Collections.emptyList(); - } - } - - /** - * @see SearchableJobExecutionDao#getJobExecutions(String, int, int) - */ - @Override - public List getJobExecutions(String jobName, int start, int count) { - if (start <= 0) { - return getJdbcTemplate().query(byJobNamePagingQueryProvider.generateFirstPageQuery(count), - new JobExecutionRowMapper(), jobName); - } - try { - Long startAfterValue = getJdbcTemplate().queryForObject( - byJobNamePagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, jobName); - return getJdbcTemplate().query(byJobNamePagingQueryProvider.generateRemainingPagesQuery(count), - new JobExecutionRowMapper(), jobName, startAfterValue); - } - catch (IncorrectResultSizeDataAccessException e) { - return Collections.emptyList(); - } - } + if (start <= 0) { + return getJdbcTemplate().query(byJobNameAndStatusPagingQueryProvider.generateFirstPageQuery(count), + new 
SearchableJobExecutionRowMapper(), jobName, status.name()); + } + try { + Long startAfterValue = getJdbcTemplate().queryForObject( + byJobNameAndStatusPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, jobName, + status.name()); + return getJdbcTemplate().query(byJobNameAndStatusPagingQueryProvider.generateRemainingPagesQuery(count), + new SearchableJobExecutionRowMapper(), jobName, status.name(), startAfterValue); + } + catch (IncorrectResultSizeDataAccessException e) { + return Collections.emptyList(); + } + } + + /** + * @see SearchableJobExecutionDao#getJobExecutions(String, int, int) + */ + @Override + public List getJobExecutions(String jobName, int start, int count) { + if (start <= 0) { + return getJdbcTemplate().query(byJobNamePagingQueryProvider.generateFirstPageQuery(count), + new SearchableJobExecutionRowMapper(), jobName); + } + try { + Long startAfterValue = getJdbcTemplate().queryForObject( + byJobNamePagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, jobName); + return getJdbcTemplate().query(byJobNamePagingQueryProvider.generateRemainingPagesQuery(count), + new SearchableJobExecutionRowMapper(), jobName, startAfterValue); + } + catch (IncorrectResultSizeDataAccessException e) { + return Collections.emptyList(); + } + } @Override public List getJobExecutions(BatchStatus status, int start, int count) { if (start <= 0) { return getJdbcTemplate().query(byStatusPagingQueryProvider.generateFirstPageQuery(count), - new JobExecutionRowMapper(), status.name()); + new SearchableJobExecutionRowMapper(), status.name()); } try { Long startAfterValue = getJdbcTemplate().queryForObject( byStatusPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, status.name()); return getJdbcTemplate().query(byStatusPagingQueryProvider.generateRemainingPagesQuery(count), - new JobExecutionRowMapper(), status.name(), startAfterValue); + new SearchableJobExecutionRowMapper(), status.name(), startAfterValue); } catch (IncorrectResultSizeDataAccessException e) { return Collections.emptyList(); @@ -366,7 +468,8 @@ public List getJobExecutionsWithStepCount(String jobN } try { Long startAfterValue = getJdbcTemplate().queryForObject( - byJobNameWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, jobName); + byJobNameWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class, + jobName); return getJdbcTemplate().query(byJobNameWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count), new JobExecutionStepCountRowMapper(), jobName, startAfterValue); } @@ -382,13 +485,13 @@ public List getJobExecutionsWithStepCount(String jobN public List getJobExecutions(int start, int count) { if (start <= 0) { return getJdbcTemplate().query(allExecutionsPagingQueryProvider.generateFirstPageQuery(count), - new JobExecutionRowMapper()); + new SearchableJobExecutionRowMapper()); } try { - Long startAfterValue = getJdbcTemplate().queryForObject( - allExecutionsPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class); + Long startAfterValue = getJdbcTemplate() + .queryForObject(allExecutionsPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class); return getJdbcTemplate().query(allExecutionsPagingQueryProvider.generateRemainingPagesQuery(count), - new JobExecutionRowMapper(), startAfterValue); + new SearchableJobExecutionRowMapper(), startAfterValue); } catch (IncorrectResultSizeDataAccessException e) { return Collections.emptyList(); @@ -404,7 +507,8 @@ public List 
getJobExecutionsWithStepCount(int start, try { Long startAfterValue = getJdbcTemplate().queryForObject( executionsWithStepCountPagingQueryProvider.generateJumpToItemQuery(start, count), Long.class); - return getJdbcTemplate().query(executionsWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count), + return getJdbcTemplate().query( + executionsWithStepCountPagingQueryProvider.generateRemainingPagesQuery(count), new JobExecutionStepCountRowMapper(), startAfterValue); } catch (IncorrectResultSizeDataAccessException e) { @@ -412,6 +516,25 @@ public List<JobExecutionWithStepCount> getJobExecutionsWithStepCount(int start, } } + @Override + public Map<Long, Set<Long>> getJobExecutionsByTaskIds(Collection<Long> ids) { + JdbcOperations jdbcTemplate = getJdbcTemplate(); + String strIds = StringUtils.collectionToCommaDelimitedString(ids); + + String sql = getTaskQuery(GET_JOB_EXECUTIONS_BY_TASK_IDS).replace("?", strIds); + return jdbcTemplate.query(sql, + rs -> { + final Map<Long, Set<Long>> results = new HashMap<>(); + while (rs.next()) { + Long taskExecutionId = rs.getLong("TASK_EXECUTION_ID"); + Long jobExecutionId = rs.getLong("JOB_EXECUTION_ID"); + Set<Long> jobs = results.computeIfAbsent(taskExecutionId, aLong -> new HashSet<>()); + jobs.add(jobExecutionId); + } + return results; + }); + } + @Override public void saveJobExecution(JobExecution jobExecution) { throw new UnsupportedOperationException("SearchableJobExecutionDao is read only"); @@ -434,9 +557,9 @@ public void updateJobExecution(JobExecution jobExecution) { * @author Glenn Renfro * */ - protected class JobExecutionRowMapper implements RowMapper<JobExecution> { + protected class SearchableJobExecutionRowMapper implements RowMapper<JobExecution> { - JobExecutionRowMapper() { + SearchableJobExecutionRowMapper() { } @Override @@ -445,6 +568,7 @@ public JobExecution mapRow(ResultSet rs, int rowNum) throws SQLException { } } + /** * Re-usable mapper for {@link JobExecutionWithStepCount} instances.
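+ * Each row also carries the STEP_COUNT aggregate produced by the correlated subquery in FIELDS_WITH_STEP_COUNT.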
* @@ -464,8 +588,57 @@ public JobExecutionWithStepCount mapRow(ResultSet rs, int rowNum) throws SQLExce } + //TODO: Boot3x followup - need to handle LocalDateTime and possibly Integer + protected JobParameters getJobParameters(Long executionId) { + Map<String, JobParameter<?>> map = new HashMap<>(); + RowCallbackHandler handler = rs -> { + String parameterName = rs.getString("PARAMETER_NAME"); + + Class<?> parameterType = null; + try { + parameterType = Class.forName(rs.getString("PARAMETER_TYPE")); + } + catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + String stringValue = rs.getString("PARAMETER_VALUE"); + Object typedValue = conversionService.convert(stringValue, parameterType); + + boolean identifying = rs.getString("IDENTIFYING").equalsIgnoreCase("Y"); + + if (typedValue instanceof String) { + map.put(parameterName, new JobParameter(typedValue, String.class, identifying)); + } + else if (typedValue instanceof Integer integer) { + map.put(parameterName, new JobParameter(integer.longValue(), Integer.class, identifying)); + } + else if (typedValue instanceof Long) { + map.put(parameterName, new JobParameter(typedValue, Long.class, identifying)); + } + else if (typedValue instanceof Float float1) { + map.put(parameterName, new JobParameter(float1.doubleValue(), Float.class, identifying)); + } + else if (typedValue instanceof Double) { + map.put(parameterName, new JobParameter(typedValue, Double.class, identifying)); + } + else if (typedValue instanceof Timestamp timestamp) { + map.put(parameterName, new JobParameter(new Date(timestamp.getTime()), Timestamp.class, identifying)); + } + else if (typedValue instanceof Date) { + map.put(parameterName, new JobParameter(typedValue, Date.class, identifying)); + } + else { + map.put(parameterName, + new JobParameter(typedValue != null ? typedValue.toString() : "null", String.class, identifying)); + } + }; + + getJdbcTemplate().query(getQuery(FIND_PARAMS_FROM_ID), handler, executionId); + + return new JobParameters(map); + } - JobExecution createJobExecutionFromResultSet(ResultSet rs, int rowNum) throws SQLException{ + JobExecution createJobExecutionFromResultSet(ResultSet rs, int rowNum) throws SQLException { Long id = rs.getLong(1); JobExecution jobExecution; @@ -475,13 +648,80 @@ JobExecution createJobExecutionFromResultSet(ResultSet rs, int rowNum) throws S jobExecution = new JobExecution(jobInstance, jobParameters); jobExecution.setId(id); - jobExecution.setStartTime(rs.getTimestamp(2)); - jobExecution.setEndTime(rs.getTimestamp(3)); + jobExecution.setStartTime(getLocalDateTime(2, rs)); + jobExecution.setEndTime(getLocalDateTime(3, rs)); jobExecution.setStatus(BatchStatus.valueOf(rs.getString(4))); jobExecution.setExitStatus(new ExitStatus(rs.getString(5), rs.getString(6))); - jobExecution.setCreateTime(rs.getTimestamp(7)); - jobExecution.setLastUpdated(rs.getTimestamp(8)); + jobExecution.setCreateTime(getLocalDateTime(7, rs)); + jobExecution.setLastUpdated(getLocalDateTime(8, rs)); jobExecution.setVersion(rs.getInt(9)); return jobExecution; } + + private LocalDateTime getLocalDateTime(int columnIndex, ResultSet rs) throws SQLException { + LocalDateTime result = null; + // TODO: When the DB2 driver can support LocalDateTime, remove this if block.
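+ // On DB2, rs.getObject(columnIndex, LocalDateTime.class) can throw a "helpful" NullPointerException
+ // for a NULL TIMESTAMP column instead of returning null; the catch below swallows only that specific
+ // NPE and reports the column as null.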
+ if (databaseType == DatabaseType.DB2) { + try { + result = rs.getObject(columnIndex, LocalDateTime.class); + } catch (NullPointerException npe) { + if (!npe.getMessage().contains("java.sql.Timestamp.toLocalDateTime()\" because \"\" is null")) { + throw npe; + } + logger.debug("DB2 threw an NPE because it fails to handle an empty column for a java.time.LocalDateTime. SCDF returns null for this column."); + } + } else { + result = rs.getObject(columnIndex, LocalDateTime.class); + } + return result; + } + + private DatabaseType getDatabaseType() throws SQLException { + DatabaseType databaseType; + try { + databaseType = DatabaseType.fromMetaData(dataSource); + } catch (MetaDataAccessException e) { + throw new IllegalStateException(e); + } + return databaseType; + } + + private final class JobExecutionRowMapper implements RowMapper<JobExecution> { + + private JobInstance jobInstance; + + public JobExecutionRowMapper() { + + } + + public JobExecutionRowMapper(JobInstance jobInstance) { + this.jobInstance = jobInstance; + } + + @Override + public JobExecution mapRow(ResultSet rs, int rowNum) throws SQLException { + Long id = rs.getLong(1); + JobParameters jobParameters = getJobParameters(id); + JobExecution jobExecution; + if (jobInstance == null) { + jobExecution = new JobExecution(id, jobParameters); + } + else { + jobExecution = new JobExecution(jobInstance, id, jobParameters); + } + Timestamp startTime = rs.getTimestamp(2); + Timestamp endTime = rs.getTimestamp(3); + Timestamp lastUpdatedTime = rs.getTimestamp(8); + jobExecution.setStartTime((startTime != null) ? startTime.toLocalDateTime() : null); + jobExecution.setEndTime((endTime != null) ? endTime.toLocalDateTime() : null); + jobExecution.setStatus(BatchStatus.valueOf(rs.getString(4))); + jobExecution.setExitStatus(new ExitStatus(rs.getString(5), rs.getString(6))); + jobExecution.setCreateTime(rs.getTimestamp(7).toLocalDateTime()); + jobExecution.setLastUpdated((lastUpdatedTime != null) ?
lastUpdatedTime.toLocalDateTime() : null); + jobExecution.setVersion(rs.getInt(9)); + return jobExecution; + } + + } + } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableJobInstanceDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableJobInstanceDao.java index 8b69d801dd..96db8901fb 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableJobInstanceDao.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableJobInstanceDao.java @@ -34,7 +34,7 @@ public class JdbcSearchableJobInstanceDao extends JdbcJobInstanceDao implements @Override public void afterPropertiesSet() throws Exception { - setJobIncrementer(new AbstractDataFieldMaxValueIncrementer() { + setJobInstanceIncrementer(new AbstractDataFieldMaxValueIncrementer() { @Override protected long getNextKey() { return 0; diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableStepExecutionDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableStepExecutionDao.java index b0638524c0..e5c7d25513 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableStepExecutionDao.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableStepExecutionDao.java @@ -1,5 +1,5 @@ /* - * Copyright 2006-2013 the original author or authors. + * Copyright 2006-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
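The mappers above and the StepExecutionRowMapper below repeat the same null-safe java.sql.Timestamp to java.time.LocalDateTime conversion inline. A minimal sketch of that recurring pattern as a shared helper; the class and method names here are hypothetical, not part of this change:

import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.time.LocalDateTime;

final class TimestampColumns {

	private TimestampColumns() {
	}

	// Mirrors the (timestamp != null) ? timestamp.toLocalDateTime() : null
	// checks in the mappers: a NULL column yields null instead of an NPE.
	static LocalDateTime toLocalDateTime(ResultSet rs, int columnIndex) throws SQLException {
		Timestamp timestamp = rs.getTimestamp(columnIndex);
		return (timestamp != null) ? timestamp.toLocalDateTime() : null;
	}
}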
@@ -17,6 +17,7 @@ import java.sql.ResultSet; import java.sql.SQLException; +import java.sql.Timestamp; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -34,9 +35,8 @@ import org.springframework.batch.core.repository.dao.JdbcJobExecutionDao; import org.springframework.batch.core.repository.dao.JdbcStepExecutionDao; import org.springframework.batch.item.database.Order; -import org.springframework.batch.item.database.PagingQueryProvider; -import org.springframework.batch.item.database.support.SqlPagingQueryProviderFactoryBean; import org.springframework.batch.support.PatternMatcher; +import org.springframework.cloud.dataflow.server.batch.support.SqlPagingQueryProviderFactoryBean; import org.springframework.dao.IncorrectResultSizeDataAccessException; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.RowMapper; @@ -137,7 +137,8 @@ public Collection<StepExecution> findStepExecutions(String jobName, String stepN whereClause = whereClause + " AND STEP_NAME = ?"; } - PagingQueryProvider queryProvider = getPagingQueryProvider(whereClause); + DataflowSqlPagingQueryProvider queryProvider = getPagingQueryProvider(whereClause); + List<StepExecution> stepExecutions; if (start <= 0) { @@ -174,11 +175,9 @@ public int countStepExecutionsForJobExecution(long jobExecutionId) { } /** - * @return a {@link PagingQueryProvider} with a where clause to narrow the - * query - * @throws Exception + * @return a {@link DataflowSqlPagingQueryProvider} with a where clause to narrow the query */ - private PagingQueryProvider getPagingQueryProvider(String whereClause) { + private DataflowSqlPagingQueryProvider getPagingQueryProvider(String whereClause) { SqlPagingQueryProviderFactoryBean factory = new SqlPagingQueryProviderFactoryBean(); factory.setDataSource(dataSource); factory.setFromClause(getQuery("%PREFIX%STEP_EXECUTION S, %PREFIX%JOB_EXECUTION J, %PREFIX%JOB_INSTANCE I")); @@ -191,7 +190,7 @@ private PagingQueryProvider getPagingQueryProvider(String whereClause) { + " AND S.JOB_EXECUTION_ID = J.JOB_EXECUTION_ID AND J.JOB_INSTANCE_ID = I.JOB_INSTANCE_ID"); } try { - return (PagingQueryProvider) factory.getObject(); + return factory.getObject(); } catch (Exception e) { throw new IllegalStateException("Unexpected exception creating paging query provider", e); @@ -203,8 +202,11 @@ private static class StepExecutionRowMapper implements RowMapper<StepExecution> public StepExecution mapRow(ResultSet rs, int rowNum) throws SQLException { StepExecution stepExecution = new StepExecution(rs.getString(2), null); stepExecution.setId(rs.getLong(1)); - stepExecution.setStartTime(rs.getTimestamp(3)); - stepExecution.setEndTime(rs.getTimestamp(4)); + Timestamp startTimeStamp = rs.getTimestamp(3); + Timestamp endTimeStamp = rs.getTimestamp(4); + + stepExecution.setStartTime((startTimeStamp == null) ? null : startTimeStamp.toLocalDateTime()); + stepExecution.setEndTime((endTimeStamp == null) ?
null : endTimeStamp.toLocalDateTime()); stepExecution.setStatus(BatchStatus.valueOf(rs.getString(5))); stepExecution.setCommitCount(rs.getInt(6)); stepExecution.setReadCount(rs.getInt(7)); @@ -215,7 +217,7 @@ public StepExecution mapRow(ResultSet rs, int rowNum) throws SQLException { stepExecution.setWriteSkipCount(rs.getInt(13)); stepExecution.setProcessSkipCount(rs.getInt(14)); stepExecution.setRollbackCount(rs.getInt(15)); - stepExecution.setLastUpdated(rs.getTimestamp(16)); + stepExecution.setLastUpdated(rs.getTimestamp(16).toLocalDateTime()); stepExecution.setVersion(rs.getInt(17)); return stepExecution; } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobRestartRuntimeException.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobRestartRuntimeException.java new file mode 100644 index 0000000000..1a1d0ece9c --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobRestartRuntimeException.java @@ -0,0 +1,28 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.batch; + +public class JobRestartRuntimeException extends RuntimeException { + + public JobRestartRuntimeException(Long jobExecutionId, Exception cause) { + super(String.format("JobExecutionId '%d' was not restarted.", jobExecutionId), cause); + } + + public JobRestartRuntimeException(Long jobExecutionId) { + super(String.format("JobExecutionId '%d' was not restarted.", jobExecutionId)); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobService.java index 3cfa2157ec..3b33593ad5 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobService.java @@ -1,5 +1,5 @@ /* - * Copyright 2009-2010 the original author or authors. + * Copyright 2009-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
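With the JSR-352 operator gone, restart failures from SimpleJobService now surface as the unchecked JobRestartRuntimeException added above, wrapping the underlying JobOperator cause. A caller-side sketch, assuming a JobService reference and an SLF4J logger are in scope (both assumptions, not part of this diff):

// Restart a failed execution; the checked exceptions declared by
// restart(Long, JobParameters) still propagate, collapsed here for brevity.
public JobExecution restartWithLogging(JobService jobService, Long jobExecutionId) throws Exception {
	try {
		return jobService.restart(jobExecutionId, new JobParameters());
	}
	catch (JobRestartRuntimeException e) {
		logger.error("JobExecution '{}' could not be restarted", jobExecutionId, e);
		throw e;
	}
}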
@@ -17,6 +17,8 @@ import java.util.Collection; import java.util.Date; +import java.util.Map; +import java.util.Set; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.Job; @@ -32,7 +34,6 @@ import org.springframework.batch.core.launch.NoSuchJobInstanceException; import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; -import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.repository.JobRestartException; import org.springframework.batch.core.step.NoSuchStepException; import org.springframework.batch.core.step.tasklet.Tasklet; @@ -45,54 +46,10 @@ * * @author Dave Syer * @author Glenn Renfro - * + * @author Corneil du Plessis */ public interface JobService { - /** - * Launch a job with the parameters provided. If an instance with the parameters provided - * has already failed (and is not abandoned) it will be restarted. - * - * @param jobName the job name - * @param params the {@link JobParameters} - * @return the resulting {@link JobExecution} if successful - * - * @throws NoSuchJobException thrown if job specified does not exist - * @throws JobExecutionAlreadyRunningException thrown if job is already executing - * @throws JobRestartException thrown if job failed to restart - * @throws JobInstanceAlreadyCompleteException thrown if job was already complete - * @throws JobParametersInvalidException thrown if job parameters are invalid - */ - JobExecution launch(String jobName, JobParameters params) throws NoSuchJobException, - JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, - JobParametersInvalidException; - - /** - * Get the last {@link JobParameters} used to execute a job successfully. - * - * @param jobName the name of the job - * @return the last parameters used to execute this job or empty if there are none - * - * @throws NoSuchJobException thrown if job specified does not exist - */ - JobParameters getLastJobParameters(String jobName) throws NoSuchJobException; - - /** - * Launch a job with the parameters provided. - * - * @param jobExecutionId the job execution to restart - * @return the resulting {@link JobExecution} if successful - * - * @throws NoSuchJobExecutionException thrown if job execution specified does not exist - * @throws NoSuchJobException thrown if job specified does not exist - * @throws JobExecutionAlreadyRunningException thrown if job is already executing - * @throws JobRestartException thrown if job failed to restart - * @throws JobInstanceAlreadyCompleteException thrown if job was already complete - * @throws JobParametersInvalidException thrown if job parameters are invalid - */ - JobExecution restart(Long jobExecutionId) throws NoSuchJobExecutionException, JobExecutionAlreadyRunningException, - JobRestartException, JobInstanceAlreadyCompleteException, NoSuchJobException, JobParametersInvalidException; - /** * Launch a job with the parameters provided. JSR-352 supports restarting of jobs with a * new set of parameters. This method exposes this functionality @@ -127,37 +84,6 @@ JobExecution restart(Long jobExecutionId, JobParameters params) */ JobExecution stop(Long jobExecutionId) throws NoSuchJobExecutionException, JobExecutionNotRunningException; - /** - * Mark the {@link JobExecution} as ABANDONED. 
If a stop signal is ignored because the - * process died this is the best way to mark a job as finished with (as opposed to - * STOPPED). An abandoned job execution can be restarted, but a stopping one cannot. - * - * @param jobExecutionId the job execution id to abort - * @return the {@link JobExecution} that was aborted - * @throws NoSuchJobExecutionException thrown if job execution specified does not exist - * @throws JobExecutionAlreadyRunningException thrown if the job is running (it should be - * stopped first) - */ - JobExecution abandon(Long jobExecutionId) throws NoSuchJobExecutionException, JobExecutionAlreadyRunningException; - - /** - * Query the job names in the system, either launchable or not. If not launchable, then - * there must be a history of the job having been launched previously in the - * {@link JobRepository}. - * - * @param start the start index of the job names to return - * @param count the maximum number of job names to return - * @return a collection of job names - */ - Collection listJobs(int start, int count); - - /** - * Count the total number of jobs that can be returned by {@link #listJobs(int, int)}. - * - * @return the total number of jobs - */ - int countJobs(); - /** * Get a {@link JobInstance job instance} by id. * @@ -177,17 +103,8 @@ JobExecution restart(Long jobExecutionId, JobParameters params) * @return a collection of {@link JobInstance job instances} * @throws NoSuchJobException thrown if job specified does not exist */ - Collection listJobInstances(String jobName, int start, int count) throws NoSuchJobException; - /** - * Count the number of {@link JobInstance job instances} in the repository for a given job - * name. - * - * @param jobName the name of the job - * @return the number of job instances available - * @throws NoSuchJobException thrown if job specified does not exist - */ - int countJobInstances(String jobName) throws NoSuchJobException; + Collection listJobInstances(String jobName, int start, int count) throws NoSuchJobException; /** * List the {@link JobExecutionWithStepCount job executions} for a job in descending order @@ -272,7 +189,8 @@ Collection getJobExecutionsForJobInstance(String jobName, Long job * @throws NoSuchJobExecutionException thrown if job execution specified does not exist */ Collection getStepExecutions(Long jobExecutionId) throws NoSuchJobExecutionException; - + Collection getStepExecutions(JobExecution jobExecution) throws NoSuchJobExecutionException; + void addStepExecutions(JobExecution jobExecution); /** * List the {@link StepExecution step executions} for a step in descending order of * creation (usually close to execution order). @@ -298,14 +216,6 @@ Collection listStepExecutionsForStep(String jobName, String stepN */ int countStepExecutionsForStep(String jobName, String stepName) throws NoSuchStepException; - /** - * Count the step executions in the repository for a given job execution. - * @param jobExecutionId the id of the job execution. - * - * @return the number of executions. - */ - int countStepExecutionsForJobExecution(long jobExecutionId); - /** * Locate a {@link StepExecution} from its id and that of its parent {@link JobExecution}. * @@ -319,23 +229,7 @@ Collection listStepExecutionsForStep(String jobName, String stepN */ StepExecution getStepExecution(Long jobExecutionId, Long stepExecutionId) throws NoSuchStepExecutionException, NoSuchJobExecutionException; - - /** - * Send a stop signal to all running job executions. 
- * - * @return the number of executions affected - */ - int stopAll(); - - /** - * Get the names of the steps in a job (or a historical list of recent execution names if - * the Job is not launchable). - * - * @param jobName the name of the job - * @return {@link Collection} of step names. - * @throws NoSuchJobException thrown if the job name cannot be located - */ - Collection<String> getStepNamesForJob(String jobName) throws NoSuchJobException; + StepExecution getStepExecution(JobExecution jobExecution, Long stepExecutionId) throws NoSuchStepExecutionException; /** * List the {@link JobExecution job executions} for a job in descending order of creation @@ -385,4 +279,11 @@ Collection<JobExecutionWithStepCount> listJobExecutionsForJobWithStepCount(Date * @return a collection of {@link JobExecutionWithStepCount} */ Collection<JobExecutionWithStepCount> listJobExecutionsForJobWithStepCountFilteredByTaskExecutionId(int taskExecutionId, int start, int count); + + /** + * Returns the job execution ids associated with each of the given task execution ids, mapped by task execution id. + * @param taskExecutionId Collection of task execution ids for which to find the associated job execution ids. + * @return Map with the task execution id as the key and the set of job execution ids as values. + */ + Map<Long, Set<Long>> getJobExecutionIdsByTaskExecutionIds(Collection<Long> taskExecutionId); } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobStartRuntimeException.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobStartRuntimeException.java new file mode 100644 index 0000000000..775b4ca1bb --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobStartRuntimeException.java @@ -0,0 +1,28 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.batch; + +public class JobStartRuntimeException extends RuntimeException { + + public JobStartRuntimeException(String jobName, Exception cause) { + super(String.format("Job '%s' was not started.", jobName), cause); + } + + public JobStartRuntimeException(Long jobExecutionId) { + super(String.format("JobExecutionId '%s' was not started.", jobExecutionId)); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobStopException.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobStopException.java new file mode 100644 index 0000000000..de32194a59 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobStopException.java @@ -0,0 +1,28 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.batch; + +public class JobStopException extends RuntimeException { + + public JobStopException(Long jobExecutionId, Exception cause) { + super(String.format("JobExecutionId '%d' was not stopped.", jobExecutionId), cause); + } + + public JobStopException(Long jobExecutionId) { + super(String.format("JobExecutionId '%d' was not stopped.", jobExecutionId)); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SearchableJobExecutionDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SearchableJobExecutionDao.java index 9fdf66da96..83f619e9b2 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SearchableJobExecutionDao.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SearchableJobExecutionDao.java @@ -18,6 +18,8 @@ import java.util.Collection; import java.util.Date; import java.util.List; +import java.util.Map; +import java.util.Set; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.JobExecution; @@ -25,7 +27,7 @@ /** * @author Dave Syer - * + * @author Corneil du Plessis */ public interface SearchableJobExecutionDao extends JobExecutionDao { @@ -99,6 +101,11 @@ public interface SearchableJobExecutionDao extends JobExecutionDao { */ List getJobExecutionsWithStepCount(int start, int count); + /** + * @param ids the set of task execution ids. + * @return Map with the TaskExecution id as the key and the set of job execution ids as values. + */ + Map> getJobExecutionsByTaskIds(Collection ids); /** * Gets count of job executions. * diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobService.java index cf509e0fcb..da5b03ac17 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobService.java @@ -1,5 +1,5 @@ /* - * Copyright 2009-2019 the original author or authors. + * Copyright 2009-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
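The getJobExecutionsByTaskIds method declared above (and surfaced by the service as getJobExecutionIdsByTaskExecutionIds further below) resolves the job execution ids for many task executions in a single query. An illustrative call, assuming a SearchableJobExecutionDao bean named jobExecutionDao:

import java.util.List;
import java.util.Map;
import java.util.Set;

// One round trip instead of one lookup per task execution id.
Map<Long, Set<Long>> jobIdsByTaskId = jobExecutionDao.getJobExecutionsByTaskIds(List.of(1L, 2L, 3L));
jobIdsByTaskId.forEach((taskId, jobIds) ->
		System.out.printf("task execution %d -> job executions %s%n", taskId, jobIds));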
@@ -15,41 +15,29 @@ */ package org.springframework.cloud.dataflow.server.batch; -import java.io.IOException; -import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.Date; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedHashSet; import java.util.List; -import java.util.Properties; +import java.util.Map; +import java.util.Objects; import java.util.Set; -import javax.batch.operations.JobOperator; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.JobInstance; import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.StepExecution; import org.springframework.batch.core.launch.JobExecutionNotRunningException; +import org.springframework.batch.core.launch.JobOperator; import org.springframework.batch.core.launch.NoSuchJobException; import org.springframework.batch.core.launch.NoSuchJobExecutionException; import org.springframework.batch.core.launch.NoSuchJobInstanceException; -import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.repository.dao.ExecutionContextDao; import org.springframework.batch.core.step.NoSuchStepException; -import org.springframework.beans.factory.DisposableBean; -import org.springframework.core.io.Resource; -import org.springframework.core.io.support.PathMatchingResourcePatternResolver; -import org.springframework.scheduling.annotation.Scheduled; -import org.springframework.util.CollectionUtils; +import org.springframework.util.Assert; import org.springframework.util.StringUtils; /** @@ -59,9 +47,9 @@ * @author Dave Syer * @author Michael Minella * @author Glenn Renfro - * + * @author Corneil du Plessis */ -public class SimpleJobService implements JobService, DisposableBean { +public class SimpleJobService implements JobService { private static final Logger logger = LoggerFactory.getLogger(SimpleJobService.class); @@ -78,28 +66,21 @@ public class SimpleJobService implements JobService, DisposableBean { private final ExecutionContextDao executionContextDao; - private Collection activeExecutions = Collections.synchronizedList(new ArrayList()); + private JobOperator jobOperator; - private JobOperator jsrJobOperator; private int shutdownTimeout = DEFAULT_SHUTDOWN_TIMEOUT; public SimpleJobService(SearchableJobInstanceDao jobInstanceDao, SearchableJobExecutionDao jobExecutionDao, - SearchableStepExecutionDao stepExecutionDao, JobRepository jobRepository, - ExecutionContextDao executionContextDao, JobOperator jsrJobOperator) { + SearchableStepExecutionDao stepExecutionDao, JobRepository jobRepository, + ExecutionContextDao executionContextDao, JobOperator jobOperator) { super(); this.jobInstanceDao = jobInstanceDao; this.jobExecutionDao = jobExecutionDao; this.stepExecutionDao = stepExecutionDao; this.jobRepository = jobRepository; this.executionContextDao = executionContextDao; - - if (jsrJobOperator == null) { - logger.warn("No JobOperator compatible with JSR-352 was provided."); - } - else { - this.jsrJobOperator = jsrJobOperator; - } + this.jobOperator = Objects.requireNonNull(jobOperator, "jobOperator must not be null"); } /** @@ -118,26 +99,20 @@ public Collection getStepExecutions(Long jobExecutionId) throws N if (jobExecution == null) { throw new NoSuchJobExecutionException("No 
JobExecution with id=" + jobExecutionId); } + return getStepExecutions(jobExecution); - stepExecutionDao.addStepExecutions(jobExecution); + } + @Override + public Collection getStepExecutions(JobExecution jobExecution) { + Assert.notNull(jobExecution, "jobExecution required"); + stepExecutionDao.addStepExecutions(jobExecution); return jobExecution.getStepExecutions(); - } - /** - * Delegates launching to - * {@link org.springframework.cloud.dataflow.server.batch.SimpleJobService#restart(Long, org.springframework.batch.core.JobParameters)} - * - * @param jobExecutionId the job execution to restart - * @return Instance of {@link JobExecution} associated with the restart. - * - * @throws NoSuchJobException thrown if job does not exist - */ @Override - public JobExecution restart(Long jobExecutionId) - throws NoSuchJobException { - return restart(jobExecutionId, null); + public void addStepExecutions(JobExecution jobExecution) { + stepExecutionDao.addStepExecutions(jobExecution); } @Override @@ -145,53 +120,14 @@ public JobExecution restart(Long jobExecutionId, JobParameters params) throws No JobExecution jobExecution; - if (jsrJobOperator != null) { - if (params != null) { - jobExecution = new JobExecution(jsrJobOperator.restart(jobExecutionId, params.toProperties())); - } - else { - jobExecution = new JobExecution(jsrJobOperator.restart(jobExecutionId, new Properties())); - } - } - else { - throw new NoSuchJobException(String.format("Can't find job associated with job execution id %s to restart", - String.valueOf(jobExecutionId))); - } - - return jobExecution; - } - - @Override - public JobExecution launch(String jobName, JobParameters jobParameters) throws NoSuchJobException { - JobExecution jobExecution; - - if (jsrJobOperator != null) { - jobExecution = new JobExecution(jsrJobOperator.start(jobName, jobParameters.toProperties())); - } - else { - throw new NoSuchJobException(String.format("Unable to find job %s to launch", - String.valueOf(jobName))); - } - - return jobExecution; - } - - @Override - public JobParameters getLastJobParameters(String jobName) { - Collection executions = jobExecutionDao.getJobExecutions(jobName, null, 0, 1); - - JobExecution lastExecution = null; - if (!CollectionUtils.isEmpty(executions)) { - lastExecution = executions.iterator().next(); - } - - JobParameters oldParameters = new JobParameters(); - if (lastExecution != null) { - oldParameters = lastExecution.getJobParameters(); - } - - return oldParameters; + try { + jobExecution = new JobExecution(jobOperator.restart(jobExecutionId.longValue())); + } + catch (Exception e) { + throw new JobRestartRuntimeException(jobExecutionId, e); + } + return jobExecution; } @Override @@ -209,103 +145,23 @@ public int countJobExecutions() { return jobExecutionDao.countJobExecutions(); } - @Override - public Collection listJobs(int start, int count) { - Collection jobNames = jobInstanceDao.getJobNames(); - return new ArrayList<>(jobNames).subList(start, start + count); - } - - private Collection getJsrJobNames() { - Set jsr352JobNames = new HashSet(); - - try { - PathMatchingResourcePatternResolver pathMatchingResourcePatternResolver = new org.springframework.core.io.support.PathMatchingResourcePatternResolver(); - Resource[] resources = pathMatchingResourcePatternResolver - .getResources("classpath*:/META-INF/batch-jobs/**/*.xml"); - - for (Resource resource : resources) { - String jobXmlFileName = resource.getFilename(); - jsr352JobNames.add(jobXmlFileName.substring(0, jobXmlFileName.length() - 4)); - } - } - catch 
(IOException e) { - logger.debug("Unable to list JSR-352 batch jobs", e); - } - - return jsr352JobNames; - } - - @Override - public int countJobs() { - return jobInstanceDao.getJobNames().size(); - } @Override - public int stopAll() { - Collection<JobExecution> result = jobExecutionDao.getRunningJobExecutions(); - Collection<String> jsrJobNames = getJsrJobNames(); - - for (JobExecution jobExecution : result) { - if (jsrJobOperator != null && jsrJobNames.contains(jobExecution.getJobInstance().getJobName())) { - jsrJobOperator.stop(jobExecution.getId()); - } - else { - jobExecution.stop(); - jobRepository.update(jobExecution); - } - } - - return result.size(); + public JobExecution stop(Long jobExecutionId) throws NoSuchJobExecutionException, JobExecutionNotRunningException { + return stopJobExecution(getJobExecution(jobExecutionId)); } - @Override - public JobExecution stop(Long jobExecutionId) throws NoSuchJobExecutionException, JobExecutionNotRunningException { - JobExecution jobExecution = getJobExecution(jobExecutionId); + private JobExecution stopJobExecution(JobExecution jobExecution) throws JobExecutionNotRunningException { if (!jobExecution.isRunning()) { throw new JobExecutionNotRunningException("JobExecution is not running and therefore cannot be stopped"); } - - logger.info("Stopping job execution: " + jobExecution); - - Collection<String> jsrJobNames = getJsrJobNames(); - - if (jsrJobOperator != null && jsrJobNames.contains(jobExecution.getJobInstance().getJobName())) { - jsrJobOperator.stop(jobExecutionId); - jobExecution = getJobExecution(jobExecutionId); - } - else { - jobExecution.stop(); - jobRepository.update(jobExecution); - } - return jobExecution; - - } - - @Override - public JobExecution abandon(Long jobExecutionId) throws NoSuchJobExecutionException, - JobExecutionAlreadyRunningException { - - JobExecution jobExecution = getJobExecution(jobExecutionId); - if (jobExecution.getStatus().isLessThan(BatchStatus.STOPPING)) { - throw new JobExecutionAlreadyRunningException( - "JobExecution is running or complete and therefore cannot be aborted"); - } - - logger.info("Aborting job execution: " + jobExecution); - - Collection<String> jsrJobNames = getJsrJobNames(); - - JobInstance jobInstance = jobExecution.getJobInstance(); - if (jsrJobOperator != null && jsrJobNames.contains(jobInstance.getJobName())) { - jsrJobOperator.abandon(jobExecutionId); - jobExecution = getJobExecution(jobExecutionId); - } - else { - jobExecution.upgradeStatus(BatchStatus.ABANDONED); - jobExecution.setEndTime(new Date()); - jobRepository.update(jobExecution); - } - + // Indicate the execution should be stopped by setting its status to + // 'STOPPING'. It is assumed that + // the step implementation will check this status at chunk boundaries.
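+ // setTerminateOnly() below also flags each running step to exit at its next chunk boundary, so the stop is cooperative rather than immediate.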
+ logger.info("Stopping job execution: {}", jobExecution); + jobExecution.getStepExecutions().forEach(StepExecution::setTerminateOnly); + jobExecution.setStatus(BatchStatus.STOPPING); + jobRepository.update(jobExecution); return jobExecution; } @@ -316,24 +172,16 @@ public int countJobExecutionsForJob(String name, BatchStatus status) throws NoSu } private int countJobExecutions(String jobName, BatchStatus status) throws NoSuchJobException { - if (StringUtils.isEmpty(jobName)) { - if (status != null) { - return jobExecutionDao.countJobExecutions(status); - } - } - else { - if (status != null) { - return jobExecutionDao.countJobExecutions(jobName, status); + if (!StringUtils.hasText(jobName)) { + if (status == null) { + throw new IllegalArgumentException("One of jobName or status must be specified"); } + return jobExecutionDao.countJobExecutions(status); } - checkJobExists(jobName); - return jobExecutionDao.countJobExecutions(jobName); - } - - @Override - public int countJobInstances(String name) { - return jobInstanceDao.countJobInstances(name); + return (status != null) ? + jobExecutionDao.countJobExecutions(jobName, status) : + jobExecutionDao.countJobExecutions(jobName); } @Override @@ -357,8 +205,7 @@ public JobExecution getJobExecution(Long jobExecutionId) throws NoSuchJobExecuti public Collection getJobExecutionsForJobInstance(String name, Long jobInstanceId) throws NoSuchJobException { checkJobExists(name); - List jobExecutions = jobExecutionDao.findJobExecutions(jobInstanceDao - .getJobInstance(jobInstanceId)); + List jobExecutions = jobExecutionDao.findJobExecutions(Objects.requireNonNull(jobInstanceDao.getJobInstance(jobInstanceId))); for (JobExecution jobExecution : jobExecutions) { stepExecutionDao.addStepExecutions(jobExecution); } @@ -369,15 +216,19 @@ public Collection getJobExecutionsForJobInstance(String name, Long public StepExecution getStepExecution(Long jobExecutionId, Long stepExecutionId) throws NoSuchJobExecutionException, NoSuchStepExecutionException { JobExecution jobExecution = getJobExecution(jobExecutionId); + return getStepExecution(jobExecution, stepExecutionId); + } + + @Override + public StepExecution getStepExecution(JobExecution jobExecution, Long stepExecutionId) throws NoSuchStepExecutionException { StepExecution stepExecution = stepExecutionDao.getStepExecution(jobExecution, stepExecutionId); if (stepExecution == null) { - throw new NoSuchStepExecutionException("There is no StepExecution with jobExecutionId=" + jobExecutionId + throw new NoSuchStepExecutionException("There is no StepExecution with jobExecutionId=" + jobExecution.getId() + " and id=" + stepExecutionId); } try { stepExecution.setExecutionContext(executionContextDao.getExecutionContext(stepExecution)); - } - catch (Exception e) { + } catch (Exception e) { logger.info("Cannot load execution context for step execution: " + stepExecution); } return stepExecution; @@ -385,7 +236,7 @@ public StepExecution getStepExecution(Long jobExecutionId, Long stepExecutionId) @Override public Collection listJobExecutionsForJobWithStepCount(String jobName, int start, - int count) + int count) throws NoSuchJobException { checkJobExists(jobName); return jobExecutionDao.getJobExecutionsWithStepCount(jobName, start, count); @@ -405,11 +256,6 @@ public int countStepExecutionsForStep(String jobName, String stepName) throws No return stepExecutionDao.countStepExecutions(jobName, stepName); } - @Override - public int countStepExecutionsForJobExecution(long jobExecutionId) { - return 
stepExecutionDao.countStepExecutionsForJobExecution(jobExecutionId); - } - @Override public JobInstance getJobInstance(long jobInstanceId) throws NoSuchJobInstanceException { JobInstance jobInstance = jobInstanceDao.getJobInstance(jobInstanceId); @@ -425,20 +271,9 @@ public Collection listJobInstances(String jobName, int start, int c return jobInstanceDao.getJobInstances(jobName, start, count); } - @Override - public Collection getStepNamesForJob(String jobName) throws NoSuchJobException { - Collection stepNames = new LinkedHashSet<>(); - for (JobExecution jobExecution : listJobExecutionsForJob(jobName, null, 0, 100)) { - for (StepExecution stepExecution : jobExecution.getStepExecutions()) { - stepNames.add(stepExecution.getStepName()); - } - } - return Collections.unmodifiableList(new ArrayList<>(stepNames)); - } - @Override public Collection listJobExecutionsForJob(String jobName, BatchStatus status, int pageOffset, - int pageSize) { + int pageSize) { List jobExecutions = getJobExecutions(jobName, status, pageOffset, pageSize); for (JobExecution jobExecution : jobExecutions) { @@ -450,8 +285,8 @@ public Collection listJobExecutionsForJob(String jobName, BatchSta @Override public Collection listJobExecutionsForJobWithStepCount(Date fromDate, - Date toDate, int start, int count) { - return jobExecutionDao.getJobExecutionsWithStepCount(fromDate, toDate, start, count); + Date toDate, int start, int count) { + return jobExecutionDao.getJobExecutionsWithStepCount(fromDate, toDate, start, count); } @Override @@ -466,13 +301,17 @@ public Collection listJobExecutionsForJobWithStepCoun return jobExecutionDao.getJobExecutionsWithStepCountFilteredByTaskExecutionId(taskExecutionId, start, count); } + @Override + public Map> getJobExecutionIdsByTaskExecutionIds(Collection taskExecutionIds) { + return this.jobExecutionDao.getJobExecutionsByTaskIds(taskExecutionIds); + } + private List getJobExecutions(String jobName, BatchStatus status, int pageOffset, int pageSize) { if (StringUtils.isEmpty(jobName)) { if (status != null) { return jobExecutionDao.getJobExecutions(status, pageOffset, pageSize); } - } - else { + } else { if (status != null) { return jobExecutionDao.getJobExecutions(jobName, status, pageOffset, pageSize); } @@ -482,72 +321,8 @@ private List getJobExecutions(String jobName, BatchStatus status, } private void checkJobExists(String jobName) throws NoSuchJobException { - if (getJsrJobNames().stream().anyMatch(e -> e.contains(jobName)) || - jobInstanceDao.countJobInstances(jobName) > 0) { - return; + if (jobInstanceDao.countJobInstances(jobName) <= 0) { + throw new NoSuchJobException("No Job with that name either current or historic: [" + jobName + "]"); } - throw new NoSuchJobException("No Job with that name either current or historic: [" + jobName + "]"); } - - /** - * Stop all the active jobs and wait for them (up to a time out) to finish processing. 
- */ - @Override - public void destroy() throws Exception { - - Exception firstException = null; - - for (JobExecution jobExecution : activeExecutions) { - try { - if (jobExecution.isRunning()) { - stop(jobExecution.getId()); - } - } - catch (JobExecutionNotRunningException e) { - logger.info("JobExecution is not running so it cannot be stopped"); - } - catch (Exception e) { - logger.error("Unexpected exception stopping JobExecution", e); - if (firstException == null) { - firstException = e; - } - } - } - - int count = 0; - int maxCount = (shutdownTimeout + 1000) / 1000; - while (!activeExecutions.isEmpty() && ++count < maxCount) { - logger.error("Waiting for " + activeExecutions.size() + " active executions to complete"); - removeInactiveExecutions(); - Thread.sleep(1000L); - } - - if (firstException != null) { - throw firstException; - } - - } - - /** - * Check all the active executions and see if they are still actually running. Remove the - * ones that have completed. - */ - @Scheduled(fixedDelay = 60000) - public void removeInactiveExecutions() { - - for (Iterator iterator = activeExecutions.iterator(); iterator.hasNext();) { - JobExecution jobExecution = iterator.next(); - try { - jobExecution = getJobExecution(jobExecution.getId()); - } - catch (NoSuchJobExecutionException e) { - logger.error("Unexpected exception loading JobExecution", e); - } - if (!jobExecution.isRunning()) { - iterator.remove(); - } - } - - } - } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobServiceFactoryBean.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobServiceFactoryBean.java index 19cf8110f3..97a49293f1 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobServiceFactoryBean.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobServiceFactoryBean.java @@ -1,5 +1,5 @@ /* - * Copyright 2009-2019 the original author or authors. + * Copyright 2009-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
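Since SimpleJobService no longer implements DisposableBean or tracks active executions, stopping is purely a status handshake: stop() marks the execution STOPPING and a compliant step winds down at its next chunk boundary. A caller-side sketch (names assumed, not part of this diff):

// Request a stop and observe the intermediate status.
JobExecution stopping = jobService.stop(jobExecutionId);
// STOPPING is reported immediately; the execution becomes STOPPED only
// once the running steps honor the terminate-only flag.
assert stopping.getStatus() == BatchStatus.STOPPING;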
@@ -16,16 +16,17 @@ package org.springframework.cloud.dataflow.server.batch; import java.sql.Types; +import java.util.Locale; import javax.sql.DataSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.batch.core.configuration.support.MapJobRegistry; import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.jsr.JsrJobParametersConverter; -import org.springframework.batch.core.jsr.launch.JsrJobOperator; import org.springframework.batch.core.launch.JobLauncher; +import org.springframework.batch.core.launch.support.SimpleJobOperator; import org.springframework.batch.core.repository.ExecutionContextSerializer; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.repository.dao.AbstractJdbcBatchMetadataDao; @@ -39,6 +40,9 @@ import org.springframework.batch.support.DatabaseType; import org.springframework.beans.factory.FactoryBean; import org.springframework.beans.factory.InitializingBean; +import org.springframework.cloud.dataflow.core.database.support.MultiSchemaIncrementerFactory; +import org.springframework.context.EnvironmentAware; +import org.springframework.core.env.Environment; import org.springframework.jdbc.core.JdbcOperations; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.support.lob.DefaultLobHandler; @@ -52,9 +56,10 @@ * ingredients as convenient as possible. * * @author Dave Syer + * @author Corneil du Plessis * */ -public class SimpleJobServiceFactoryBean implements FactoryBean, InitializingBean { +public class SimpleJobServiceFactoryBean implements FactoryBean, InitializingBean, EnvironmentAware { private static final Logger logger = LoggerFactory.getLogger(SimpleJobServiceFactoryBean.class); @@ -65,6 +70,7 @@ public class SimpleJobServiceFactoryBean implements FactoryBean, Ini private String databaseType; private String tablePrefix = AbstractJdbcBatchMetadataDao.DEFAULT_TABLE_PREFIX; + private String taskTablePrefix = "TASK_"; private DataFieldMaxValueIncrementerFactory incrementerFactory; @@ -82,6 +88,10 @@ public class SimpleJobServiceFactoryBean implements FactoryBean, Ini private PlatformTransactionManager transactionManager; + private JobService jobService; + + private Environment environment; + public void setTransactionManager(PlatformTransactionManager transactionManager) { this.transactionManager = transactionManager; } @@ -112,6 +122,11 @@ public void setMaxVarCharLength(int maxVarCharLength) { this.maxVarCharLength = maxVarCharLength; } + @Override + public void setEnvironment(Environment environment) { + this.environment = environment; + } + /** * Public setter for the {@link DataSource}. * @param dataSource a {@link DataSource} @@ -136,6 +151,19 @@ public void setTablePrefix(String tablePrefix) { this.tablePrefix = tablePrefix; } + public void setTaskTablePrefix(String taskTablePrefix) { + this.taskTablePrefix = taskTablePrefix; + } + + /** + * Sets the {@link JobService} for the factory bean. + * @param jobService the JobService for this Factory Bean. + */ + public void setJobService(JobService jobService) { + this.jobService = jobService; + } + + /** * A factory for incrementers (used to build primary keys for meta data). Defaults to * {@link DefaultDataFieldMaxValueIncrementerFactory}. 
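For orientation, a minimal wiring sketch for this factory bean; the surrounding configuration class, bean names, and collaborator beans are assumptions, not part of this diff:

@Bean
public SimpleJobServiceFactoryBean simpleJobServiceFactoryBean(DataSource dataSource,
		JobRepository jobRepository, JobExplorer jobExplorer, JobLauncher jobLauncher,
		PlatformTransactionManager transactionManager) {
	SimpleJobServiceFactoryBean factoryBean = new SimpleJobServiceFactoryBean();
	factoryBean.setDataSource(dataSource);
	factoryBean.setJobRepository(jobRepository);
	factoryBean.setJobExplorer(jobExplorer);
	factoryBean.setJobLauncher(jobLauncher);
	factoryBean.setTransactionManager(transactionManager);
	// Batch and Task metadata tables may carry different prefixes.
	factoryBean.setTablePrefix("BATCH_");
	factoryBean.setTaskTablePrefix("TASK_");
	return factoryBean;
}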
@@ -187,7 +215,7 @@ public void afterPropertiesSet() throws Exception { jdbcTemplate = new JdbcTemplate(dataSource); if (incrementerFactory == null) { - incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory(dataSource); + incrementerFactory = new MultiSchemaIncrementerFactory(dataSource); } if (databaseType == null) { @@ -212,7 +240,7 @@ public void afterPropertiesSet() throws Exception { protected SearchableJobInstanceDao createJobInstanceDao() throws Exception { JdbcSearchableJobInstanceDao dao = new JdbcSearchableJobInstanceDao(); dao.setJdbcTemplate(jdbcTemplate); - dao.setJobIncrementer(incrementerFactory.getIncrementer(databaseType, tablePrefix + "JOB_SEQ")); + dao.setJobInstanceIncrementer(incrementerFactory.getIncrementer(databaseType, tablePrefix + "JOB_SEQ")); dao.setTablePrefix(tablePrefix); dao.afterPropertiesSet(); return dao; @@ -224,6 +252,7 @@ protected SearchableJobExecutionDao createJobExecutionDao() throws Exception { dao.setJobExecutionIncrementer(incrementerFactory.getIncrementer(databaseType, tablePrefix + "JOB_EXECUTION_SEQ")); dao.setTablePrefix(tablePrefix); + dao.setTaskTablePrefix(taskTablePrefix); dao.setClobTypeToUse(determineClobTypeToUse(this.databaseType)); dao.setExitMessageLength(maxVarCharLength); dao.afterPropertiesSet(); @@ -258,7 +287,7 @@ protected ExecutionContextDao createExecutionContextDao() throws Exception { } private int determineClobTypeToUse(String databaseType) { - if (DatabaseType.SYBASE == DatabaseType.valueOf(databaseType.toUpperCase())) { + if (DatabaseType.SYBASE == DatabaseType.valueOf(databaseType.toUpperCase(Locale.ROOT))) { return Types.LONGVARCHAR; } else { @@ -273,13 +302,14 @@ private int determineClobTypeToUse(String databaseType) { */ @Override public JobService getObject() throws Exception { - JsrJobParametersConverter jobParametersConverter = new JsrJobParametersConverter(dataSource); - jobParametersConverter.afterPropertiesSet(); - JsrJobOperator jsrJobOperator = new JsrJobOperator(jobExplorer, jobRepository, jobParametersConverter, - transactionManager); - jsrJobOperator.afterPropertiesSet(); + + SimpleJobOperator jobOperator = new SimpleJobOperator(); + jobOperator.setJobExplorer(this.jobExplorer); + jobOperator.setJobLauncher(this.jobLauncher); + jobOperator.setJobRepository(this.jobRepository); + jobOperator.setJobRegistry(new MapJobRegistry()); return new SimpleJobService(createJobInstanceDao(), createJobExecutionDao(), createStepExecutionDao(), - jobRepository, createExecutionContextDao(), jsrJobOperator); + jobRepository, createExecutionContextDao(), jobOperator); } /** diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/AbstractSqlPagingQueryProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/AbstractSqlPagingQueryProvider.java new file mode 100644 index 0000000000..fce2f1829e --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/AbstractSqlPagingQueryProvider.java @@ -0,0 +1,273 @@ +/* + * Copyright 2006--2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.batch.support; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import javax.sql.DataSource; + +import org.springframework.batch.item.database.JdbcParameterUtils; +import org.springframework.batch.item.database.Order; +import org.springframework.cloud.dataflow.server.batch.DataflowSqlPagingQueryProvider; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Abstract SQL Paging Query Provider to serve as a base class for all provided + * SQL paging query providers. + * + * Any implementation must provide a way to specify the select clause, from + * clause and optionally a where clause. In addition a way to specify a single + * column sort key must also be provided. This sort key will be used to provide + * the paging functionality. It is recommended that there should be an index for + * the sort key to provide better performance. + * + * Provides properties and preparation for the mandatory "selectClause" and + * "fromClause" as well as for the optional "whereClause". Also provides + * property for the mandatory "sortKeys". Note: The columns that make up + * the sort key must be a true key and not just a column to order by. It is important + * to have a unique key constraint on the sort key to guarantee that no data is lost + * between executions. 
+ * + * @author Thomas Risberg + * @author Dave Syer + * @author Michael Minella + * @author Mahmoud Ben Hassine + * @author Benjamin Hetz + * @author Corneil du Plessis + */ +public abstract class AbstractSqlPagingQueryProvider implements DataflowSqlPagingQueryProvider { + + private String selectClause; + + private String fromClause; + + private String whereClause; + + private Map sortKeys = new LinkedHashMap<>(); + + private String groupClause; + + private int parameterCount; + + private boolean usingNamedParameters; + + /** + * The setter for the group by clause + * @param groupClause SQL GROUP BY clause part of the SQL query string + */ + protected void setGroupClause(String groupClause) { + if (StringUtils.hasText(groupClause)) { + this.groupClause = removeKeyWord("group by", groupClause); + } + else { + this.groupClause = null; + } + } + + /** + * The getter for the group by clause + * @return SQL GROUP BY clause part of the SQL query string + */ + protected String getGroupClause() { + return this.groupClause; + } + + /** + * @param selectClause SELECT clause part of SQL query string + */ + protected void setSelectClause(String selectClause) { + this.selectClause = removeKeyWord("select", selectClause); + } + + /** + * @return SQL SELECT clause part of SQL query string + */ + protected String getSelectClause() { + return selectClause; + } + + /** + * @param fromClause FROM clause part of SQL query string + */ + protected void setFromClause(String fromClause) { + this.fromClause = removeKeyWord("from", fromClause); + } + + /** + * @return SQL FROM clause part of SQL query string + */ + protected String getFromClause() { + return fromClause; + } + + /** + * @param whereClause WHERE clause part of SQL query string + */ + public void setWhereClause(String whereClause) { + if (StringUtils.hasText(whereClause)) { + this.whereClause = removeKeyWord("where", whereClause); + } + else { + this.whereClause = null; + } + } + + /** + * @return SQL WHERE clause part of SQL query string + */ + protected String getWhereClause() { + return whereClause; + } + + /** + * @param sortKeys key to use to sort and limit page content + */ + protected void setSortKeys(Map sortKeys) { + this.sortKeys = sortKeys; + } + + /** + * A Map<String, Boolean> of sort columns as the key and boolean for + * ascending/descending (ascending = true). + * @return sortKey key to use to sort and limit page content + */ + @Override + public Map getSortKeys() { + return sortKeys; + } + + @Override + public int getParameterCount() { + return parameterCount; + } + + @Override + public boolean isUsingNamedParameters() { + return usingNamedParameters; + } + + /** + * The sort key placeholder will vary depending on whether named parameters or + * traditional placeholders are used in query strings. + * @return place holder for sortKey. + */ + @Override + public String getSortKeyPlaceHolder(String keyName) { + return usingNamedParameters ? ":_" + keyName : "?"; + } + + /** + * Check mandatory properties. 
+ * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() + */ + @Override + public void init(DataSource dataSource) throws Exception { + Assert.notNull(dataSource, "A DataSource is required"); + Assert.hasLength(selectClause, "selectClause must be specified"); + Assert.hasLength(fromClause, "fromClause must be specified"); + Assert.notEmpty(sortKeys, "sortKey must be specified"); + StringBuilder sql = new StringBuilder(64); + sql.append("SELECT ").append(selectClause); + sql.append(" FROM ").append(fromClause); + if (whereClause != null) { + sql.append(" WHERE ").append(whereClause); + } + if (groupClause != null) { + sql.append(" GROUP BY ").append(groupClause); + } + List namedParameters = new ArrayList<>(); + parameterCount = JdbcParameterUtils.countParameterPlaceholders(sql.toString(), namedParameters); + if (namedParameters.size() > 0) { + if (parameterCount != namedParameters.size()) { + throw new InvalidDataAccessApiUsageException( + "You can't use both named parameters and classic \"?\" placeholders: " + sql); + } + usingNamedParameters = true; + } + } + + /** + * Method generating the query string to be used for retrieving the first page. This + * method must be implemented in sub classes. + * @param pageSize number of rows to read per page + * @return query string + */ + @Override + public abstract String generateFirstPageQuery(int pageSize); + + /** + * Method generating the query string to be used for retrieving the pages following + * the first page. This method must be implemented in sub classes. + * @param pageSize number of rows to read per page + * @return query string + */ + @Override + public abstract String generateRemainingPagesQuery(int pageSize); + + private String removeKeyWord(String keyWord, String clause) { + String temp = clause.trim(); + int length = keyWord.length(); + if (temp.toLowerCase(Locale.ROOT).startsWith(keyWord) && Character.isWhitespace(temp.charAt(length)) + && temp.length() > length + 1) { + return temp.substring(length + 1); + } + else { + return temp; + } + } + + /** + * @return sortKey key to use to sort and limit page content (without alias) + */ + @Override + public Map getSortKeysWithoutAliases() { + Map sortKeysWithoutAliases = new LinkedHashMap<>(); + + for (Map.Entry sortKeyEntry : sortKeys.entrySet()) { + String key = sortKeyEntry.getKey(); + int separator = key.indexOf('.'); + if (separator > 0) { + int columnIndex = separator + 1; + if (columnIndex < key.length()) { + sortKeysWithoutAliases.put(key.substring(columnIndex), sortKeyEntry.getValue()); + } + } + else { + sortKeysWithoutAliases.put(sortKeyEntry.getKey(), sortKeyEntry.getValue()); + } + } + + return sortKeysWithoutAliases; + } + /** + * Method generating the query string to be used for jumping to a specific + * item position. This method must be implemented in sub classes. 
+ * + * @param itemIndex the index of the item to jump to + * @param pageSize number of rows to read per page + * @return query string + */ + @Override + public abstract String generateJumpToItemQuery(int itemIndex, int pageSize); + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/Db2PagingQueryProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/Db2PagingQueryProvider.java new file mode 100644 index 0000000000..4a770a8c05 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/Db2PagingQueryProvider.java @@ -0,0 +1,55 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.batch.support; +import org.springframework.cloud.dataflow.server.batch.DataflowSqlPagingQueryProvider; +import org.springframework.util.StringUtils; + +/** + * DB2 implementation of a {@link DataflowSqlPagingQueryProvider} using + * database specific features. + * + * @author Thomas Risberg + * @author Michael Minella + * @author Corneil du Plessis + */ +public class Db2PagingQueryProvider extends SqlWindowingPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + if(StringUtils.hasText(getGroupClause())) { + return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, true, buildLimitClause(pageSize)); + } + else { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); + } + } + + @Override + protected Object getSubQueryAlias() { + return "AS TMP_SUB "; + } + + private String buildLimitClause(int pageSize) { + return "FETCH FIRST " + pageSize + " ROWS ONLY"; + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/DerbyPagingQueryProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/DerbyPagingQueryProvider.java new file mode 100644 index 0000000000..e116e71a20 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/DerbyPagingQueryProvider.java @@ -0,0 +1,85 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.batch.support; + +import javax.sql.DataSource; +import org.springframework.cloud.dataflow.server.batch.DataflowSqlPagingQueryProvider; +import org.springframework.dao.InvalidDataAccessResourceUsageException; +import org.springframework.jdbc.support.JdbcUtils; + +/** + * Derby implementation of a {@link DataflowSqlPagingQueryProvider} using standard SQL:2003 windowing functions. + * These features are supported starting with Apache Derby version 10.4.1.3. + * + * As the OVER() function does not support the ORDER BY clause, a subquery is used instead to order the results + * before the ROW_NUM restriction is applied. + * + * @author Thomas Risberg + * @author David Thexton + * @author Michael Minella + * @author Corneil du Plessis + */ +public class DerbyPagingQueryProvider extends SqlWindowingPagingQueryProvider { + + private static final String MINIMAL_DERBY_VERSION = "10.4.1.3"; + + @Override + public void init(DataSource dataSource) throws Exception { + super.init(dataSource); + String version = JdbcUtils.extractDatabaseMetaData(dataSource, "getDatabaseProductVersion").toString(); + if (!isDerbyVersionSupported(version)) { + throw new InvalidDataAccessResourceUsageException("Apache Derby version " + version + " is not supported by this class. Only version " + MINIMAL_DERBY_VERSION + " or later is supported"); + } + } + + // Derby version numbering is M.m.f.p [ {alpha|beta} ]; see https://db.apache.org/derby/papers/versionupgrade.html#Basic+Numbering+Scheme + private boolean isDerbyVersionSupported(String version) { + String[] minimalVersionParts = MINIMAL_DERBY_VERSION.split("\\."); + String[] versionParts = version.split("[\\. ]"); + for (int i = 0; i < minimalVersionParts.length; i++) { + int minimalVersionPart = Integer.parseInt(minimalVersionParts[i]); + int versionPart = Integer.parseInt(versionParts[i]); + if (versionPart < minimalVersionPart) { + return false; + } else if (versionPart > minimalVersionPart) { + return true; + } + } + return true; + } + + @Override + protected String getOrderedQueryAlias() { + return "TMP_ORDERED"; + } + + @Override + protected String getOverClause() { + return ""; + } + + @Override + protected String getOverSubstituteClauseStart() { + return " FROM (SELECT " + getSelectClause(); + } + + @Override + protected String getOverSubstituteClauseEnd() { + return " ) AS " + getOrderedQueryAlias(); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/H2PagingQueryProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/H2PagingQueryProvider.java new file mode 100644 index 0000000000..f2c565308a --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/H2PagingQueryProvider.java @@ -0,0 +1,50 @@ +/* + * Copyright 2006-2024 the original author or authors.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.batch.support; + +/** + * H2 implementation of a {@link org.springframework.batch.item.database.PagingQueryProvider} using database specific features. + * + * @author Dave Syer + * @author Henning Pöttker + * @author Corneil du Plessis + */ +public class H2PagingQueryProvider extends AbstractSqlPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); + } + + private String buildLimitClause(int pageSize) { + return new StringBuilder().append("FETCH NEXT ").append(pageSize).append(" ROWS ONLY").toString(); + } + + @Override + public String generateJumpToItemQuery(int itemIndex, int pageSize) { + int page = itemIndex / pageSize; + int offset = Math.max((page * pageSize) - 1, 0); + + return SqlPagingQueryUtils.generateLimitJumpToQuery(this, "OFFSET " + offset + " ROWS FETCH NEXT 1 ROWS ONLY"); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/HsqlPagingQueryProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/HsqlPagingQueryProvider.java new file mode 100644 index 0000000000..48f0cad7b9 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/HsqlPagingQueryProvider.java @@ -0,0 +1,57 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.batch.support; + +import org.springframework.util.StringUtils; + +/** + * HSQLDB implementation of a {@link org.springframework.batch.item.database.PagingQueryProvider} using database specific features. 
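+ * <p>For example (a sketch, assuming select clause {@code ID}, from clause {@code FOO}, a single ascending sort key {@code ID} and page size 10), the first page is read with {@code SELECT TOP 10 ID FROM FOO ORDER BY ID ASC}.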
+ * + * @author Thomas Risberg + * @author Michael Minella + * @author Corneil du Plessis + */ +public class HsqlPagingQueryProvider extends AbstractSqlPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateTopSqlQuery(this, false, buildTopClause(pageSize)); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + if(StringUtils.hasText(getGroupClause())) { + return SqlPagingQueryUtils.generateGroupedTopSqlQuery(this, true, buildTopClause(pageSize)); + } + else { + return SqlPagingQueryUtils.generateTopSqlQuery(this, true, buildTopClause(pageSize)); + } + } + + private String buildTopClause(int pageSize) { + return "TOP " + pageSize; + } + + @Override + public String generateJumpToItemQuery(int itemIndex, int pageSize) { + int page = itemIndex / pageSize; + int offset = Math.max((page * pageSize) - 1, 0); + + return SqlPagingQueryUtils.generateTopJumpToQuery(this, "LIMIT " + offset + " 1"); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/MariaDBPagingQueryProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/MariaDBPagingQueryProvider.java new file mode 100644 index 0000000000..ff682fb6eb --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/MariaDBPagingQueryProvider.java @@ -0,0 +1,58 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.batch.support; + +import org.springframework.cloud.dataflow.server.batch.DataflowSqlPagingQueryProvider; +import org.springframework.util.StringUtils; + +/** + * MariaDB implementation of a {@link DataflowSqlPagingQueryProvider} using database specific features. 
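+ * <p>For example (a sketch, assuming select clause {@code ID}, from clause {@code FOO}, ascending sort key {@code ID} and page size 10), the first page is read with {@code SELECT ID FROM FOO ORDER BY ID ASC LIMIT 10}; jump-to-item queries use the {@code LIMIT offset, 1} form.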
+ * + * @author Thomas Risberg + * @author Michael Minella + * @author Corneil du Plessis + */ +public class MariaDBPagingQueryProvider extends AbstractSqlPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + if(StringUtils.hasText(getGroupClause())) { + return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, true, buildLimitClause(pageSize)); + } + else { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); + } + } + + private String buildLimitClause(int pageSize) { + return "LIMIT " + pageSize; + } + + @Override + public String generateJumpToItemQuery(int itemIndex, int pageSize) { + int page = itemIndex / pageSize; + int offset = Math.max((page * pageSize) - 1, 0); + + return SqlPagingQueryUtils.generateLimitJumpToQuery(this, "LIMIT " + offset + ", 1"); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/MySqlPagingQueryProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/MySqlPagingQueryProvider.java new file mode 100644 index 0000000000..3f0ccb394c --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/MySqlPagingQueryProvider.java @@ -0,0 +1,57 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.batch.support; +import org.springframework.cloud.dataflow.server.batch.DataflowSqlPagingQueryProvider; +import org.springframework.util.StringUtils; + +/** + * MySQL implementation of a {@link DataflowSqlPagingQueryProvider} using database specific features. 
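+ * <p>For example (a sketch, assuming select clause {@code ID}, from clause {@code FOO}, ascending sort key {@code ID}, page size 10 and classic {@code ?} placeholders), pages after the first are read with {@code SELECT ID FROM FOO WHERE ((ID > ?)) ORDER BY ID ASC LIMIT 10}, where the parameter carries the last {@code ID} of the previous page.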
+ * + * @author Thomas Risberg + * @author Michael Minella + * @author Corneil du Plessis + */ +public class MySqlPagingQueryProvider extends AbstractSqlPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + if(StringUtils.hasText(getGroupClause())) { + return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, true, buildLimitClause(pageSize)); + } + else { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); + } + } + + private String buildLimitClause(int pageSize) { + return "LIMIT " + pageSize; + } + + @Override + public String generateJumpToItemQuery(int itemIndex, int pageSize) { + int page = itemIndex / pageSize; + int offset = Math.max((page * pageSize) - 1, 0); + + return SqlPagingQueryUtils.generateLimitJumpToQuery(this, "LIMIT " + offset + ", 1"); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/OraclePagingQueryProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/OraclePagingQueryProvider.java new file mode 100644 index 0000000000..0ea0c1456e --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/OraclePagingQueryProvider.java @@ -0,0 +1,71 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.batch.support; + +import java.util.Map; + +import org.springframework.batch.item.database.Order; + +/** + * Oracle implementation of a + * {@link org.springframework.batch.item.database.PagingQueryProvider} using + * database specific features. + * + * @author Thomas Risberg + * @author Michael Minella + * @author Corneil du Plessis + */ +public class OraclePagingQueryProvider extends AbstractSqlPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateRowNumSqlQuery(this, false, buildRowNumClause(pageSize)); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + return SqlPagingQueryUtils.generateRowNumSqlQuery(this, true, buildRowNumClause(pageSize)); + } + + @Override + public String generateJumpToItemQuery(int itemIndex, int pageSize) { + int page = itemIndex / pageSize; + int offset = (page * pageSize); + offset = offset == 0 ? 
1 : offset; + String sortKeySelect = this.getSortKeySelect(); + return SqlPagingQueryUtils.generateRowNumSqlQueryWithNesting(this, sortKeySelect, sortKeySelect, false, "TMP_ROW_NUM = " + + offset); + } + + private String getSortKeySelect() { + StringBuilder sql = new StringBuilder(); + String prefix = ""; + + for (Map.Entry<String, Order> sortKey : this.getSortKeys().entrySet()) { + sql.append(prefix); + prefix = ", "; + sql.append(sortKey.getKey()); + } + + return sql.toString(); + } + + private String buildRowNumClause(int pageSize) { + return "ROWNUM <= " + pageSize; + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/PostgresPagingQueryProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/PostgresPagingQueryProvider.java new file mode 100644 index 0000000000..64ef561744 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/PostgresPagingQueryProvider.java @@ -0,0 +1,59 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.batch.support; +import org.springframework.cloud.dataflow.server.batch.DataflowSqlPagingQueryProvider; +import org.springframework.util.StringUtils; + +/** + * Postgres implementation of a {@link DataflowSqlPagingQueryProvider} using database specific features. + * + * When using the groupClause, this implementation expects all select fields not used in aggregate functions to be included in the + * groupClause (the provider does not add them for you).
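+ * <p>For example (a sketch), with select clause {@code NAME, COUNT(*) AS CNT}, from clause {@code FOO}, group clause {@code NAME}, ascending sort key {@code NAME} and page size 10, pages after the first are read from a wrapped query of roughly the form {@code SELECT * FROM (SELECT NAME, COUNT(*) AS CNT FROM FOO GROUP BY NAME) AS MAIN_QRY WHERE ((NAME > ?)) ORDER BY NAME ASC LIMIT 10}.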
+ * + * @author Thomas Risberg + * @author Michael Minella + * @author Corneil du Plessis + */ +public class PostgresPagingQueryProvider extends AbstractSqlPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + if(StringUtils.hasText(getGroupClause())) { + return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, true, buildLimitClause(pageSize)); + } + else { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); + } + } + + private String buildLimitClause(int pageSize) { + return new StringBuilder().append("LIMIT ").append(pageSize).toString(); + } + + @Override + public String generateJumpToItemQuery(int itemIndex, int pageSize) { + int page = itemIndex / pageSize; + int offset = Math.max((page * pageSize) - 1, 0); + return SqlPagingQueryUtils.generateLimitJumpToQuery(this, "LIMIT 1 OFFSET " + offset); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/SqlPagingQueryProviderFactoryBean.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/SqlPagingQueryProviderFactoryBean.java new file mode 100644 index 0000000000..75bb47150e --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/SqlPagingQueryProviderFactoryBean.java @@ -0,0 +1,208 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.server.batch.support; + +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Locale; +import java.util.Map; + +import javax.sql.DataSource; + +import org.springframework.batch.item.database.Order; +import org.springframework.batch.support.DatabaseType; +import org.springframework.beans.factory.FactoryBean; +import org.springframework.cloud.dataflow.server.batch.DataflowSqlPagingQueryProvider; +import org.springframework.jdbc.support.MetaDataAccessException; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +import static org.springframework.batch.support.DatabaseType.DB2; +import static org.springframework.batch.support.DatabaseType.DB2AS400; +import static org.springframework.batch.support.DatabaseType.DB2VSE; +import static org.springframework.batch.support.DatabaseType.DB2ZOS; +import static org.springframework.batch.support.DatabaseType.DERBY; +import static org.springframework.batch.support.DatabaseType.H2; +import static org.springframework.batch.support.DatabaseType.HSQL; +import static org.springframework.batch.support.DatabaseType.MARIADB; +import static org.springframework.batch.support.DatabaseType.MYSQL; +import static org.springframework.batch.support.DatabaseType.ORACLE; +import static org.springframework.batch.support.DatabaseType.POSTGRES; +import static org.springframework.batch.support.DatabaseType.SQLITE; +import static org.springframework.batch.support.DatabaseType.SQLSERVER; +import static org.springframework.batch.support.DatabaseType.SYBASE; + +/** + * Factory bean for {@link DataflowSqlPagingQueryProvider} interface. The database type + * will be determined from the data source if not provided explicitly. Valid + * types are given by the {@link DatabaseType} enum. 
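+ * <p>A typical usage sketch (the {@code dataSource}, clauses and sort key below are illustrative only): + * <pre> + * SqlPagingQueryProviderFactoryBean factory = new SqlPagingQueryProviderFactoryBean(); + * factory.setDataSource(dataSource); + * factory.setSelectClause("ID, NAME"); + * factory.setFromClause("FOO"); + * factory.setSortKey("ID"); + * DataflowSqlPagingQueryProvider provider = factory.getObject(); + * </pre>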
+ * + * @author Dave Syer + * @author Michael Minella + * @author Corneil du Plessis + */ +public class SqlPagingQueryProviderFactoryBean implements FactoryBean<DataflowSqlPagingQueryProvider> { + + private DataSource dataSource; + + private String databaseType; + + private String fromClause; + + private String whereClause; + + private String selectClause; + + private String groupClause; + + private Map<String, Order> sortKeys; + + private Map<DatabaseType, AbstractSqlPagingQueryProvider> providers = new HashMap<>(); + + + { + providers.put(DB2, new Db2PagingQueryProvider()); + providers.put(DB2VSE, new Db2PagingQueryProvider()); + providers.put(DB2ZOS, new Db2PagingQueryProvider()); + providers.put(DB2AS400, new Db2PagingQueryProvider()); + providers.put(DERBY, new DerbyPagingQueryProvider()); + providers.put(HSQL, new HsqlPagingQueryProvider()); + providers.put(H2, new H2PagingQueryProvider()); + providers.put(MARIADB, new MariaDBPagingQueryProvider()); + providers.put(MYSQL, new MySqlPagingQueryProvider()); + providers.put(ORACLE, new OraclePagingQueryProvider()); + providers.put(POSTGRES, new PostgresPagingQueryProvider()); + providers.put(SQLITE, new SqlitePagingQueryProvider()); + providers.put(SQLSERVER, new SqlServerPagingQueryProvider()); + providers.put(SYBASE, new SybasePagingQueryProvider()); + } + + /** + * @param groupClause SQL GROUP BY clause part of the SQL query string + */ + public void setGroupClause(String groupClause) { + this.groupClause = groupClause; + } + + /** + * @param databaseType the databaseType to set + */ + public void setDatabaseType(String databaseType) { + this.databaseType = databaseType; + } + + /** + * @param dataSource the dataSource to set + */ + public void setDataSource(DataSource dataSource) { + this.dataSource = dataSource; + } + + /** + * @param fromClause the fromClause to set + */ + public void setFromClause(String fromClause) { + this.fromClause = fromClause; + } + + /** + * @param whereClause the whereClause to set + */ + public void setWhereClause(String whereClause) { + this.whereClause = whereClause; + } + + /** + * @param selectClause the selectClause to set + */ + public void setSelectClause(String selectClause) { + this.selectClause = selectClause; + } + + /** + * @param sortKeys the sortKeys to set + */ + public void setSortKeys(Map<String, Order> sortKeys) { + this.sortKeys = sortKeys; + } + + public void setSortKey(String key) { + Assert.doesNotContain(key, ",", "String setter is valid for a single ASC key only"); + + Map<String, Order> keys = new LinkedHashMap<>(); + keys.put(key, Order.ASCENDING); + + this.sortKeys = keys; + } + + /** + * Get a {@link DataflowSqlPagingQueryProvider} instance using the provided properties + * and appropriate for the given database type. + * + * @see FactoryBean#getObject() + */ + @Override + public DataflowSqlPagingQueryProvider getObject() throws Exception { + + DatabaseType type; + try { + type = databaseType != null ? DatabaseType.valueOf(databaseType.toUpperCase(Locale.ROOT)) : DatabaseType + .fromMetaData(dataSource); + } + catch (MetaDataAccessException e) { + throw new IllegalArgumentException( + "Could not inspect meta data for database type. 
You have to supply it explicitly.", e); + } + + AbstractSqlPagingQueryProvider provider = providers.get(type); + Assert.state(provider != null, "Should not happen: missing PagingQueryProvider for DatabaseType=" + type); + + provider.setFromClause(fromClause); + provider.setWhereClause(whereClause); + provider.setSortKeys(sortKeys); + if (StringUtils.hasText(selectClause)) { + provider.setSelectClause(selectClause); + } + if (StringUtils.hasText(groupClause)) { + provider.setGroupClause(groupClause); + } + + provider.init(dataSource); + + return provider; + } + + /** + * Always returns {@link DataflowSqlPagingQueryProvider}. + * + * @see FactoryBean#getObjectType() + */ + @Override + public Class<DataflowSqlPagingQueryProvider> getObjectType() { + return DataflowSqlPagingQueryProvider.class; + } + + /** + * Always returns true. + * @see FactoryBean#isSingleton() + */ + @Override + public boolean isSingleton() { + return true; + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/SqlPagingQueryUtils.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/SqlPagingQueryUtils.java new file mode 100644 index 0000000000..3f0f16b347 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/SqlPagingQueryUtils.java @@ -0,0 +1,379 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.batch.support; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.springframework.batch.item.database.Order; +import org.springframework.util.StringUtils; + +/** + * Utility class that generates the actual SQL statements used by query + * providers.
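+ * <p>Broadly, three families of statements are generated: LIMIT-style queries (MySQL, MariaDB, Postgres, SQLite; H2 uses the same family with a FETCH NEXT clause), TOP-style queries (HSQLDB, SQL Server, Sybase) and ROWNUM/ROW_NUMBER queries (Oracle and the windowing providers).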
+ * + * @author Thomas Risberg + * @author Dave Syer + * @author Michael Minella + * @author Corneil du Plessis + */ +public class SqlPagingQueryUtils { + + /** + * Generate SQL query string using a LIMIT clause + * + * @param provider {@link AbstractSqlPagingQueryProvider} providing the + * implementation specifics + * @param remainingPageQuery is this query for the remaining pages (true) as + * opposed to the first page (false) + * @param limitClause the implementation specific limit clause to be used + * @return the generated query + */ + public static String generateLimitSqlQuery(AbstractSqlPagingQueryProvider provider, boolean remainingPageQuery, + String limitClause) { + StringBuilder sql = new StringBuilder(); + sql.append("SELECT ").append(provider.getSelectClause()); + sql.append(" FROM ").append(provider.getFromClause()); + buildWhereClause(provider, remainingPageQuery, sql); + buildGroupByClause(provider, sql); + sql.append(" ORDER BY ").append(buildSortClause(provider)); + sql.append(" " + limitClause); + + return sql.toString(); + } + + /** + * Generate SQL query string using a LIMIT clause + * + * @param provider {@link AbstractSqlPagingQueryProvider} providing the + * implementation specifics + * @param remainingPageQuery is this query for the remaining pages (true) as + * opposed to the first page (false) + * @param limitClause the implementation specific limit clause to be used + * @return the generated query + */ + public static String generateLimitGroupedSqlQuery(AbstractSqlPagingQueryProvider provider, boolean remainingPageQuery, + String limitClause) { + StringBuilder sql = new StringBuilder(); + sql.append("SELECT * "); + sql.append(" FROM ("); + sql.append("SELECT ").append(provider.getSelectClause()); + sql.append(" FROM ").append(provider.getFromClause()); + sql.append(provider.getWhereClause() == null ? 
"" : " WHERE " + provider.getWhereClause()); + buildGroupByClause(provider, sql); + sql.append(") AS MAIN_QRY "); + sql.append("WHERE "); + buildSortConditions(provider, sql); + sql.append(" ORDER BY ").append(buildSortClause(provider)); + sql.append(" " + limitClause); + + return sql.toString(); + } + + /** + * Generate SQL query string using a TOP clause + * + * @param provider {@link AbstractSqlPagingQueryProvider} providing the + * implementation specifics + * @param remainingPageQuery is this query for the remaining pages (true) as + * opposed to the first page (false) + * @param topClause the implementation specific top clause to be used + * @return the generated query + */ + public static String generateTopSqlQuery(AbstractSqlPagingQueryProvider provider, boolean remainingPageQuery, + String topClause) { + StringBuilder sql = new StringBuilder(128); + sql.append("SELECT ").append(topClause).append(" ").append(provider.getSelectClause()); + sql.append(" FROM ").append(provider.getFromClause()); + buildWhereClause(provider, remainingPageQuery, sql); + buildGroupByClause(provider, sql); + sql.append(" ORDER BY ").append(buildSortClause(provider)); + + return sql.toString(); + } + + /** + * Generate SQL query string using a TOP clause + * + * @param provider {@link AbstractSqlPagingQueryProvider} providing the + * implementation specifics + * @param remainingPageQuery is this query for the remaining pages (true) as + * opposed to the first page (false) + * @param topClause the implementation specific top clause to be used + * @return the generated query + */ + public static String generateGroupedTopSqlQuery(AbstractSqlPagingQueryProvider provider, boolean remainingPageQuery, + String topClause) { + StringBuilder sql = new StringBuilder(); + sql.append("SELECT ").append(topClause).append(" * FROM ("); + sql.append("SELECT ").append(provider.getSelectClause()); + sql.append(" FROM ").append(provider.getFromClause()); + sql.append(provider.getWhereClause() == null ? "" : " WHERE " + provider.getWhereClause()); + buildGroupByClause(provider, sql); + sql.append(") AS MAIN_QRY "); + sql.append("WHERE "); + buildSortConditions(provider, sql); + sql.append(" ORDER BY ").append(buildSortClause(provider)); + + return sql.toString(); + } + + /** + * Generate SQL query string using a ROW_NUM condition + * + * @param provider {@link AbstractSqlPagingQueryProvider} providing the + * implementation specifics + * @param remainingPageQuery is this query for the remaining pages (true) as + * opposed to the first page (false) + * @param rowNumClause the implementation specific row num clause to be used + * @return the generated query + */ + public static String generateRowNumSqlQuery(AbstractSqlPagingQueryProvider provider, boolean remainingPageQuery, + String rowNumClause) { + + return generateRowNumSqlQuery(provider, provider.getSelectClause(), remainingPageQuery, rowNumClause); + + } + + /** + * Generate SQL query string using a ROW_NUM condition + * + * @param provider {@link AbstractSqlPagingQueryProvider} providing the + * implementation specifics + * @param selectClause {@link String} containing the select portion of the query. 
+ * @param remainingPageQuery is this query for the remaining pages (true) as + * opposed to the first page (false) + * @param rowNumClause the implementation specific row num clause to be used + * @return the generated query + */ + public static String generateRowNumSqlQuery(AbstractSqlPagingQueryProvider provider, String selectClause, + boolean remainingPageQuery, String rowNumClause) { + StringBuilder sql = new StringBuilder(); + sql.append("SELECT * FROM (SELECT ").append(selectClause); + sql.append(" FROM ").append(provider.getFromClause()); + sql.append(provider.getWhereClause() == null ? "" : " WHERE " + provider.getWhereClause()); + buildGroupByClause(provider, sql); + sql.append(" ORDER BY ").append(buildSortClause(provider)); + sql.append(") WHERE ").append(rowNumClause); + if(remainingPageQuery) { + sql.append(" AND "); + buildSortConditions(provider, sql); + } + + return sql.toString(); + + } + + public static String generateRowNumSqlQueryWithNesting(AbstractSqlPagingQueryProvider provider, + String selectClause, boolean remainingPageQuery, String rowNumClause) { + return generateRowNumSqlQueryWithNesting(provider, selectClause, selectClause, remainingPageQuery, rowNumClause); + } + + public static String generateRowNumSqlQueryWithNesting(AbstractSqlPagingQueryProvider provider, + String innerSelectClause, String outerSelectClause, boolean remainingPageQuery, String rowNumClause) { + + StringBuilder sql = new StringBuilder(); + sql.append("SELECT ").append(outerSelectClause).append(" FROM (SELECT ").append(outerSelectClause) + .append(", ").append(StringUtils.hasText(provider.getGroupClause()) ? "MIN(ROWNUM) as TMP_ROW_NUM" : "ROWNUM as TMP_ROW_NUM"); + sql.append(" FROM (SELECT ").append(innerSelectClause).append(" FROM ").append(provider.getFromClause()); + buildWhereClause(provider, remainingPageQuery, sql); + buildGroupByClause(provider, sql); + sql.append(" ORDER BY ").append(buildSortClause(provider)); + sql.append(")) WHERE ").append(rowNumClause); + + return sql.toString(); + + } + + /** + * Generate SQL query string using a LIMIT clause + * + * @param provider {@link AbstractSqlPagingQueryProvider} providing the + * implementation specifics + * @param limitClause the implementation specific limit clause to be used + * @return the generated query + */ + public static String generateLimitJumpToQuery(AbstractSqlPagingQueryProvider provider, String limitClause) { + StringBuilder sql = new StringBuilder(); + sql.append("SELECT ").append(buildSortKeySelect(provider)); + sql.append(" FROM ").append(provider.getFromClause()); + sql.append(provider.getWhereClause() == null ? "" : " WHERE " + provider.getWhereClause()); + buildGroupByClause(provider, sql); + sql.append(" ORDER BY ").append(buildSortClause(provider)); + sql.append(" " + limitClause); + + return sql.toString(); + } + + /** + * Generate SQL query string using a TOP clause + * + * @param provider {@link AbstractSqlPagingQueryProvider} providing the + * implementation specifics + * @param topClause the implementation specific top clause to be used + * @return the generated query + */ + public static String generateTopJumpToQuery(AbstractSqlPagingQueryProvider provider, String topClause) { + StringBuilder sql = new StringBuilder(); + sql.append("SELECT ").append(topClause).append(" ").append(buildSortKeySelect(provider)); + sql.append(" FROM ").append(provider.getFromClause()); + sql.append(provider.getWhereClause() == null ? 
"" : " WHERE " + provider.getWhereClause()); + buildGroupByClause(provider, sql); + sql.append(" ORDER BY ").append(buildSortClause(provider)); + + return sql.toString(); + } + + /** + * Generates ORDER BY attributes based on the sort keys. + * + * @param provider the {@link AbstractSqlPagingQueryProvider} to be used for + * used for pagination. + * @return a String that can be appended to an ORDER BY clause. + */ + public static String buildSortClause(AbstractSqlPagingQueryProvider provider) { + return buildSortClause(provider.getSortKeys()); + } + + /** + * Generates ORDER BY attributes based on the sort keys. + * + * @param sortKeys {@link Map} where the key is the name of the column to be + * sorted and the value contains the {@link Order}. + * @return a String that can be appended to an ORDER BY clause. + */ + public static String buildSortClause(Map sortKeys) { + StringBuilder builder = new StringBuilder(); + String prefix = ""; + + for (Entry sortKey : sortKeys.entrySet()) { + builder.append(prefix); + + prefix = ", "; + + builder.append(sortKey.getKey()); + + if(sortKey.getValue() != null && sortKey.getValue() == Order.DESCENDING) { + builder.append(" DESC"); + } + else { + builder.append(" ASC"); + } + } + + return builder.toString(); + } + + /** + * Appends the where conditions required to query for the subsequent pages. + * + * @param provider the {@link AbstractSqlPagingQueryProvider} to be used for + * pagination. + * @param sql {@link StringBuilder} containing the sql to be used for the + * query. + */ + public static void buildSortConditions( + AbstractSqlPagingQueryProvider provider, StringBuilder sql) { + List> keys = new ArrayList<>(provider.getSortKeys().entrySet()); + List clauses = new ArrayList<>(); + + for(int i = 0; i < keys.size(); i++) { + StringBuilder clause = new StringBuilder(); + + String prefix = ""; + for(int j = 0; j < i; j++) { + clause.append(prefix); + prefix = " AND "; + Entry entry = keys.get(j); + clause.append(entry.getKey()); + clause.append(" = "); + clause.append(provider.getSortKeyPlaceHolder(entry.getKey())); + } + + if(clause.length() > 0) { + clause.append(" AND "); + } + clause.append(keys.get(i).getKey()); + + if(keys.get(i).getValue() != null && keys.get(i).getValue() == Order.DESCENDING) { + clause.append(" < "); + } + else { + clause.append(" > "); + } + + clause.append(provider.getSortKeyPlaceHolder(keys.get(i).getKey())); + + clauses.add(clause.toString()); + } + + sql.append("("); + String prefix = ""; + + for (String curClause : clauses) { + sql.append(prefix); + prefix = " OR "; + sql.append("("); + sql.append(curClause); + sql.append(")"); + } + sql.append(")"); + } + + private static String buildSortKeySelect(AbstractSqlPagingQueryProvider provider) { + StringBuilder select = new StringBuilder(); + + String prefix = ""; + + for (Entry sortKey : provider.getSortKeys().entrySet()) { + select.append(prefix); + + prefix = ", "; + + select.append(sortKey.getKey()); + } + + return select.toString(); + } + + private static void buildWhereClause(AbstractSqlPagingQueryProvider provider, boolean remainingPageQuery, + StringBuilder sql) { + if (remainingPageQuery) { + sql.append(" WHERE "); + if (provider.getWhereClause() != null) { + sql.append("("); + sql.append(provider.getWhereClause()); + sql.append(") AND "); + } + + buildSortConditions(provider, sql); + } + else { + sql.append(provider.getWhereClause() == null ? 
"" : " WHERE " + provider.getWhereClause()); + } + } + + private static void buildGroupByClause(AbstractSqlPagingQueryProvider provider, StringBuilder sql) { + if(StringUtils.hasText(provider.getGroupClause())) { + sql.append(" GROUP BY "); + sql.append(provider.getGroupClause()); + } + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/SqlServerPagingQueryProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/SqlServerPagingQueryProvider.java new file mode 100644 index 0000000000..7ec3f1d7ac --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/SqlServerPagingQueryProvider.java @@ -0,0 +1,56 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.batch.support; + +import org.springframework.cloud.dataflow.server.batch.DataflowSqlPagingQueryProvider; +import org.springframework.util.StringUtils; + +/** + * SQL Server implementation of a + * {@link DataflowSqlPagingQueryProvider} using + * database specific features. + * + * @author Thomas Risberg + * @author Michael Minella + * @author Corneil du Plessis + */ +public class SqlServerPagingQueryProvider extends SqlWindowingPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateTopSqlQuery(this, false, buildTopClause(pageSize)); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + if(StringUtils.hasText(getGroupClause())) { + return SqlPagingQueryUtils.generateGroupedTopSqlQuery(this, true, buildTopClause(pageSize)); + } + else { + return SqlPagingQueryUtils.generateTopSqlQuery(this, true, buildTopClause(pageSize)); + } + } + + @Override + protected Object getSubQueryAlias() { + return "AS TMP_SUB "; + } + + private String buildTopClause(int pageSize) { + return new StringBuilder().append("TOP ").append(pageSize).toString(); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/SqlWindowingPagingQueryProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/SqlWindowingPagingQueryProvider.java new file mode 100644 index 0000000000..1069390b96 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/SqlWindowingPagingQueryProvider.java @@ -0,0 +1,180 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.batch.support; + +import java.util.LinkedHashMap; +import java.util.Locale; +import java.util.Map; + +import org.springframework.batch.item.database.Order; +import org.springframework.util.StringUtils; + +/** + * Generic Paging Query Provider using standard SQL:2003 windowing functions. + * These features are supported by DB2, Oracle, SQL Server 2005, Sybase and + * Apache Derby version 10.4.1.3 + * + * @author Thomas Risberg + * @author Michael Minella + * @author Corneil du Plessis + */ +public class SqlWindowingPagingQueryProvider extends AbstractSqlPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return "SELECT * FROM ( " + + "SELECT " + (StringUtils.hasText(getOrderedQueryAlias()) ? getOrderedQueryAlias() + ".*, " : "*, ") + + "ROW_NUMBER() OVER (" + getOverClause() + + ") AS ROW_NUMBER" + + getOverSubstituteClauseStart() + + " FROM " + getFromClause() + + (!StringUtils.hasText(getWhereClause()) ? "" : " WHERE " + getWhereClause()) + + (!StringUtils.hasText(getGroupClause()) ? "" : " GROUP BY " + getGroupClause()) + + getOverSubstituteClauseEnd() + + ") " + getSubQueryAlias() + "WHERE " + extractTableAlias() + + "ROW_NUMBER <= " + pageSize + + " ORDER BY " + SqlPagingQueryUtils.buildSortClause(this); + } + + protected String getOrderedQueryAlias() { + return ""; + } + + protected Object getSubQueryAlias() { + return "AS TMP_SUB "; + } + + protected Object extractTableAlias() { + String alias = "" + getSubQueryAlias(); + if (StringUtils.hasText(alias) && alias.toUpperCase(Locale.ROOT).startsWith("AS")) { + alias = alias.substring(3).trim() + "."; + } + return alias; + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + StringBuilder sql = new StringBuilder(); + sql.append("SELECT * FROM ( "); + sql.append("SELECT ").append(StringUtils.hasText(getOrderedQueryAlias()) ? 
getOrderedQueryAlias() + ".*, " : "*, "); + sql.append("ROW_NUMBER() OVER (").append(getOverClause()); + sql.append(") AS ROW_NUMBER"); + sql.append(getOverSubstituteClauseStart()); + sql.append(" FROM ").append(getFromClause()); + if (StringUtils.hasText(getWhereClause())) { + sql.append(" WHERE "); + sql.append(getWhereClause()); + } + if (StringUtils.hasText(getGroupClause())) { + sql.append(" GROUP BY "); + sql.append(getGroupClause()); + } + sql.append(getOverSubstituteClauseEnd()); + sql.append(") ") + .append(getSubQueryAlias()) + .append("WHERE ") + .append(extractTableAlias()) + .append("ROW_NUMBER <= ") + .append(pageSize); + sql.append(" AND "); + SqlPagingQueryUtils.buildSortConditions(this, sql); + sql.append(" ORDER BY ").append(SqlPagingQueryUtils.buildSortClause(this)); + + return sql.toString(); + } + + @Override + public String generateJumpToItemQuery(int itemIndex, int pageSize) { + int page = itemIndex / pageSize; + int lastRowNum = (page * pageSize); + if (lastRowNum <= 0) { + lastRowNum = 1; + } + + StringBuilder sql = new StringBuilder(); + sql.append("SELECT "); + buildSortKeySelect(sql, getSortKeysReplaced(extractTableAlias())); + sql.append(" FROM ( "); + sql.append("SELECT "); + buildSortKeySelect(sql); + sql.append(", ROW_NUMBER() OVER (").append(getOverClause()); + sql.append(") AS ROW_NUMBER"); + sql.append(getOverSubstituteClauseStart()); + sql.append(" FROM ").append(getFromClause()); + sql.append(getWhereClause() == null ? "" : " WHERE " + getWhereClause()); + sql.append(getGroupClause() == null ? "" : " GROUP BY " + getGroupClause()); + sql.append(getOverSubstituteClauseEnd()); + sql.append(") ").append(getSubQueryAlias()).append("WHERE ").append(extractTableAlias()).append( + "ROW_NUMBER = ").append(lastRowNum); + sql.append(" ORDER BY ").append(SqlPagingQueryUtils.buildSortClause(getSortKeysReplaced(extractTableAlias()))); + + return sql.toString(); + } + + private Map<String, Order> getSortKeysReplaced(Object qualifierReplacement) { + final String newQualifier = "" + qualifierReplacement; + final Map<String, Order> sortKeys = new LinkedHashMap<>(); + for (Map.Entry<String, Order> sortKey : getSortKeys().entrySet()) { + sortKeys.put(sortKey.getKey().replaceFirst("^.*\\.", newQualifier), sortKey.getValue()); + } + return sortKeys; + } + + private void buildSortKeySelect(StringBuilder sql) { + buildSortKeySelect(sql, null); + } + + private void buildSortKeySelect(StringBuilder sql, Map<String, Order> sortKeys) { + String prefix = ""; + if (sortKeys == null) { + sortKeys = getSortKeys(); + } + for (Map.Entry<String, Order> sortKey : sortKeys.entrySet()) { + sql.append(prefix); + prefix = ", "; + sql.append(sortKey.getKey()); + } + } + + protected String getOverClause() { + StringBuilder sql = new StringBuilder(); + + sql.append(" ORDER BY ").append(buildSortClause(this)); + + return sql.toString(); + } + + protected String getOverSubstituteClauseStart() { + return ""; + } + + protected String getOverSubstituteClauseEnd() { + return ""; + } + + /** + * Generates ORDER BY attributes based on the sort keys. + * + * @param provider the paging query provider supplying the sort keys + * @return a String that can be appended to an ORDER BY clause. 
+ */ + private String buildSortClause(AbstractSqlPagingQueryProvider provider) { + return SqlPagingQueryUtils.buildSortClause(provider.getSortKeysWithoutAliases()); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/SqlitePagingQueryProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/SqlitePagingQueryProvider.java new file mode 100644 index 0000000000..e63eea1ba1 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/SqlitePagingQueryProvider.java @@ -0,0 +1,65 @@ +/* + * Copyright 2014-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.batch.support; + +import org.springframework.cloud.dataflow.server.batch.DataflowSqlPagingQueryProvider; +import org.springframework.util.StringUtils; + +/** + * SQLite implementation of a {@link DataflowSqlPagingQueryProvider} using database specific + * features. + * + * @author Luke Taylor + * @author Corneil du Plessis + */ +public class SqlitePagingQueryProvider extends AbstractSqlPagingQueryProvider { + /* (non-Javadoc) + * @see org.springframework.batch.item.database.support.AbstractSqlPagingQueryProvider#generateFirstPageQuery(int) + */ + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, false, buildLimitClause(pageSize)); + } + + /* (non-Javadoc) + * @see org.springframework.batch.item.database.support.AbstractSqlPagingQueryProvider#generateRemainingPagesQuery(int) + */ + @Override + public String generateRemainingPagesQuery(int pageSize) { + if(StringUtils.hasText(getGroupClause())) { + return SqlPagingQueryUtils.generateLimitGroupedSqlQuery(this, true, buildLimitClause(pageSize)); + } + else { + return SqlPagingQueryUtils.generateLimitSqlQuery(this, true, buildLimitClause(pageSize)); + } + } + + /* (non-Javadoc) + * @see org.springframework.batch.item.database.support.AbstractSqlPagingQueryProvider#generateJumpToItemQuery(int, int) + */ + @Override + public String generateJumpToItemQuery(int itemIndex, int pageSize) { + int page = itemIndex / pageSize; + int offset = Math.max((page * pageSize) - 1, 0); + return SqlPagingQueryUtils.generateLimitJumpToQuery(this, "LIMIT " + offset + ", 1"); + } + + private String buildLimitClause(int pageSize) { + return "LIMIT " + pageSize; + } +} + diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/SybasePagingQueryProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/SybasePagingQueryProvider.java new file mode 100644 index 0000000000..64a0a6aa05 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/support/SybasePagingQueryProvider.java @@ 
-0,0 +1,56 @@ +/* + * Copyright 2006-2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.batch.support; + +import org.springframework.cloud.dataflow.server.batch.DataflowSqlPagingQueryProvider; +import org.springframework.util.StringUtils; + +/** + * Sybase implementation of a {@link DataflowSqlPagingQueryProvider} using + * database specific features. + * + * @author Thomas Risberg + * @author Michael Minella + * @author Corneil du Plessis + */ +public class SybasePagingQueryProvider extends SqlWindowingPagingQueryProvider { + + @Override + public String generateFirstPageQuery(int pageSize) { + return SqlPagingQueryUtils.generateTopSqlQuery(this, false, buildTopClause(pageSize)); + } + + @Override + public String generateRemainingPagesQuery(int pageSize) { + if(StringUtils.hasText(getGroupClause())) { + return SqlPagingQueryUtils.generateGroupedTopSqlQuery(this, true, buildTopClause(pageSize)); + } + else { + return SqlPagingQueryUtils.generateTopSqlQuery(this, true, buildTopClause(pageSize)); + } + } + + @Override + protected Object getSubQueryAlias() { + return ""; + } + + private String buildTopClause(int pageSize) { + return "TOP " + pageSize; + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowControllerAutoConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowControllerAutoConfiguration.java index 2bfc62f904..d7c35c7e41 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowControllerAutoConfiguration.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowControllerAutoConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2020 the original author or authors. + * Copyright 2016-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -16,6 +16,7 @@ package org.springframework.cloud.dataflow.server.config; +import java.net.URI; import java.util.Arrays; import java.util.Optional; import java.util.concurrent.ForkJoinPool; @@ -26,6 +27,9 @@ import org.slf4j.LoggerFactory; import org.springframework.beans.factory.ObjectProvider; +import org.springframework.boot.actuate.info.BuildInfoContributor; +import org.springframework.boot.actuate.info.GitInfoContributor; +import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.condition.ConditionalOnBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; @@ -80,6 +84,7 @@ import org.springframework.cloud.dataflow.server.controller.TaskCtrController; import org.springframework.cloud.dataflow.server.controller.TaskDefinitionController; import org.springframework.cloud.dataflow.server.controller.TaskExecutionController; +import org.springframework.cloud.dataflow.server.controller.TaskExecutionThinController; import org.springframework.cloud.dataflow.server.controller.TaskLogsController; import org.springframework.cloud.dataflow.server.controller.TaskPlatformController; import org.springframework.cloud.dataflow.server.controller.TaskSchedulerController; @@ -116,13 +121,14 @@ import org.springframework.cloud.dataflow.server.service.impl.validation.DefaultTaskValidationService; import org.springframework.cloud.dataflow.server.stream.SkipperStreamDeployer; import org.springframework.cloud.dataflow.server.stream.StreamDeployer; +import org.springframework.cloud.dataflow.server.task.DataflowTaskExplorer; import org.springframework.cloud.deployer.resource.maven.MavenProperties; import org.springframework.cloud.deployer.resource.support.DelegatingResourceLoader; import org.springframework.cloud.skipper.client.DefaultSkipperClient; import org.springframework.cloud.skipper.client.SkipperClient; import org.springframework.cloud.skipper.client.SkipperClientProperties; import org.springframework.cloud.skipper.client.SkipperClientResponseErrorHandler; -import org.springframework.cloud.task.repository.TaskExplorer; +import org.springframework.cloud.skipper.client.util.HttpClientConfigurer; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; @@ -130,7 +136,6 @@ import org.springframework.data.jpa.repository.config.EnableJpaRepositories; import org.springframework.hateoas.mediatype.MessageResolver; import org.springframework.hateoas.mediatype.hal.CurieProvider; -import org.springframework.hateoas.mediatype.hal.HalConfiguration; import org.springframework.hateoas.mediatype.hal.Jackson2HalModule; import org.springframework.hateoas.server.EntityLinks; import org.springframework.hateoas.server.core.AnnotationLinkRelationProvider; @@ -150,13 +155,14 @@ * @author Andy Clement * @author Glenn Renfro * @author Christian Tzolov + * @author Corneil du Plessis */ +@AutoConfiguration @SuppressWarnings("all") -@Configuration @Import(CompletionConfiguration.class) -@ConditionalOnBean({ EnableDataFlowServerConfiguration.Marker.class }) -@EnableConfigurationProperties({ FeaturesProperties.class, VersionInfoProperties.class, - DockerValidatorProperties.class, DataflowMetricsProperties.class }) +@ConditionalOnBean({EnableDataFlowServerConfiguration.Marker.class}) +@EnableConfigurationProperties({FeaturesProperties.class, 
VersionInfoProperties.class, + DockerValidatorProperties.class, DataflowMetricsProperties.class}) @ConditionalOnProperty(prefix = "dataflow.server", name = "enabled", havingValue = "true", matchIfMissing = true) @EntityScan({ "org.springframework.cloud.dataflow.core" @@ -179,7 +185,7 @@ public RootController rootController(EntityLinks entityLinks) { @Bean public CompletionController completionController(StreamCompletionProvider completionProvider, - TaskCompletionProvider taskCompletionProvider) { + TaskCompletionProvider taskCompletionProvider) { return new CompletionController(completionProvider, taskCompletionProvider); } @@ -190,13 +196,16 @@ public ToolsController toolsController() { @Bean public AboutController aboutController(ObjectProvider<StreamDeployer> streamDeployer, - ObjectProvider<LauncherRepository> launcherRepository, - FeaturesProperties featuresProperties, - VersionInfoProperties versionInfoProperties, - SecurityStateBean securityStateBean, - DataflowMetricsProperties monitoringDashboardInfoProperties) { + ObjectProvider<LauncherRepository> launcherRepository, + FeaturesProperties featuresProperties, + VersionInfoProperties versionInfoProperties, + SecurityStateBean securityStateBean, + DataflowMetricsProperties monitoringDashboardInfoProperties, + ObjectProvider<GitInfoContributor> gitInfoContributor, + ObjectProvider<BuildInfoContributor> buildInfoContributor) { return new AboutController(streamDeployer.getIfAvailable(), launcherRepository.getIfAvailable(), - featuresProperties, versionInfoProperties, securityStateBean, monitoringDashboardInfoProperties); + featuresProperties, versionInfoProperties, securityStateBean, monitoringDashboardInfoProperties, + gitInfoContributor, buildInfoContributor); } @Bean @@ -209,7 +218,8 @@ public RestControllerAdvice restControllerAdvice() { return new RestControllerAdvice(); } - @Configuration + + @Configuration(proxyBeanMethods = false) public static class AppRegistryConfiguration { @Bean @@ -222,13 +232,13 @@ public ForkJoinPoolFactoryBean appRegistryFJPFB() { @Bean public AppResourceCommon appResourceCommon(@Nullable MavenProperties mavenProperties, - DelegatingResourceLoader delegatingResourceLoader) { + DelegatingResourceLoader delegatingResourceLoader) { return new AppResourceCommon(mavenProperties, delegatingResourceLoader); } @Bean public AppRegistryService appRegistryService(AppRegistrationRepository appRegistrationRepository, - AppResourceCommon appResourceCommon, AuditRecordService auditRecordService) { + AppResourceCommon appResourceCommon, AuditRecordService auditRecordService) { return new DefaultAppRegistryService(appRegistrationRepository, appResourceCommon, auditRecordService); } @@ -253,18 +263,31 @@ public AppRegistrationAssemblerProvider appRegistryAssemblerProvider() { } } - @Configuration + @Configuration(proxyBeanMethods = false) @ConditionalOnTasksEnabled public static class TaskEnabledConfiguration { @Bean - public TaskExecutionController taskExecutionController(TaskExplorer explorer, - TaskExecutionService taskExecutionService, - TaskDefinitionRepository taskDefinitionRepository, TaskExecutionInfoService taskExecutionInfoService, - TaskDeleteService taskDeleteService) { - return new TaskExecutionController(explorer, taskExecutionService, taskDefinitionRepository, + public TaskExecutionController taskExecutionController( + DataflowTaskExplorer explorer, + TaskExecutionService taskExecutionService, + TaskDefinitionRepository taskDefinitionRepository, + TaskExecutionInfoService taskExecutionInfoService, + TaskDeleteService taskDeleteService, + TaskJobService taskJobService + ) { + return new
TaskExecutionController(explorer, + taskExecutionService, + taskDefinitionRepository, taskExecutionInfoService, - taskDeleteService); + taskDeleteService, + taskJobService + ); + } + + @Bean + public TaskExecutionThinController taskExecutionThinController(DataflowTaskExplorer taskExplorer, TaskDefinitionRepository taskDefinitionRepository) { + return new TaskExecutionThinController(taskExplorer, taskDefinitionRepository); } @Bean @@ -274,15 +297,22 @@ public TaskPlatformController taskLauncherController(LauncherService launcherSer @Bean @ConditionalOnMissingBean - public TaskDefinitionAssemblerProvider taskDefinitionAssemblerProvider(TaskExecutionService taskExecutionService) { - return new DefaultTaskDefinitionAssemblerProvider(taskExecutionService); + public TaskDefinitionAssemblerProvider taskDefinitionAssemblerProvider( + TaskExecutionService taskExecutionService, + TaskJobService taskJobService, + DataflowTaskExplorer taskExplorer + ) { + return new DefaultTaskDefinitionAssemblerProvider(taskExecutionService, taskJobService, taskExplorer); } @Bean - public TaskDefinitionController taskDefinitionController(TaskExplorer taskExplorer, - TaskDefinitionRepository repository, TaskSaveService taskSaveService, - TaskDeleteService taskDeleteService, - TaskDefinitionAssemblerProvider taskDefinitionAssemblerProvider) { + public TaskDefinitionController taskDefinitionController( + DataflowTaskExplorer taskExplorer, + TaskDefinitionRepository repository, + TaskSaveService taskSaveService, + TaskDeleteService taskDeleteService, + TaskDefinitionAssemblerProvider taskDefinitionAssemblerProvider + ) { return new TaskDefinitionController(taskExplorer, repository, taskSaveService, taskDeleteService, taskDefinitionAssemblerProvider); } @@ -303,13 +333,13 @@ public JobExecutionThinController jobExecutionThinController(TaskJobService repo } @Bean - public JobStepExecutionController jobStepExecutionController(JobService service) { - return new JobStepExecutionController(service); + public JobStepExecutionController jobStepExecutionController(JobService jobService) { + return new JobStepExecutionController(jobService); } @Bean - public JobStepExecutionProgressController jobStepExecutionProgressController(JobService service) { - return new JobStepExecutionProgressController(service); + public JobStepExecutionProgressController jobStepExecutionProgressController(JobService jobService, TaskJobService taskJobService) { + return new JobStepExecutionProgressController(jobService, taskJobService); } @Bean @@ -319,9 +349,9 @@ public JobInstanceController jobInstanceController(TaskJobService repository) { @Bean public TaskValidationService taskValidationService(AppRegistryService appRegistry, - DockerValidatorProperties dockerValidatorProperties, - TaskDefinitionRepository taskDefinitionRepository, - TaskConfigurationProperties taskConfigurationProperties) { + DockerValidatorProperties dockerValidatorProperties, + TaskDefinitionRepository taskDefinitionRepository, + TaskConfigurationProperties taskConfigurationProperties) { return new DefaultTaskValidationService(appRegistry, dockerValidatorProperties, taskDefinitionRepository); @@ -344,24 +374,25 @@ public LauncherService launcherService(LauncherRepository launcherRepository) { @Bean public TaskCtrController tasksCtrController(ApplicationConfigurationMetadataResolver metadataResolver, - TaskConfigurationProperties taskConfigurationProperties, - ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties, - AppResourceCommon appResourceCommon) 
{ + TaskConfigurationProperties taskConfigurationProperties, + ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties, + AppResourceCommon appResourceCommon) { return new TaskCtrController(metadataResolver, taskConfigurationProperties, composedTaskRunnerConfigurationProperties, appResourceCommon); } + } - @Configuration + @Configuration(proxyBeanMethods = false) @ConditionalOnStreamsEnabled @EnableConfigurationProperties(SkipperClientProperties.class) public static class StreamEnabledConfiguration { @Bean public StreamValidationService streamValidationService(AppRegistryService appRegistry, - DockerValidatorProperties dockerValidatorProperties, - StreamDefinitionRepository streamDefinitionRepository, - StreamDefinitionService streamDefinitionService) { + DockerValidatorProperties dockerValidatorProperties, + StreamDefinitionRepository streamDefinitionRepository, + StreamDefinitionService streamDefinitionService) { return new DefaultStreamValidationService(appRegistry, dockerValidatorProperties, streamDefinitionRepository, @@ -384,9 +415,9 @@ public StreamDefinitionAssemblerProvider streamDefinitionAssemblerProvider( @Bean @ConditionalOnMissingBean public StreamDefinitionController streamDefinitionController(StreamService streamService, - StreamDefinitionService streamDefinitionService, AppRegistryService appRegistryService, - StreamDefinitionAssemblerProvider streamDefinitionAssemblerProvider, - AppRegistrationAssemblerProvider appRegistrationAssemblerProvider) { + StreamDefinitionService streamDefinitionService, AppRegistryService appRegistryService, + StreamDefinitionAssemblerProvider streamDefinitionAssemblerProvider, + AppRegistrationAssemblerProvider appRegistrationAssemblerProvider) { return new StreamDefinitionController(streamService, streamDefinitionService, appRegistryService, streamDefinitionAssemblerProvider, appRegistrationAssemblerProvider); } @@ -411,6 +442,7 @@ public StreamLogsController streamLogsController(StreamDeployer streamDeployer) return new StreamLogsController(streamDeployer); } + @Bean @ConditionalOnMissingBean(name = "runtimeAppsStatusFJPFB") public ForkJoinPoolFactoryBean runtimeAppsStatusFJPFB() { @@ -428,13 +460,13 @@ public StreamDeploymentController updatableStreamDeploymentController( @Bean public SkipperClient skipperClient(SkipperClientProperties properties, - RestTemplateBuilder restTemplateBuilder, ObjectMapper objectMapper, - @Nullable OAuth2TokenUtilsService oauth2TokenUtilsService) { + RestTemplateBuilder restTemplateBuilder, ObjectMapper objectMapper, + @Nullable OAuth2TokenUtilsService oauth2TokenUtilsService) { // TODO (Tzolov) review the manual Hal convertion configuration objectMapper.registerModule(new Jackson2HalModule()); objectMapper.setHandlerInstantiator(new Jackson2HalModule.HalHandlerInstantiator( - new AnnotationLinkRelationProvider(), CurieProvider.NONE, MessageResolver.DEFAULTS_ONLY, new HalConfiguration())); + new AnnotationLinkRelationProvider(), CurieProvider.NONE, MessageResolver.DEFAULTS_ONLY)); objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); RestTemplate restTemplate = restTemplateBuilder @@ -444,16 +476,24 @@ public SkipperClient skipperClient(SkipperClientProperties properties, new MappingJackson2HttpMessageConverter(objectMapper))) .build(); + if (properties.isSkipSslValidation()) { + restTemplate.setRequestFactory(HttpClientConfigurer.create() + .targetHost(URI.create(properties.getServerUri())) + .skipTlsCertificateVerification(true) + 
.buildClientHttpRequestFactory()); + logger.warn("Skipper Client - Skip SSL Validation is Enabled!"); + } + return new DefaultSkipperClient(properties.getServerUri(), restTemplate); } @Bean public SkipperStreamDeployer skipperStreamDeployer(SkipperClient skipperClient, - StreamDefinitionRepository streamDefinitionRepository, - SkipperClientProperties skipperClientProperties, - AppRegistryService appRegistryService, - ForkJoinPool runtimeAppsStatusFJPFB, - StreamDefinitionService streamDefinitionService) { + StreamDefinitionRepository streamDefinitionRepository, + SkipperClientProperties skipperClientProperties, + AppRegistryService appRegistryService, + ForkJoinPool runtimeAppsStatusFJPFB, + StreamDefinitionService streamDefinitionService) { logger.info("Skipper URI [" + skipperClientProperties.getServerUri() + "]"); return new SkipperStreamDeployer(skipperClient, streamDefinitionRepository, appRegistryService, runtimeAppsStatusFJPFB, streamDefinitionService); @@ -461,9 +501,9 @@ public SkipperStreamDeployer skipperStreamDeployer(SkipperClient skipperClient, @Bean public AppDeploymentRequestCreator streamDeploymentPropertiesUtils(AppRegistryService appRegistry, - CommonApplicationProperties commonApplicationProperties, - ApplicationConfigurationMetadataResolver applicationConfigurationMetadataResolver, - StreamDefinitionService streamDefinitionService) { + CommonApplicationProperties commonApplicationProperties, + ApplicationConfigurationMetadataResolver applicationConfigurationMetadataResolver, + StreamDefinitionService streamDefinitionService) { return new AppDeploymentRequestCreator(appRegistry, commonApplicationProperties, applicationConfigurationMetadataResolver, streamDefinitionService); } @@ -485,11 +525,12 @@ public TaskSchedulerController taskSchedulerController(SchedulerService schedule return new TaskSchedulerController(schedulerService); } - @Configuration + @Configuration(proxyBeanMethods = false) public static class AuditingConfiguration { + @Bean public AuditRecordService auditRecordService(AuditRecordRepository auditRecordRepository, - ObjectMapper objectMapper) { + ObjectMapper objectMapper) { return new DefaultAuditRecordService(auditRecordRepository); } @@ -500,7 +541,7 @@ public AuditRecordController auditController(AuditRecordService auditRecordServi } } - @Configuration + @Configuration(proxyBeanMethods = false) public static class SecurityConfiguration { @Bean diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowServerAutoConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowServerAutoConfiguration.java index fb18552fed..90f4ed12ae 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowServerAutoConfiguration.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowServerAutoConfiguration.java @@ -16,11 +16,11 @@ package org.springframework.cloud.dataflow.server.config; +import org.springframework.boot.autoconfigure.AutoConfiguration; import org.springframework.boot.autoconfigure.AutoConfigureBefore; import org.springframework.boot.autoconfigure.condition.ConditionalOnBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.autoconfigure.jackson.JacksonAutoConfiguration; -import org.springframework.context.annotation.Configuration; import
org.springframework.context.annotation.Import; /** @@ -28,7 +28,7 @@ * * @author Janne Valkealahti */ -@Configuration +@AutoConfiguration @AutoConfigureBefore({JacksonAutoConfiguration.class}) @ConditionalOnBean(EnableDataFlowServerConfiguration.Marker.class) @Import(DataFlowServerConfiguration.class) diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowServerConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowServerConfiguration.java index 2508df666f..71bb9cf9aa 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowServerConfiguration.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowServerConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2019 the original author or authors. + * Copyright 2015-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,14 +16,9 @@ package org.springframework.cloud.dataflow.server.config; -import javax.persistence.EntityManager; -import javax.servlet.Filter; -import javax.sql.DataSource; +import jakarta.persistence.EntityManager; +import jakarta.servlet.Filter; -import org.springframework.batch.core.repository.dao.AbstractJdbcBatchMetadataDao; -import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory; -import org.springframework.batch.item.database.support.DefaultDataFieldMaxValueIncrementerFactory; -import org.springframework.beans.factory.ObjectProvider; import org.springframework.boot.autoconfigure.batch.BatchProperties; import org.springframework.boot.autoconfigure.transaction.TransactionManagerCustomizers; import org.springframework.boot.context.properties.EnableConfigurationProperties; @@ -35,22 +30,13 @@ import org.springframework.cloud.dataflow.server.config.features.FeaturesConfiguration; import org.springframework.cloud.dataflow.server.config.web.WebConfiguration; import org.springframework.cloud.dataflow.server.db.migration.DataFlowFlywayConfigurationCustomizer; -import org.springframework.cloud.dataflow.server.repository.DataflowJobExecutionDao; -import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDao; -import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao; -import org.springframework.cloud.dataflow.server.repository.JdbcDataflowJobExecutionDao; -import org.springframework.cloud.dataflow.server.repository.JdbcDataflowTaskExecutionDao; -import org.springframework.cloud.dataflow.server.repository.JdbcDataflowTaskExecutionMetadataDao; import org.springframework.cloud.dataflow.server.support.AuthenticationSuccessEventListener; +import org.springframework.cloud.dataflow.server.task.DataflowTaskExplorerConfiguration; import org.springframework.cloud.task.configuration.TaskProperties; -import org.springframework.cloud.task.repository.support.DatabaseType; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; -import org.springframework.context.annotation.Primary; import org.springframework.data.web.config.EnableSpringDataWebSupport; -import org.springframework.hateoas.config.EnableHypermediaSupport; -import 
org.springframework.jdbc.support.MetaDataAccessException; import org.springframework.orm.jpa.JpaTransactionManager; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.web.filter.ForwardedHeaderFilter; @@ -68,11 +54,21 @@ * @author Josh Long * @author Michael Minella * @author Gunnar Hillert + * @author Michael Wirth + * @author Corneil du Plessis */ -@EnableHypermediaSupport(type = EnableHypermediaSupport.HypermediaType.HAL) @EnableSpringDataWebSupport @Configuration -@Import({ CompletionConfiguration.class, FeaturesConfiguration.class, WebConfiguration.class }) +@Import({ + CompletionConfiguration.class, + FeaturesConfiguration.class, + WebConfiguration.class, + H2ServerConfiguration.class, + DataflowTaskExplorerConfiguration.class, + DataFlowTaskConfiguration.class, + SecurityConfiguration.class + +}) @EnableConfigurationProperties({ BatchProperties.class, CommonApplicationProperties.class }) public class DataFlowServerConfiguration { @@ -87,49 +83,25 @@ public Filter forwardedHeaderFilter() { } @Bean - @Primary - public PlatformTransactionManager transactionManager( - ObjectProvider transactionManagerCustomizers) { + PlatformTransactionManager transactionManager(TransactionManagerCustomizers transactionManagerCustomizers) { JpaTransactionManager transactionManager = new JpaTransactionManager(); - transactionManagerCustomizers.ifAvailable((customizers) -> customizers.customize(transactionManager)); + transactionManagerCustomizers.customize(transactionManager); return transactionManager; } - @Bean - DataflowJobExecutionDao dataflowJobExecutionDao(DataSource dataSource) { - return new JdbcDataflowJobExecutionDao(dataSource, AbstractJdbcBatchMetadataDao.DEFAULT_TABLE_PREFIX); - } @Bean public TaskProperties taskProperties() { return new TaskProperties(); } - @Bean - DataflowTaskExecutionDao dataflowTaskExecutionDao(DataSource dataSource, TaskProperties taskProperties) { - return new JdbcDataflowTaskExecutionDao(dataSource, taskProperties); - } - @Bean - DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao(DataSource dataSource) { - DataFieldMaxValueIncrementerFactory incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory(dataSource); - String databaseType; - try { - databaseType = DatabaseType.fromMetaData(dataSource).name(); - } - catch (MetaDataAccessException e) { - throw new IllegalStateException(e); - } - return new JdbcDataflowTaskExecutionMetadataDao(dataSource, incrementerFactory.getIncrementer(databaseType, - "task_execution_metadata_seq")); - } - @Bean public AuthenticationSuccessEventListener authenticationSuccessEventListener( AuditRecordService auditRecordService) { return new AuthenticationSuccessEventListener(auditRecordService); } - + @Bean public AppRegistrationRepositoryCustom appRegistrationRepositoryCustom(EntityManager entityManager) { return new AppRegistrationRepositoryImpl(entityManager); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowTaskConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowTaskConfiguration.java new file mode 100644 index 0000000000..5b2be1c25a --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowTaskConfiguration.java @@ -0,0 +1,159 @@ +/* + * Copyright 2023-2024 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.config; + +import java.sql.SQLException; + +import javax.sql.DataSource; + +import org.springframework.batch.core.explore.JobExplorer; +import org.springframework.batch.core.launch.support.TaskExecutorJobLauncher; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory; +import org.springframework.beans.BeanUtils; +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; +import org.springframework.cloud.dataflow.core.database.support.MultiSchemaIncrementerFactory; +import org.springframework.cloud.dataflow.server.batch.AllInOneExecutionContextSerializer; +import org.springframework.cloud.dataflow.server.batch.JdbcSearchableJobExecutionDao; +import org.springframework.cloud.dataflow.server.batch.JobService; +import org.springframework.cloud.dataflow.server.batch.SimpleJobServiceFactoryBean; +import org.springframework.cloud.dataflow.server.repository.DataflowJobExecutionDao; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDao; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao; +import org.springframework.cloud.dataflow.server.repository.DefaultTaskDefinitionReader; +import org.springframework.cloud.dataflow.server.repository.DefaultTaskDeploymentReader; +import org.springframework.cloud.dataflow.server.repository.JdbcDataflowJobExecutionDao; +import org.springframework.cloud.dataflow.server.repository.JdbcDataflowTaskExecutionDao; +import org.springframework.cloud.dataflow.server.repository.JdbcDataflowTaskExecutionMetadataDao; +import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; +import org.springframework.cloud.dataflow.server.repository.TaskDeploymentRepository; +import org.springframework.cloud.dataflow.server.task.TaskDefinitionReader; +import org.springframework.cloud.dataflow.server.task.TaskDeploymentReader; +import org.springframework.cloud.task.batch.listener.support.JdbcTaskBatchDao; +import org.springframework.cloud.task.configuration.TaskProperties; +import org.springframework.cloud.task.repository.dao.JdbcTaskExecutionDao; +import org.springframework.cloud.task.repository.dao.TaskExecutionDao; +import org.springframework.cloud.task.repository.support.DatabaseType; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.env.Environment; +import org.springframework.jdbc.support.MetaDataAccessException; +import org.springframework.transaction.PlatformTransactionManager; + +/** + * Configuration for DAO Containers use for multiple schema targets. 
+ * + * @author Corneil du Plessis + */ +@Configuration +public class DataFlowTaskConfiguration { + + @Bean + public DataflowJobExecutionDao dataflowJobExecutionDao(DataSource dataSource) { + return new JdbcDataflowJobExecutionDao(dataSource, "BATCH_"); + } + + @Bean + public DataflowTaskExecutionDao dataflowTaskExecutionDao(DataSource dataSource, + TaskProperties taskProperties) { + TaskProperties properties = new TaskProperties(); + BeanUtils.copyProperties(taskProperties, properties); + properties.setTablePrefix("TASK_"); + return new JdbcDataflowTaskExecutionDao(dataSource, properties); + } + + @Bean + public DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao(DataSource dataSource) + throws SQLException { + DataFieldMaxValueIncrementerFactory incrementerFactory = new MultiSchemaIncrementerFactory(dataSource); + String databaseType; + try { + databaseType = DatabaseType.fromMetaData(dataSource).name(); + } catch (MetaDataAccessException e) { + throw new IllegalStateException(e); + } + DataflowTaskExecutionMetadataDao dao = new JdbcDataflowTaskExecutionMetadataDao( + dataSource, + incrementerFactory.getIncrementer(databaseType, "TASK_EXECUTION_METADATA_SEQ"), + "TASK_"); + return dao; + } + + @Bean + public TaskExecutionDao taskExecutionDao(DataSource dataSource) throws Exception{ + DataFieldMaxValueIncrementerFactory incrementerFactory = new MultiSchemaIncrementerFactory(dataSource); + JdbcTaskExecutionDao dao = new JdbcTaskExecutionDao(dataSource); + String databaseType; + try { + databaseType = DatabaseType.fromMetaData(dataSource).name(); + } + catch (MetaDataAccessException e) { + throw new IllegalStateException(e); + } + dao.setTaskIncrementer(incrementerFactory.getIncrementer(databaseType, "TASK_SEQ")); + return dao; + } + + @Bean + public JobService jobService(DataSource dataSource, PlatformTransactionManager platformTransactionManager, + JobRepository jobRepository, JobExplorer jobExplorer, Environment environment) + throws Exception{ + SimpleJobServiceFactoryBean factoryBean = new SimpleJobServiceFactoryBean(); + factoryBean.setEnvironment(environment); + factoryBean.setDataSource(dataSource); + factoryBean.setTransactionManager(platformTransactionManager); + factoryBean.setJobLauncher(new TaskExecutorJobLauncher()); + factoryBean.setJobExplorer(jobExplorer); + factoryBean.setJobRepository(jobRepository); + factoryBean.setSerializer(new AllInOneExecutionContextSerializer()); + try { + factoryBean.afterPropertiesSet(); + } catch (Throwable x) { + throw new RuntimeException("Exception creating JobService", x); + } + return factoryBean.getObject(); + } + + @Bean + public JdbcSearchableJobExecutionDao jobExecutionDao(DataSource dataSource) { + JdbcSearchableJobExecutionDao jdbcSearchableJobExecutionDao = new JdbcSearchableJobExecutionDao(); + jdbcSearchableJobExecutionDao.setDataSource(dataSource); + try { + jdbcSearchableJobExecutionDao.afterPropertiesSet(); + } + catch (Throwable x) { + throw new RuntimeException("Exception creating JdbcSearchableJobExecutionDao", x); + } + return jdbcSearchableJobExecutionDao; + } + + @Bean + @ConditionalOnMissingBean + public TaskDefinitionReader taskDefinitionReader(TaskDefinitionRepository repository) { + return new DefaultTaskDefinitionReader(repository); + } + + @Bean + @ConditionalOnMissingBean + public TaskDeploymentReader taskDeploymentReader(TaskDeploymentRepository repository) { + return new DefaultTaskDeploymentReader(repository); + } + + @Bean + public JdbcTaskBatchDao taskBatchDao(DataSource dataSource) { + return new 
JdbcTaskBatchDao(dataSource); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataflowAsyncAutoConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataflowAsyncAutoConfiguration.java new file mode 100644 index 0000000000..25937f61a7 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataflowAsyncAutoConfiguration.java @@ -0,0 +1,77 @@ +/* + * Copyright 2016-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.config; + +import java.util.concurrent.Executor; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.aop.interceptor.AsyncUncaughtExceptionHandler; +import org.springframework.boot.autoconfigure.AutoConfiguration; +import org.springframework.boot.autoconfigure.AutoConfigureAfter; +import org.springframework.boot.autoconfigure.condition.ConditionalOnBean; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.autoconfigure.task.TaskExecutionAutoConfiguration; +import org.springframework.boot.task.ThreadPoolTaskExecutorBuilder; +import org.springframework.cloud.dataflow.core.DataFlowPropertyKeys; +import org.springframework.context.annotation.Bean; +import org.springframework.scheduling.annotation.AsyncConfigurer; +import org.springframework.scheduling.annotation.EnableAsync; + +import static org.springframework.cloud.dataflow.server.config.DataflowAsyncAutoConfiguration.ASYNC_PROPS_PREFIX; + +/** + * Enables async executions for the Spring Cloud Dataflow server. + * Uses the Spring Boot autoconfigured {@code TaskExecutorBuilder} to create an async executor and register it + * with name {@link #DATAFLOW_ASYNC_EXECUTOR}. 
+ * + * @author Tobias Soloschenko + */ +@AutoConfiguration +@ConditionalOnBean({EnableDataFlowServerConfiguration.Marker.class}) +@ConditionalOnProperty(prefix = ASYNC_PROPS_PREFIX, name = "enabled", havingValue = "true") +@AutoConfigureAfter(TaskExecutionAutoConfiguration.class) +@EnableAsync +public class DataflowAsyncAutoConfiguration implements AsyncConfigurer { + + private static final Logger logger = LoggerFactory.getLogger(DataflowAsyncAutoConfiguration.class); + + public static final String ASYNC_PROPS_PREFIX = DataFlowPropertyKeys.PREFIX + "async"; + + public static final String DATAFLOW_ASYNC_EXECUTOR = "dataflowAsyncExecutor"; + + private static final String THREAD_NAME_PREFIX = "scdf-async-"; + + private final ThreadPoolTaskExecutorBuilder taskExecutorBuilder; + + public DataflowAsyncAutoConfiguration(ThreadPoolTaskExecutorBuilder taskExecutorBuilder) { + this.taskExecutorBuilder = taskExecutorBuilder; + } + + @Bean(name = DATAFLOW_ASYNC_EXECUTOR) + @Override + public Executor getAsyncExecutor() { + return this.taskExecutorBuilder.threadNamePrefix(THREAD_NAME_PREFIX).build(); + } + + @Override + public AsyncUncaughtExceptionHandler getAsyncUncaughtExceptionHandler() { + return (throwable, method, objects) -> logger.error("Exception thrown in @Async Method " + method.getName(), + throwable); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataflowOAuthSecurityConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataflowOAuthSecurityConfiguration.java index cf54311073..d27ab9533f 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataflowOAuthSecurityConfiguration.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataflowOAuthSecurityConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2017 the original author or authors. + * Copyright 2016-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -13,13 +13,61 @@ * See the License for the specific language governing permissions and * limitations under the License. 
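// Editor's illustration - a minimal sketch, not part of the patch: consuming the async
// executor registered by DataflowAsyncAutoConfiguration above. The opt-in property assumes
// DataFlowPropertyKeys.PREFIX resolves to "spring.cloud.dataflow.", i.e.:
//
//   spring.cloud.dataflow.async.enabled=true
//
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

import static org.springframework.cloud.dataflow.server.config.DataflowAsyncAutoConfiguration.DATAFLOW_ASYNC_EXECUTOR;

@Service
class BackgroundCleanupService { // hypothetical service, for illustration only

	// Runs on the "scdf-async-" prefixed pool; uncaught exceptions are logged by the
	// AsyncUncaughtExceptionHandler defined in the configuration above.
	@Async(DATAFLOW_ASYNC_EXECUTOR)
	public void cleanUpInBackground() {
		// long-running, fire-and-forget work goes here
	}
}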
*/ + package org.springframework.cloud.dataflow.server.config; -import org.springframework.cloud.common.security.OAuthSecurityConfiguration; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import org.springframework.beans.factory.ObjectProvider; +import org.springframework.boot.autoconfigure.security.oauth2.client.OAuth2ClientProperties; +import org.springframework.boot.autoconfigure.security.oauth2.resource.OAuth2ResourceServerProperties; +import org.springframework.cloud.common.security.AuthorizationProperties; +import org.springframework.cloud.common.security.OAuthClientConfiguration; +import org.springframework.cloud.common.security.ProviderRoleMapping; +import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService; +import org.springframework.cloud.common.security.support.AccessTokenClearingLogoutSuccessHandler; +import org.springframework.cloud.common.security.support.MappingJwtGrantedAuthoritiesConverter; import org.springframework.cloud.common.security.support.OnOAuth2SecurityEnabled; +import org.springframework.cloud.common.security.support.SecurityConfigUtils; +import org.springframework.cloud.common.security.support.SecurityStateBean; +import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.core.convert.converter.Converter; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; +import org.springframework.security.authentication.AbstractAuthenticationToken; +import org.springframework.security.authentication.AuthenticationManager; +import org.springframework.security.config.Customizer; import org.springframework.security.config.annotation.web.builders.HttpSecurity; +import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; +import org.springframework.security.config.annotation.web.configurers.AbstractHttpConfigurer; +import org.springframework.security.oauth2.client.oidc.userinfo.OidcUserRequest; +import org.springframework.security.oauth2.client.userinfo.OAuth2UserRequest; +import org.springframework.security.oauth2.client.userinfo.OAuth2UserService; +import org.springframework.security.oauth2.core.oidc.user.OidcUser; +import org.springframework.security.oauth2.core.user.OAuth2User; +import org.springframework.security.oauth2.jwt.Jwt; +import org.springframework.security.oauth2.server.resource.authentication.JwtAuthenticationConverter; +import org.springframework.security.oauth2.server.resource.introspection.OpaqueTokenIntrospector; +import org.springframework.security.web.SecurityFilterChain; +import org.springframework.security.web.authentication.HttpStatusEntryPoint; +import org.springframework.security.web.authentication.LoginUrlAuthenticationEntryPoint; +import org.springframework.security.web.authentication.logout.LogoutSuccessHandler; +import org.springframework.security.web.authentication.www.BasicAuthenticationEntryPoint; +import org.springframework.security.web.authentication.www.BasicAuthenticationFilter; +import org.springframework.security.web.util.matcher.AnyRequestMatcher; +import org.springframework.security.web.util.matcher.MediaTypeRequestMatcher; +import org.springframework.security.web.util.matcher.RequestHeaderRequestMatcher; +import 
org.springframework.security.web.util.matcher.RequestMatcher; +import org.springframework.util.StringUtils; +import org.springframework.web.HttpMediaTypeNotAcceptableException; +import org.springframework.web.accept.HeaderContentNegotiationStrategy; +import org.springframework.web.context.request.NativeWebRequest; /** * Setup Spring Security OAuth for the Rest Endpoints of Spring Cloud Data Flow. @@ -30,11 +78,157 @@ */ @Configuration @Conditional(OnOAuth2SecurityEnabled.class) -public class DataflowOAuthSecurityConfiguration extends OAuthSecurityConfiguration { +@Import({ OAuthClientConfiguration.class }) +@EnableWebSecurity +public class DataflowOAuthSecurityConfiguration { + + private final OpaqueTokenIntrospector opaqueTokenIntrospector; + private final AuthenticationManager authenticationManager; + private final AuthorizationProperties authorizationProperties; + private final OAuth2UserService<OAuth2UserRequest, OAuth2User> plainOauth2UserService; + private final OAuth2UserService<OidcUserRequest, OidcUser> oidcUserService; + private final OAuth2ResourceServerProperties oAuth2ResourceServerProperties; + private final OAuth2ClientProperties oauth2ClientProperties; + private final SecurityStateBean securityStateBean; + private final OAuth2TokenUtilsService oauth2TokenUtilsService; + + public DataflowOAuthSecurityConfiguration(ObjectProvider<OpaqueTokenIntrospector> opaqueTokenIntrospector, + ObjectProvider<AuthenticationManager> authenticationManager, + ObjectProvider<AuthorizationProperties> authorizationProperties, + ObjectProvider<OAuth2UserService<OAuth2UserRequest, OAuth2User>> plainOauth2UserService, + ObjectProvider<OAuth2UserService<OidcUserRequest, OidcUser>> oidcUserService, + ObjectProvider<OAuth2ResourceServerProperties> oAuth2ResourceServerProperties, + ObjectProvider<OAuth2ClientProperties> oauth2ClientProperties, + ObjectProvider<SecurityStateBean> securityStateBean, + ObjectProvider<OAuth2TokenUtilsService> oauth2TokenUtilsService + ) { + this.opaqueTokenIntrospector = opaqueTokenIntrospector.getIfAvailable(); + this.authenticationManager = authenticationManager.getIfAvailable(); + this.authorizationProperties = authorizationProperties.getIfAvailable(); + this.plainOauth2UserService = plainOauth2UserService.getIfAvailable(); + this.oidcUserService = oidcUserService.getIfAvailable(); + this.oAuth2ResourceServerProperties = oAuth2ResourceServerProperties.getIfAvailable(); + this.oauth2ClientProperties = oauth2ClientProperties.getIfAvailable(); + this.securityStateBean = securityStateBean.getIfAvailable(); + this.oauth2TokenUtilsService = oauth2TokenUtilsService.getIfAvailable(); + } + + @Bean + public SecurityFilterChain filterChain(HttpSecurity http) throws Exception { + + BasicAuthenticationEntryPoint basicAuthenticationEntryPoint = new BasicAuthenticationEntryPoint(); + basicAuthenticationEntryPoint.setRealmName(SecurityConfigUtils.BASIC_AUTH_REALM_NAME); + basicAuthenticationEntryPoint.afterPropertiesSet(); + + if (opaqueTokenIntrospector != null) { + BasicAuthenticationFilter basicAuthenticationFilter = new BasicAuthenticationFilter( + authenticationManager, basicAuthenticationEntryPoint); + http.addFilter(basicAuthenticationFilter); + } + + List<String> authenticatedPaths = new ArrayList<>(authorizationProperties.getAuthenticatedPaths()); + authenticatedPaths.add("/"); + authenticatedPaths.add(dashboard(authorizationProperties, "/**")); + authenticatedPaths.add(authorizationProperties.getDashboardUrl()); + + List<String> permitAllPaths = new ArrayList<>(authorizationProperties.getPermitAllPaths()); + permitAllPaths.add(this.authorizationProperties.getDashboardUrl()); + permitAllPaths.add(dashboard(authorizationProperties, "/**")); + + http.authorizeHttpRequests(auth -> { + auth.requestMatchers(permitAllPaths.toArray(new String[0])).permitAll(); + auth.requestMatchers(authenticatedPaths.toArray(new String[0])).authenticated(); + SecurityConfigUtils.configureSimpleSecurity(auth, authorizationProperties); + }); + + http.httpBasic(Customizer.withDefaults()); + + http.logout(auth -> { + auth.logoutSuccessHandler(logoutSuccessHandler(authorizationProperties, oauth2TokenUtilsService)); + }); + + http.csrf(AbstractHttpConfigurer::disable); + + http.exceptionHandling(auth -> { + auth.defaultAuthenticationEntryPointFor(new HttpStatusEntryPoint(HttpStatus.UNAUTHORIZED), + new RequestHeaderRequestMatcher("X-Requested-With", "XMLHttpRequest")); + RequestMatcher textHtmlMatcher = new MediaTypeRequestMatcher( + new BrowserDetectingContentNegotiationStrategy(), MediaType.TEXT_HTML); + auth.defaultAuthenticationEntryPointFor( + new LoginUrlAuthenticationEntryPoint(this.authorizationProperties.getLoginProcessingUrl()), + textHtmlMatcher); + auth.defaultAuthenticationEntryPointFor(basicAuthenticationEntryPoint, AnyRequestMatcher.INSTANCE); + }); + + http.oauth2Login(auth -> { + auth.userInfoEndpoint(customizer -> { + customizer.userService(plainOauth2UserService).oidcUserService(oidcUserService); + }); + auth.defaultSuccessUrl(authorizationProperties.getDashboardUrl()); + }); + + http.oauth2ResourceServer(resourceserver -> { + if (opaqueTokenIntrospector != null) { + resourceserver.opaqueToken(opaqueToken -> { + opaqueToken.introspector(opaqueTokenIntrospector); + }); + } + else if (oAuth2ResourceServerProperties.getJwt().getJwkSetUri() != null) { + resourceserver.jwt(jwt -> { + jwt.jwtAuthenticationConverter(grantedAuthoritiesExtractor()); + }); + } + }); + + securityStateBean.setAuthenticationEnabled(true); + + return http.build(); + } + + private static String dashboard(AuthorizationProperties authorizationProperties, String path) { + return authorizationProperties.getDashboardUrl() + path; + } + + private LogoutSuccessHandler logoutSuccessHandler(AuthorizationProperties authorizationProperties, + OAuth2TokenUtilsService oauth2TokenUtilsService) { + AccessTokenClearingLogoutSuccessHandler logoutSuccessHandler = + new AccessTokenClearingLogoutSuccessHandler(oauth2TokenUtilsService); + logoutSuccessHandler.setDefaultTargetUrl(dashboard(authorizationProperties, "/logout-success-oauth.html")); + return logoutSuccessHandler; + } + + private static class BrowserDetectingContentNegotiationStrategy extends HeaderContentNegotiationStrategy { + @Override + public List<MediaType> resolveMediaTypes(NativeWebRequest request) throws HttpMediaTypeNotAcceptableException { + final List<MediaType> supportedMediaTypes = super.resolveMediaTypes(request); + final String userAgent = request.getHeader(HttpHeaders.USER_AGENT); + if (userAgent != null && userAgent.contains("Mozilla/5.0") + && !supportedMediaTypes.contains(MediaType.APPLICATION_JSON)) { + return Collections.singletonList(MediaType.TEXT_HTML); + } + return Collections.singletonList(MediaType.APPLICATION_JSON); + } + } + + private Converter<Jwt, AbstractAuthenticationToken> grantedAuthoritiesExtractor() { + String providerId = OAuthClientConfiguration.calculateDefaultProviderId(authorizationProperties, oauth2ClientProperties); + ProviderRoleMapping providerRoleMapping = authorizationProperties.getProviderRoleMappings() + .get(providerId); + + JwtAuthenticationConverter jwtAuthenticationConverter = + new JwtAuthenticationConverter(); - @Override - protected void configure(HttpSecurity http) throws Exception { - super.configure(http); + MappingJwtGrantedAuthoritiesConverter converter = new MappingJwtGrantedAuthoritiesConverter(); + converter.setAuthorityPrefix(""); +
jwtAuthenticationConverter.setJwtGrantedAuthoritiesConverter(converter); + if (providerRoleMapping != null) { + converter.setAuthoritiesMapping(providerRoleMapping.getRoleMappings()); + converter.setGroupAuthoritiesMapping(providerRoleMapping.getGroupMappings()); + if (StringUtils.hasText(providerRoleMapping.getPrincipalClaimName())) { + jwtAuthenticationConverter.setPrincipalClaimName(providerRoleMapping.getPrincipalClaimName()); + } + } + return jwtAuthenticationConverter; } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DefaultEnvironmentPostProcessor.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DefaultEnvironmentPostProcessor.java index 04b9be420d..c30975ea74 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DefaultEnvironmentPostProcessor.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DefaultEnvironmentPostProcessor.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2019 the original author or authors. + * Copyright 2015-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -26,6 +26,7 @@ import org.springframework.beans.factory.config.YamlPropertiesFactoryBean; import org.springframework.boot.SpringApplication; +import org.springframework.boot.context.config.ConfigDataEnvironmentPostProcessor; import org.springframework.boot.env.EnvironmentPostProcessor; import org.springframework.core.Ordered; import org.springframework.core.env.ConfigurableEnvironment; @@ -49,6 +50,16 @@ public class DefaultEnvironmentPostProcessor implements EnvironmentPostProcessor private static final Logger logger = LoggerFactory.getLogger(DefaultEnvironmentPostProcessor.class); + /** + * The order the processor is invoked. + *
<p>Must execute after the {@link ConfigDataEnvironmentPostProcessor} because they both use the {@code addLast} + * API to add their property source and the default EPP should have lower precedence. + * <p>
    Must execute before the {@code ConfigDataMissingEnvironmentPostProcessor} because the legacy config data + * flag is set in the default dataflow properties and without this flag the server will not start. The config data + * missing has an order of {@code ConfigDataEnvironmentPostProcessor.ORDER + 1000} so we simply anchor below that. + */ + public static final int ORDER = ConfigDataEnvironmentPostProcessor.ORDER + 900; + private final Resource serverResource = new ClassPathResource("/dataflow-server.yml"); private final Resource serverDefaultsResource = new ClassPathResource("META-INF/dataflow-server-defaults.yml"); @@ -106,6 +117,6 @@ public void postProcessEnvironment(ConfigurableEnvironment environment, SpringAp @Override public int getOrder() { - return 0; + return ORDER; } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/H2ServerConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/H2ServerConfiguration.java new file mode 100644 index 0000000000..a099e95198 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/H2ServerConfiguration.java @@ -0,0 +1,91 @@ +/* + * Copyright 2022-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.config; + +import java.sql.SQLException; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.h2.tools.Server; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.beans.factory.config.BeanFactoryPostProcessor; +import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; +import org.springframework.boot.autoconfigure.condition.ConditionalOnClass; +import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * Autostart an embedded H2 database server. 
+ * + * @author Michael Wirth + * @author Corneil du Plessis + */ +@Configuration(proxyBeanMethods = false) +@ConditionalOnClass(Server.class) +@ConditionalOnProperty(name = "spring.dataflow.embedded.database.enabled", havingValue = "true", matchIfMissing = true) +@ConditionalOnExpression("'${spring.datasource.url:#{null}}'.startsWith('jdbc:h2:tcp://localhost')") +public class H2ServerConfiguration { + + private static final Logger logger = LoggerFactory.getLogger(H2ServerConfiguration.class); + + private static final Pattern JDBC_URL_PATTERN = Pattern.compile("^jdbc:h2:tcp://localhost:(?<port>\\d+)"); + + @Bean + public H2ServerBeanFactoryPostProcessor h2ServerBeanFactoryPostProcessor() { + return new H2ServerBeanFactoryPostProcessor(); + } + + @Bean(destroyMethod = "stop") + public Server h2TcpServer(@Value("${spring.datasource.url}") String dataSourceUrl) { + logger.info("Starting H2 Server with URL: " + dataSourceUrl); + + Matcher matcher = JDBC_URL_PATTERN.matcher(dataSourceUrl); + if (!matcher.find()) { + throw new IllegalArgumentException( + "DataSource URL '" + dataSourceUrl + "' does not match regex pattern: " + + JDBC_URL_PATTERN.pattern()); + } + + String port = matcher.group("port"); + try { + return Server.createTcpServer("-ifNotExists", "-tcp", + "-tcpAllowOthers", "-tcpPort", port).start(); + } + catch (SQLException e) { + throw new IllegalStateException(e); + } + + } + + /** + * A {@link BeanFactoryPostProcessor} whose sole job is to ensure that the H2 server is up and running before any + * datasource initialization is attempted. It does this by requesting the H2Server bean which then in turn starts up + * the server. + */ + static class H2ServerBeanFactoryPostProcessor implements BeanFactoryPostProcessor { + + @Override + public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException { + beanFactory.getBean("h2TcpServer"); + } + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/MetricsReplicationEnvironmentPostProcessor.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/MetricsReplicationEnvironmentPostProcessor.java index 629c5cd2e4..7efc2d4fe7 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/MetricsReplicationEnvironmentPostProcessor.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/MetricsReplicationEnvironmentPostProcessor.java @@ -29,7 +29,7 @@ import org.springframework.boot.SpringApplication; import org.springframework.boot.actuate.autoconfigure.metrics.export.influx.InfluxProperties; import org.springframework.boot.actuate.autoconfigure.metrics.export.prometheus.PrometheusProperties; -import org.springframework.boot.actuate.autoconfigure.metrics.export.wavefront.WavefrontProperties; +import org.springframework.boot.actuate.autoconfigure.wavefront.WavefrontProperties; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.boot.env.EnvironmentPostProcessor; import org.springframework.cloud.dataflow.core.RelaxedNames; @@ -37,6 +37,7 @@ import org.springframework.cloud.dataflow.server.config.apps.CommonApplicationProperties; import org.springframework.core.Ordered; import org.springframework.core.env.ConfigurableEnvironment; +import org.springframework.core.env.Environment; import org.springframework.core.env.PropertiesPropertySource; import
org.springframework.util.StringUtils; @@ -44,22 +45,25 @@ * This post-processor helps to replicate the metrics property defined for the DataFlow server to the * spring.cloud.dataflow.applicationProperties.stream.* and spring.cloud.dataflow.applicationProperties.task.* as well. * This allows to reuse the same metrics configuration for all deployed stream applications and launched tasks. - * - * The post-processor also automatically computes some of the the Monitoring Dashboard properties from the server's + *
<p> + * The post-processor also automatically computes some Monitoring Dashboard properties from the server's + * metrics properties. - * + * <p>
    * Only the properties not explicitly set are updated. That means that you can explicitly set any monitoring dashboard or * stream/task metrics and your settings will be honored. * * @author Christian Tzolov + * @author Chris Bono */ public class MetricsReplicationEnvironmentPostProcessor implements EnvironmentPostProcessor, Ordered { private static final Logger logger = LoggerFactory.getLogger(MetricsReplicationEnvironmentPostProcessor.class); private static final String PROPERTY_SOURCE_KEY_NAME = MetricsReplicationEnvironmentPostProcessor.class.getName(); - public static final String MONITORING_PREFIX = retrievePropertyPrefix(DataflowMetricsProperties.class); - public static final String MONITORING_DASHBOARD_PREFIX = MONITORING_PREFIX + ".dashboard"; - public static final String COMMON_APPLICATION_PREFIX = retrievePropertyPrefix(CommonApplicationProperties.class); + private static final String MONITORING_PREFIX = retrievePropertyPrefix(DataflowMetricsProperties.class); + private static final String MONITORING_DASHBOARD_PREFIX = MONITORING_PREFIX + ".dashboard"; + private static final String COMMON_APPLICATION_PREFIX = retrievePropertyPrefix(CommonApplicationProperties.class); + private static final String COMMON_STREAM_PROPS_PREFIX = COMMON_APPLICATION_PREFIX + ".stream."; + private static final String COMMON_TASK_PROPS_PREFIX = COMMON_APPLICATION_PREFIX + ".task."; @Override public void postProcessEnvironment(ConfigurableEnvironment environment, SpringApplication application) { @@ -76,34 +80,20 @@ public void postProcessEnvironment(ConfigurableEnvironment environment, SpringAp // 2. Replicates the server's metrics properties to the applicationProperties.stream // and applicationProperties.task. if (environment.getProperty(MONITORING_PREFIX + ".property-replication", Boolean.class, true)) { - // Callback function that checks if the input property is set the server's configuration. If it is then - // the property is replicated as a common Stream and Task property. - Consumer propertyReplicator = metricsPropertyName -> { - if (environment.containsProperty(metricsPropertyName)) { + // Callback function to handle property replication + Consumer propertyReplicator = metricsPropName -> { + if (environment.containsProperty(metricsPropName)) { try { - String serverPropertyValue = environment.getProperty(metricsPropertyName); - // Overrides only the Stream applicationProperties that have not been set explicitly. - String commonStreamPropertyName = COMMON_APPLICATION_PREFIX + ".stream." + metricsPropertyName; - if (!environment.containsProperty(commonStreamPropertyName)) { - logger.info("Replicate metrics property:" + commonStreamPropertyName + "=" + serverPropertyValue); - // if a property with same key occurs multiple times only the first is set. - additionalProperties.putIfAbsent(commonStreamPropertyName, serverPropertyValue); - } - // Overrides only the Task applicationProperties that have not been set explicitly. - String commonTaskPropertyName = COMMON_APPLICATION_PREFIX + ".task." + metricsPropertyName; - if (!environment.containsProperty(commonTaskPropertyName)) { - logger.info("Replicate metrics property:" + commonTaskPropertyName + "=" + serverPropertyValue); - // if a property with same key occurs multiple times only the first is set. 
- additionalProperties.putIfAbsent(commonTaskPropertyName, serverPropertyValue); - } + String serverPropValue = environment.getProperty(metricsPropName); + ensurePropIsReplicatedExactlyOnceToCommonStreamsAndTasksProps(metricsPropName, serverPropValue, + environment, additionalProperties); } catch (Throwable throwable) { - logger.error("Failed with replicating {}, because of {}", metricsPropertyName, + logger.error("Failed with replicating {}, because of {}", metricsPropName, ExceptionUtils.getRootCauseMessage(throwable)); } } }; - this.replicateServerMetricsPropertiesToStreamAndTask(environment, WavefrontProperties.class, propertyReplicator); this.replicateServerMetricsPropertiesToStreamAndTask(environment, InfluxProperties.class, propertyReplicator); this.replicateServerMetricsPropertiesToStreamAndTask(environment, PrometheusProperties.class, propertyReplicator); @@ -121,32 +111,58 @@ public void postProcessEnvironment(ConfigurableEnvironment environment, SpringAp } } + private void ensurePropIsReplicatedExactlyOnceToCommonStreamsAndTasksProps(String metricsPropName, Object serverPropValue, + Environment environment, Properties additionalProperties) { + ensurePropIsReplicatedExactlyOnceToCommonProps(metricsPropName, serverPropValue, COMMON_STREAM_PROPS_PREFIX, + environment, additionalProperties); + ensurePropIsReplicatedExactlyOnceToCommonProps(metricsPropName, serverPropValue, COMMON_TASK_PROPS_PREFIX, + environment, additionalProperties); + } + + private void ensurePropIsReplicatedExactlyOnceToCommonProps(String metricsPropName, + Object serverPropValue, String commonPropsPrefix, Environment environment, Properties additionalProperties) { + // Only add if not already added explicitly + String commonStreamPropName = commonPropsPrefix + metricsPropName; + if (!environment.containsProperty(commonStreamPropName)) { + logger.info("Replicate metrics property:{}={}", commonStreamPropName, serverPropValue); + // Only add it once + additionalProperties.putIfAbsent(commonStreamPropName, serverPropValue); + } + } + /** - * Checks if the management.metrics.export..enabled property is set to ture for the provided - * meterRegistryPropertyClass. + * Checks if the 'management..metrics.export.enabled' property is set to true for the specified + * meter registry. * - * @param meterRegistryPropertyClass Property class that follows Boot's meter-registry properties convention. - * @param environment Spring configuration environment. - * @return Returns true if the provide class contains {@link ConfigurationProperties} prefix of type: - * management.metrics.export. and the management.metrics.export..enabled - * property is set to true. Returns false otherwise. + * @param meterRegistryConfigPropsClass the SpringBoot configuration properties for the meter registry + * @param environment the application environment + * @return whether the 'management..metrics.export.enabled' property is set to true for the + * specified meter registry class. 
*/ - private boolean isMetricsRegistryEnabled(Class<?> meterRegistryPropertyClass, ConfigurableEnvironment environment) { - String metricsPrefix = retrievePropertyPrefix(meterRegistryPropertyClass); - return !StringUtils.isEmpty(metricsPrefix) && - environment.getProperty(metricsPrefix + ".enabled", Boolean.class, false); + private boolean isMetricsRegistryEnabled(Class<?> meterRegistryConfigPropsClass, ConfigurableEnvironment environment) { + String metricsPrefix = retrievePropertyPrefix(meterRegistryConfigPropsClass); + if (!StringUtils.hasText(metricsPrefix)) { + logger.warn("Meter registry properties class %s is not annotated with @ConfigurationProperties".formatted(meterRegistryConfigPropsClass)); + return false; + } + // Some metrics props have their 'metrics.export' portion factored into nested classes (e.g. Wavefront) but + // some metrics props still contain 'metrics.export' in their config props prefix (e.g. Influx). + if (!metricsPrefix.endsWith(".metrics.export")) { + metricsPrefix += ".metrics.export"; + } + return environment.getProperty(metricsPrefix + ".enabled", Boolean.class, false); } /** - * Retrieve the prefix name from the ConfigurationProperties annotation if present. - * Return null otherwise. - * @param metricsPropertyClass Property class annotated by the {@link ConfigurationProperties} annotation. - * @return Returns the ConfigurationProperties non-empty prefix or value. + * Get the value of the {@code prefix} attribute of the {@link ConfigurationProperties} that the property class is + * annotated with. + * @param metricsPropertyClass property class annotated with the config properties + * @return the value of the prefix of the config properties, or null */ private static String retrievePropertyPrefix(Class<?> metricsPropertyClass) { if (metricsPropertyClass.isAnnotationPresent(ConfigurationProperties.class)) { ConfigurationProperties cp = metricsPropertyClass.getAnnotation(ConfigurationProperties.class); - return StringUtils.isEmpty(cp.prefix()) ? cp.value() : cp.prefix(); + return StringUtils.hasText(cp.prefix()) ?
cp.prefix() : cp.value(); } return null; } @@ -162,14 +178,14 @@ private void inferMonitoringDashboardProperties(ConfigurableEnvironm logger.info("Dashboard type:" + MonitoringDashboardType.WAVEFRONT); properties.setProperty(MONITORING_DASHBOARD_PREFIX + ".type", MonitoringDashboardType.WAVEFRONT.name()); if (!environment.containsProperty(MONITORING_DASHBOARD_PREFIX + ".wavefront.source") - && environment.containsProperty("management.metrics.export.wavefront.source")) { + && environment.containsProperty("management.wavefront.source")) { properties.setProperty(MONITORING_DASHBOARD_PREFIX + ".wavefront.source", - environment.getProperty("management.metrics.export.wavefront.source")); + environment.getProperty("management.wavefront.source")); } if (!environment.containsProperty(MONITORING_DASHBOARD_PREFIX + ".url") && - environment.containsProperty("management.metrics.export.wavefront.uri")) { + environment.containsProperty("management.wavefront.uri")) { properties.setProperty(MONITORING_DASHBOARD_PREFIX + ".url", - environment.getProperty("management.metrics.export.wavefront.uri")); + environment.getProperty("management.wavefront.uri")); } } else if (isMetricsRegistryEnabled(PrometheusProperties.class, environment) @@ -197,9 +213,9 @@ private void replicateServerMetricsPropertiesToStreamAndTask(ConfigurableEnviron Class<?> propertyClass, Consumer<String> propertyReplicator) { try { if (isMetricsRegistryEnabled(propertyClass, environment)) { - // Note: For some meter registries, the management.metrics.export.<meter-registry>.enabled property + // Note: For some meter registries, the management.<meter-registry>.metrics.export.enabled property // is not defined as an explicit field. We need to handle it explicitly. - propertyReplicator.accept(retrievePropertyPrefix(propertyClass) + ".enabled"); + propertyReplicator.accept(retrievePropertyPrefix(propertyClass) + ".metrics.export.enabled"); traversePropertyClassFields(propertyClass, propertyReplicator); } } @@ -212,7 +228,7 @@ private void replicateServerMetricsPropertiesToStreamAndTask(ConfigurableEnviron * Converts the class fields into metrics property candidates and hands them to the replication handler * to process. The metrics prefix is retrieved from the {@link ConfigurationProperties} annotation. * Drops the non-annotated classes. - * + * <p>
* The traversePropertyClassFields iterates and repeats the computation over the class's parent * classes when available. * * @@ -221,7 +237,7 @@ private void replicateServerMetricsPropertiesToStreamAndTask(ConfigurableEnviron */ private void traversePropertyClassFields(Class<?> metricsPropertyClass, Consumer<String> metricsReplicationHandler) { String metricsPrefix = retrievePropertyPrefix(metricsPropertyClass); - if (!StringUtils.isEmpty(metricsPrefix)) { + if (StringUtils.hasText(metricsPrefix)) { do { traverseClassFieldsRecursively(metricsPropertyClass, metricsPrefix, metricsReplicationHandler); // traverse the parent class if not Object. @@ -234,7 +250,7 @@ private void traversePropertyClassFields(Class<?> metricsPropertyClass, Consumer<String> * Iterate over the fields of the provided class. For non-inner class fields generate a metrics property candidate * and pass it to the metrics replication handler for processing. For the inner-class fields extend the * prefix with the name of the field and call traverseClassFieldsRecursively recursively. - * + * <p>
* Use the RelaxedNames.camelCaseToHyphenLower utility to convert the field names into property keys. * * @param metricsPropertyClass Class to be processed. @@ -245,13 +261,16 @@ private void traverseClassFieldsRecursively(Class<?> metricsPropertyClass, String metricsPrefix, Consumer<String> private void traverseClassFieldsRecursively(Class<?> metricsPropertyClass, String metricsPrefix, Consumer<String> metricsReplicationHandler) { for (Field field : metricsPropertyClass.getDeclaredFields()) { - if (field.getType().isMemberClass() && Modifier.isStatic(field.getType().getModifiers())) { + var isStaticMemberClass = field.getType().isMemberClass() && Modifier.isStatic(field.getType().getModifiers()); + if (isStaticMemberClass && !field.getType().isEnum()) { // traverse the inner class recursively. - String innerMetricsPrefix = metricsPrefix + "." + RelaxedNames.camelCaseToHyphenLower(field.getName()); + String innerMetricsPrefix = metricsPrefix + "." + + RelaxedNames.camelCaseToHyphenLower(field.getName()); traverseClassFieldsRecursively(field.getType(), innerMetricsPrefix, metricsReplicationHandler); } else { - metricsReplicationHandler.accept(metricsPrefix + "." + RelaxedNames.camelCaseToHyphenLower(field.getName())); + metricsReplicationHandler + .accept(metricsPrefix + "." + RelaxedNames.camelCaseToHyphenLower(field.getName())); } } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/SpringDocAutoConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/SpringDocAutoConfiguration.java new file mode 100644 index 0000000000..99343cbddd --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/SpringDocAutoConfiguration.java @@ -0,0 +1,132 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.springframework.cloud.dataflow.server.config; + +import jakarta.annotation.PostConstruct; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springdoc.core.configuration.SpringDocConfiguration; +import org.springdoc.core.properties.SpringDocConfigProperties; +import org.springdoc.core.properties.SwaggerUiConfigProperties; +import org.springdoc.webmvc.ui.SwaggerConfig; + +import org.springframework.boot.autoconfigure.AutoConfiguration; +import org.springframework.boot.autoconfigure.AutoConfigureAfter; +import org.springframework.boot.autoconfigure.condition.ConditionalOnBean; +import org.springframework.boot.autoconfigure.condition.ConditionalOnClass; +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; +import org.springframework.boot.web.servlet.FilterRegistrationBean; +import org.springframework.cloud.dataflow.server.support.SpringDocJsonDecodeFilter; +import org.springframework.context.annotation.Bean; +import org.springframework.security.config.annotation.web.configuration.WebSecurityCustomizer; + +/** + * Makes SpringDoc publicly available without any authentication required, by initializing a {@link WebSecurityCustomizer} and + * applying all paths of SpringDoc to be ignored. Also applies a filter registration bean to unescape JSON content for the + * SpringDoc frontend. + * + * @author Tobias Soloschenko + */ +@AutoConfiguration +@ConditionalOnClass({ SpringDocConfigProperties.class, SwaggerUiConfigProperties.class }) +@ConditionalOnBean({ SpringDocConfigProperties.class, SwaggerUiConfigProperties.class }) +@AutoConfigureAfter({ SpringDocConfiguration.class, SwaggerConfig.class }) +public class SpringDocAutoConfiguration { + + private static final Logger logger = LoggerFactory.getLogger(SpringDocAutoConfiguration.class); + + private final SpringDocConfigProperties springDocConfigProperties; + + private final SwaggerUiConfigProperties swaggerUiConfigProperties; + + /** + * Creates the SpringDocAutoConfiguration with the given properties. + * + * @param springDocConfigProperties the spring doc config properties + * @param swaggerUiConfigProperties the swagger ui config properties + */ + public SpringDocAutoConfiguration(SpringDocConfigProperties springDocConfigProperties, + SwaggerUiConfigProperties swaggerUiConfigProperties) { + this.springDocConfigProperties = springDocConfigProperties; + this.swaggerUiConfigProperties = swaggerUiConfigProperties; + } + + @PostConstruct + void init() { + logger.info("SpringDoc enabled"); + } + + /** + * Creates a web security customizer for Spring Security which makes the SpringDoc frontend publicly available. + * + * @return a web security customizer with security settings for SpringDoc + */ + @Bean + @ConditionalOnMissingBean + public WebSecurityCustomizer springDocWebSecurityCustomizer() { + return (webSecurity -> webSecurity.ignoring().requestMatchers( + "/swagger-ui/**", + getApiDocsPathContext() + "/**", + swaggerUiConfigProperties.getPath(), + swaggerUiConfigProperties.getConfigUrl(), + swaggerUiConfigProperties.getValidatorUrl(), + swaggerUiConfigProperties.getOauth2RedirectUrl(), + springDocConfigProperties.getWebjars().getPrefix(), + springDocConfigProperties.getWebjars().getPrefix() + "/**")); + } + + /** + * Applies {@link SpringDocJsonDecodeFilter} to the filter chain which decodes the JSON of ApiDocs and SwaggerUi so that the SpringDoc frontend is able
+ * to read it. Spring Cloud Data Flow however requires the JSON to be escaped and wrapped into quotes, because the + * Angular UI frontend is using it that way. + * + * @return a filter registration bean which unescapes the content of the JSON endpoints of SpringDoc before it is returned. + */ + @Bean + @ConditionalOnMissingBean(name = "springDocJsonDecodeFilterRegistration") + public FilterRegistrationBean<SpringDocJsonDecodeFilter> springDocJsonDecodeFilterRegistration() { + String apiDocsPathContext = getApiDocsPathContext(); + String swaggerUiConfigContext = getSwaggerUiConfigContext(); + FilterRegistrationBean<SpringDocJsonDecodeFilter> registrationBean = new FilterRegistrationBean<>(); + registrationBean.setFilter(new SpringDocJsonDecodeFilter()); + registrationBean.addUrlPatterns(apiDocsPathContext, apiDocsPathContext + "/*", swaggerUiConfigContext, + swaggerUiConfigContext + "/*"); + return registrationBean; + } + + /** + * Gets the SwaggerUi config context. For example, the default configuration for the SwaggerUi config is /v3/api-docs/swagger-config, + * which results in a context of /v3/api-docs. + * + * @return the SwaggerUi config path context + */ + private String getSwaggerUiConfigContext() { + String swaggerUiConfigUrl = swaggerUiConfigProperties.getConfigUrl(); + return swaggerUiConfigUrl.substring(0, swaggerUiConfigUrl.lastIndexOf("/")); + } + + /** + * Gets the ApiDocs context path. For example, the default configuration for the ApiDocs path is /v3/api-docs, + * which results in a context of /v3. + * + * @return the api docs path context + */ + private String getApiDocsPathContext() { + String apiDocsPath = springDocConfigProperties.getApiDocs().getPath(); + return apiDocsPath.substring(0, apiDocsPath.lastIndexOf("/")); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/LocalTaskPlatformFactory.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/LocalTaskPlatformFactory.java index 140955b120..a8da638e0e 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/LocalTaskPlatformFactory.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/LocalTaskPlatformFactory.java @@ -42,7 +42,7 @@ public LocalTaskPlatformFactory(LocalPlatformProperties platformProperties, Sche @Override public TaskPlatform createTaskPlatform() { TaskPlatform taskPlatform = super.createTaskPlatform(); - if (taskPlatform.getLaunchers().size() == 0) { + if (taskPlatform.getLaunchers().isEmpty()) { taskPlatform.setLaunchers(Collections.singletonList(createDefaultLauncher())); } return taskPlatform; @@ -68,14 +68,13 @@ private Launcher doCreateLauncher(String account, LocalDeployerProperties deploy private String prettyPrintLocalDeployerProperties(LocalDeployerProperties localDeployerProperties) { StringBuilder builder = new StringBuilder(); if (localDeployerProperties.getJavaOpts() != null) { - builder.append("JavaOpts = [" + localDeployerProperties.getJavaOpts() + "], "); + builder.append("JavaOpts = [").append(localDeployerProperties.getJavaOpts()).append("], "); } - builder.append("ShutdownTimeout = [" + localDeployerProperties.getShutdownTimeout() + "], "); - builder.append("EnvVarsToInherit = [" - + StringUtils.arrayToCommaDelimitedString(localDeployerProperties.getEnvVarsToInherit()) + "], "); - builder.append("JavaCmd = [" + localDeployerProperties.getJavaCmd() + "], "); - builder.append("WorkingDirectoriesRoot = 
[" + localDeployerProperties.getWorkingDirectoriesRoot() + "], "); - builder.append("DeleteFilesOnExit = [" + localDeployerProperties.isDeleteFilesOnExit() + "]"); + builder.append("ShutdownTimeout = [").append(localDeployerProperties.getShutdownTimeout()).append("], "); + builder.append("EnvVarsToInherit = [").append(StringUtils.arrayToCommaDelimitedString(localDeployerProperties.getEnvVarsToInherit())).append("], "); + builder.append("JavaCmd = [").append(localDeployerProperties.getJavaCmd()).append("], "); + builder.append("WorkingDirectoriesRoot = [").append(localDeployerProperties.getWorkingDirectoriesRoot()).append("], "); + builder.append("DeleteFilesOnExit = [").append(localDeployerProperties.isDeleteFilesOnExit()).append("]"); return builder.toString(); } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/SchedulerConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/SchedulerConfiguration.java index 2e0c9ea2f0..dd5d13c77d 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/SchedulerConfiguration.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/SchedulerConfiguration.java @@ -18,9 +18,6 @@ import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.autoconfigure.condition.AllNestedConditions; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; @@ -35,12 +32,14 @@ import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; import org.springframework.cloud.dataflow.server.service.SchedulerService; import org.springframework.cloud.dataflow.server.service.SchedulerServiceProperties; +import org.springframework.cloud.dataflow.server.service.TaskExecutionInfoService; import org.springframework.cloud.dataflow.server.service.impl.ComposedTaskRunnerConfigurationProperties; import org.springframework.cloud.dataflow.server.service.impl.DefaultSchedulerService; import org.springframework.cloud.dataflow.server.service.impl.TaskConfigurationProperties; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; +import org.springframework.core.env.PropertyResolver; import org.springframework.core.io.ResourceLoader; /** @@ -57,8 +56,6 @@ SchedulerServiceProperties.class }) public class SchedulerConfiguration { - private static Logger logger = LoggerFactory.getLogger(SchedulerConfiguration.class); - @Value("${spring.cloud.dataflow.server.uri:}") private String dataflowServerUri; @@ -72,12 +69,24 @@ public SchedulerService schedulerService(CommonApplicationProperties commonAppli ApplicationConfigurationMetadataResolver metaDataResolver, SchedulerServiceProperties schedulerServiceProperties, AuditRecordService auditRecordService, + TaskExecutionInfoService taskExecutionInfoService, + PropertyResolver propertyResolver, ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties) { return new DefaultSchedulerService(commonApplicationProperties, - taskPlatforms, taskDefinitionRepository, registry, resourceLoader, - taskConfigurationProperties, dataSourceProperties, - this.dataflowServerUri, metaDataResolver, 
schedulerServiceProperties, auditRecordService, - composedTaskRunnerConfigurationProperties); + taskPlatforms, + taskDefinitionRepository, + registry, + resourceLoader, + taskConfigurationProperties, + dataSourceProperties, + this.dataflowServerUri, + metaDataResolver, + schedulerServiceProperties, + auditRecordService, + taskExecutionInfoService, + propertyResolver, + composedTaskRunnerConfigurationProperties + ); } public static class SchedulerConfigurationPropertyChecker extends AllNestedConditions { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/TaskConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/TaskConfiguration.java index 68f08142af..2b0fe0106a 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/TaskConfiguration.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/TaskConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2020 the original author or authors. + * Copyright 2016-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -13,18 +13,16 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.springframework.cloud.dataflow.server.config.features; import java.util.List; import javax.sql.DataSource; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.explore.support.JobExplorerFactoryBean; -import org.springframework.batch.core.launch.support.SimpleJobLauncher; -import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService; @@ -34,12 +32,14 @@ import org.springframework.cloud.dataflow.registry.service.AppRegistryService; import org.springframework.cloud.dataflow.server.DockerValidatorProperties; import org.springframework.cloud.dataflow.server.batch.JobService; -import org.springframework.cloud.dataflow.server.batch.SimpleJobServiceFactoryBean; +import org.springframework.cloud.dataflow.server.config.DataFlowTaskConfiguration; import org.springframework.cloud.dataflow.server.config.apps.CommonApplicationProperties; import org.springframework.cloud.dataflow.server.job.LauncherRepository; import org.springframework.cloud.dataflow.server.repository.DataflowJobExecutionDao; import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDao; import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao; +import org.springframework.cloud.dataflow.server.repository.DefaultTaskDefinitionReader; +import org.springframework.cloud.dataflow.server.repository.DefaultTaskDeploymentReader; import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; import org.springframework.cloud.dataflow.server.repository.TaskDeploymentRepository; import 
org.springframework.cloud.dataflow.server.service.DeployerConfigurationMetadataResolver; @@ -60,15 +60,23 @@ import org.springframework.cloud.dataflow.server.service.impl.DefaultTaskSaveService; import org.springframework.cloud.dataflow.server.service.impl.TaskAppDeploymentRequestCreator; import org.springframework.cloud.dataflow.server.service.impl.TaskConfigurationProperties; +import org.springframework.cloud.dataflow.server.task.DataflowTaskExplorerConfiguration; +import org.springframework.cloud.dataflow.server.task.DataflowTaskExecutionQueryDao; +import org.springframework.cloud.dataflow.server.task.DataflowTaskExplorer; +import org.springframework.cloud.dataflow.server.task.TaskDefinitionReader; +import org.springframework.cloud.dataflow.server.task.TaskDeploymentReader; +import org.springframework.cloud.dataflow.server.task.impl.DefaultDataFlowTaskExecutionQueryDao; import org.springframework.cloud.deployer.spi.scheduler.Scheduler; -import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.cloud.task.repository.TaskRepository; +import org.springframework.cloud.task.repository.support.SimpleTaskRepository; +import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Profile; +import org.springframework.core.env.PropertyResolver; import org.springframework.data.map.repository.config.EnableMapRepositories; import org.springframework.lang.Nullable; -import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.annotation.EnableTransactionManagement; /** @@ -80,22 +88,29 @@ * @author Gunnar Hillert * @author Christian Tzolov * @author David Turanski + * @author Corneil du Plessis */ -@Configuration +@Configuration(proxyBeanMethods = false) @ConditionalOnTasksEnabled -@EnableConfigurationProperties({ TaskConfigurationProperties.class, CommonApplicationProperties.class, - DockerValidatorProperties.class, LocalPlatformProperties.class, ComposedTaskRunnerConfigurationProperties.class +@EnableConfigurationProperties({ + TaskConfigurationProperties.class, + CommonApplicationProperties.class, + DockerValidatorProperties.class, + LocalPlatformProperties.class, + ComposedTaskRunnerConfigurationProperties.class }) @EnableMapRepositories(basePackages = "org.springframework.cloud.dataflow.server.job") @EnableTransactionManagement +@Import({ + TaskConfiguration.TaskDeleteServiceConfig.class, + DataflowTaskExplorerConfiguration.class, + DataFlowTaskConfiguration.class +}) public class TaskConfiguration { @Autowired DataSourceProperties dataSourceProperties; - @Autowired(required = false) - SchedulerService schedulerService; - @Value("${spring.cloud.dataflow.server.uri:}") private String dataflowServerUri; @@ -105,9 +120,22 @@ public class TaskConfiguration { @Autowired private ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties; + @Bean + @ConditionalOnMissingBean + public TaskDefinitionReader taskDefinitionReader(TaskDefinitionRepository taskDefinitionRepository) { + return new DefaultTaskDefinitionReader(taskDefinitionRepository); + } + + @Bean + @ConditionalOnMissingBean + public TaskDeploymentReader taskDeploymentReader(TaskDeploymentRepository repository) { + return new DefaultTaskDeploymentReader(repository); + } + @Bean public 
DeployerConfigurationMetadataResolver deployerConfigurationMetadataResolver( - TaskConfigurationProperties taskConfigurationProperties) { + TaskConfigurationProperties taskConfigurationProperties + ) { return new DeployerConfigurationMetadataResolver(taskConfigurationProperties.getDeployerProperties()); } @@ -115,22 +143,25 @@ public DeployerConfigurationMetadataResolv public LauncherInitializationService launcherInitializationService( LauncherRepository launcherRepository, List<TaskPlatform> platforms, - DeployerConfigurationMetadataResolver resolver) { + DeployerConfigurationMetadataResolver resolver + ) { return new LauncherInitializationService(launcherRepository, platforms, resolver); } /** * The default profile is active when no other profiles are active. This is configured so * that several tests will pass without having to explicitly enable the local profile. - * @param localPlatformProperties the local platform properties - * @param localScheduler the local scheduler * + * @param localPlatformProperties the local platform properties + * @param localScheduler the local scheduler * @return the task platform */ - @Profile({ "local", "default" }) + @Profile({"local", "default"}) @Bean - public TaskPlatform localTaskPlatform(LocalPlatformProperties localPlatformProperties, - @Nullable Scheduler localScheduler) { + public TaskPlatform localTaskPlatform( + LocalPlatformProperties localPlatformProperties, + @Nullable Scheduler localScheduler + ) { TaskPlatform taskPlatform = new LocalTaskPlatformFactory(localPlatformProperties, localScheduler) .createTaskPlatform(); taskPlatform.setPrimary(true); @@ -138,107 +169,150 @@ public TaskPlatform localTaskPlatform(LocalPlatformPrope } @Bean - public TaskExecutionInfoService taskDefinitionRetriever(AppRegistryService registry, - TaskExplorer taskExplorer, TaskDefinitionRepository taskDefinitionRepository, + public TaskExecutionInfoService taskDefinitionRetriever( + AppRegistryService registry, + DataflowTaskExplorer taskExplorer, + TaskDefinitionRepository taskDefinitionRepository, TaskConfigurationProperties taskConfigurationProperties, - LauncherRepository launcherRepository, List<TaskPlatform> taskPlatforms, - ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties) { + LauncherRepository launcherRepository, + List<TaskPlatform> taskPlatforms, + ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties + ) { return new DefaultTaskExecutionInfoService(dataSourceProperties, registry, taskExplorer, taskDefinitionRepository, taskConfigurationProperties, launcherRepository, taskPlatforms, composedTaskRunnerConfigurationProperties); } @Bean - public TaskDeleteService deleteTaskService(TaskExplorer taskExplorer, LauncherRepository launcherRepository, - TaskDefinitionRepository taskDefinitionRepository, TaskDeploymentRepository taskDeploymentRepository, - AuditRecordService auditRecordService, - DataflowTaskExecutionDao dataflowTaskExecutionDao, - DataflowJobExecutionDao dataflowJobExecutionDao, - DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao) { - return new DefaultTaskDeleteService(taskExplorer, launcherRepository, taskDefinitionRepository, - taskDeploymentRepository, - auditRecordService, - dataflowTaskExecutionDao, - dataflowJobExecutionDao, - dataflowTaskExecutionMetadataDao, - this.schedulerService); - } - - @Bean - public TaskSaveService saveTaskService(TaskDefinitionRepository taskDefinitionRepository, - AuditRecordService auditRecordService, 
AppRegistryService registry) { + public TaskSaveService saveTaskService( + TaskDefinitionRepository taskDefinitionRepository, + AuditRecordService auditRecordService, AppRegistryService registry + ) { return new DefaultTaskSaveService(taskDefinitionRepository, auditRecordService, registry); } @Bean - public TaskExecutionCreationService taskExecutionRepositoryService(TaskRepository taskRepository) { + public TaskExecutionCreationService taskExecutionRepositoryService( + TaskRepository taskRepository + ) { return new DefaultTaskExecutionRepositoryService(taskRepository); } @Bean public TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator( CommonApplicationProperties commonApplicationProperties, - ApplicationConfigurationMetadataResolver metadataResolver) { + ApplicationConfigurationMetadataResolver metadataResolver + ) { return new TaskAppDeploymentRequestCreator(commonApplicationProperties, metadataResolver, dataflowServerUri); } @Bean - public TaskExecutionService taskService(LauncherRepository launcherRepository, - AuditRecordService auditRecordService, - TaskRepository taskRepository, - TaskExecutionInfoService taskExecutionInfoService, - TaskDeploymentRepository taskDeploymentRepository, - TaskExecutionCreationService taskExecutionRepositoryService, - TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator, - TaskExplorer taskExplorer, - DataflowTaskExecutionDao dataflowTaskExecutionDao, - DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao, - @Nullable OAuth2TokenUtilsService oauth2TokenUtilsService, - TaskSaveService taskSaveService) { - DefaultTaskExecutionService defaultTaskExecutionService = new DefaultTaskExecutionService( - launcherRepository, auditRecordService, taskRepository, - taskExecutionInfoService, taskDeploymentRepository, taskExecutionRepositoryService, - taskAppDeploymentRequestCreator, taskExplorer, dataflowTaskExecutionDao, - dataflowTaskExecutionMetadataDao, oauth2TokenUtilsService, taskSaveService, - this.taskConfigurationProperties, this.composedTaskRunnerConfigurationProperties); - defaultTaskExecutionService.setAutoCreateTaskDefinitions(this.taskConfigurationProperties.isAutoCreateTaskDefinitions()); - return defaultTaskExecutionService; + public TaskRepository taskRepository(DataSource dataSource) { + TaskExecutionDaoFactoryBean taskExecutionDaoFactoryBean = new TaskExecutionDaoFactoryBean(dataSource, "TASK_"); + return new SimpleTaskRepository(taskExecutionDaoFactoryBean); } @Bean - public TaskJobService taskJobExecutionRepository(JobService service, TaskExplorer taskExplorer, - TaskDefinitionRepository taskDefinitionRepository, TaskExecutionService taskExecutionService) { - return new DefaultTaskJobService(service, taskExplorer, taskDefinitionRepository, taskExecutionService); + public DataflowTaskExecutionQueryDao dataflowTaskExecutionQueryDao( + DataSource dataSource) { + return new DefaultDataFlowTaskExecutionQueryDao(dataSource); } - @Bean - public SimpleJobServiceFactoryBean simpleJobServiceFactoryBean(DataSource dataSource, - JobRepositoryFactoryBean repositoryFactoryBean, JobExplorer jobExplorer, - PlatformTransactionManager dataSourceTransactionManager) throws Exception { - SimpleJobServiceFactoryBean factoryBean = new SimpleJobServiceFactoryBean(); - factoryBean.setDataSource(dataSource); - factoryBean.setJobRepository(repositoryFactoryBean.getObject()); - factoryBean.setJobLauncher(new SimpleJobLauncher()); - factoryBean.setDataSource(dataSource); - factoryBean.setJobExplorer(jobExplorer); - 
factoryBean.setTransactionManager(dataSourceTransactionManager); - return factoryBean; + @Configuration + public static class TaskExecutionServiceConfig { + @Bean + public TaskExecutionService taskService( + PropertyResolver propertyResolver, + TaskConfigurationProperties taskConfigurationProperties, + ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties, + LauncherRepository launcherRepository, + AuditRecordService auditRecordService, + TaskRepository taskRepository, + TaskExecutionInfoService taskExecutionInfoService, + TaskDeploymentRepository taskDeploymentRepository, + TaskDefinitionRepository taskDefinitionRepository, + TaskExecutionCreationService taskExecutionRepositoryService, + TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator, + DataflowTaskExplorer taskExplorer, + DataflowTaskExecutionDao dataflowTaskExecutionDao, + DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao, + DataflowTaskExecutionQueryDao dataflowTaskExecutionQueryDao, + @Nullable OAuth2TokenUtilsService oauth2TokenUtilsService, + TaskSaveService taskSaveService + ) { + DefaultTaskExecutionService defaultTaskExecutionService = new DefaultTaskExecutionService( + propertyResolver, + launcherRepository, + auditRecordService, + taskRepository, + taskExecutionInfoService, + taskDeploymentRepository, + taskDefinitionRepository, + taskExecutionRepositoryService, + taskAppDeploymentRequestCreator, + taskExplorer, + dataflowTaskExecutionDao, + dataflowTaskExecutionMetadataDao, + dataflowTaskExecutionQueryDao, + oauth2TokenUtilsService, + taskSaveService, + taskConfigurationProperties, + composedTaskRunnerConfigurationProperties); + defaultTaskExecutionService.setAutoCreateTaskDefinitions(taskConfigurationProperties.isAutoCreateTaskDefinitions()); + return defaultTaskExecutionService; + } } - @Bean - public JobExplorerFactoryBean jobExplorerFactoryBean(DataSource dataSource) { - JobExplorerFactoryBean jobExplorerFactoryBean = new JobExplorerFactoryBean(); - jobExplorerFactoryBean.setDataSource(dataSource); - return jobExplorerFactoryBean; + @Configuration(proxyBeanMethods = false) + public static class TaskJobServiceConfig { + @Bean + public TaskJobService taskJobExecutionRepository( + JobService service, + DataflowTaskExplorer taskExplorer, + TaskDefinitionRepository taskDefinitionRepository, + TaskExecutionService taskExecutionService, + LauncherRepository launcherRepository, TaskConfigurationProperties taskConfigurationProperties) { + return new DefaultTaskJobService( + service, + taskExplorer, + taskDefinitionRepository, + taskExecutionService, + launcherRepository, + taskConfigurationProperties + ); + } } - @Bean - public JobRepositoryFactoryBean jobRepositoryFactoryBean(DataSource dataSource, - PlatformTransactionManager platformTransactionManager) { - JobRepositoryFactoryBean repositoryFactoryBean = new JobRepositoryFactoryBean(); - repositoryFactoryBean.setDataSource(dataSource); - repositoryFactoryBean.setTransactionManager(platformTransactionManager); - return repositoryFactoryBean; + @Configuration(proxyBeanMethods = false) + public static class TaskDeleteServiceConfig { + @Bean + public TaskDeleteService deleteTaskService( + DataflowTaskExplorer taskExplorer, + LauncherRepository launcherRepository, + TaskDefinitionRepository taskDefinitionRepository, + TaskDeploymentRepository taskDeploymentRepository, + AuditRecordService auditRecordService, + DataflowTaskExecutionDao dataflowTaskExecutionDao, + DataflowJobExecutionDao dataflowJobExecutionDao, + 
DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao, + TaskConfigurationProperties taskConfigurationProperties, + DataSource dataSource, + @Autowired(required = false) SchedulerService schedulerService + ) { + return new DefaultTaskDeleteService( + taskExplorer, + launcherRepository, + taskDefinitionRepository, + taskDeploymentRepository, + auditRecordService, + dataflowTaskExecutionDao, + dataflowJobExecutionDao, + dataflowTaskExecutionMetadataDao, + schedulerService, + taskConfigurationProperties, + dataSource + ); + } } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/web/WebConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/web/WebConfiguration.java index 7d2aa3b9d1..b7d5d2be98 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/web/WebConfiguration.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/web/WebConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2019 the original author or authors. + * Copyright 2015-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,44 +15,36 @@ */ package org.springframework.cloud.dataflow.server.config.web; -import java.sql.SQLException; import java.util.Arrays; import java.util.Locale; import java.util.TimeZone; -import javax.servlet.ServletContext; - import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.datatype.jdk8.Jdk8Module; import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import io.micrometer.core.instrument.LongTaskTimer; import io.micrometer.core.instrument.Metrics; import io.micrometer.core.instrument.Tags; -import org.h2.tools.Server; -import org.slf4j.LoggerFactory; +import jakarta.servlet.ServletContext; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.item.ExecutionContext; import org.springframework.beans.BeansException; -import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.config.BeanPostProcessor; import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication; import org.springframework.boot.autoconfigure.http.HttpMessageConverters; import org.springframework.boot.autoconfigure.jackson.Jackson2ObjectMapperBuilderCustomizer; import org.springframework.boot.web.servlet.ServletContextInitializer; -import org.springframework.cloud.dataflow.rest.support.jackson.ExecutionContextJacksonMixIn; import org.springframework.cloud.dataflow.rest.support.jackson.ISO8601DateFormatWithMilliSeconds; -import org.springframework.cloud.dataflow.rest.support.jackson.StepExecutionJacksonMixIn; +import org.springframework.cloud.dataflow.rest.support.jackson.Jackson2DataflowModule; import org.springframework.context.ApplicationListener; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.event.ContextClosedEvent; +import org.springframework.format.FormatterRegistry; import org.springframework.hateoas.server.core.DefaultLinkRelationProvider; import org.springframework.http.converter.HttpMessageConverter; import org.springframework.http.converter.ResourceHttpMessageConverter; +import 
org.springframework.http.converter.StringHttpMessageConverter; import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter; -import org.springframework.util.Assert; -import org.springframework.util.StringUtils; import org.springframework.web.servlet.config.annotation.PathMatchConfigurer; import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; @@ -65,51 +57,24 @@ * @author Mark Pollack * @author Christian Tzolov * @author David Turanski + * @author Michael Wirth + * @author Chris Bono + * @author Corneil du Plessis */ -@Configuration +@Configuration(proxyBeanMethods = false) @ConditionalOnWebApplication public class WebConfiguration implements ServletContextInitializer, ApplicationListener<ContextClosedEvent> { - private static final org.slf4j.Logger logger = LoggerFactory.getLogger(WebConfiguration.class); - private static final String REL_PROVIDER_BEAN_NAME = "defaultRelProvider"; - @Value("${spring.datasource.url:#{null}}") - private String dataSourceUrl; - - private Server server = null; private LongTaskTimer.Sample longTaskSample; - public Server initH2TCPServer() { - logger.info("Starting H2 Server with URL: " + dataSourceUrl); - try { - this.server = Server - .createTcpServer("-ifNotExists", "-tcp", "-tcpAllowOthers", "-tcpPort", getH2Port(dataSourceUrl)) - .start(); - } - catch (SQLException e) { - throw new IllegalStateException(e); - } - return server; - } - - private String getH2Port(String url) { - String[] tokens = StringUtils.tokenizeToStringArray(url, ":"); - Assert.isTrue(tokens.length >= 5, "URL not properly formatted"); - return tokens[4].substring(0, tokens[4].indexOf("/")); - } - @Override public void onStartup(ServletContext servletContext) { LongTaskTimer longTaskTimer = LongTaskTimer .builder("spring.cloud.dataflow.server").description("Spring Cloud Data Flow duration timer") .tags(Tags.empty()).register(Metrics.globalRegistry); this.longTaskSample = longTaskTimer.start(); - - if (StringUtils.hasText(dataSourceUrl) && dataSourceUrl.startsWith("jdbc:h2:tcp://localhost:")) { - logger.info("Start Embedded H2"); - initH2TCPServer(); - } } @Bean @@ -118,7 +83,7 @@ public HttpMessageConverters messageConverters(ObjectMapper objectMapper) { // Prevent default converters false, Arrays.<HttpMessageConverter<?>>asList(new MappingJackson2HttpMessageConverter(objectMapper), - new ResourceHttpMessageConverter())); + new ResourceHttpMessageConverter(), new StringHttpMessageConverter())); } @Bean @@ -129,6 +94,10 @@ public WebMvcConfigurer configurer() { public void configurePathMatch(PathMatchConfigurer configurer) { configurer.setUseSuffixPatternMatch(false); } + + @Override + public void addFormatters(FormatterRegistry registry) { + } }; } @@ -136,12 +105,7 @@ public void configurePathMatch(PathMatchConfigurer configurer) { public Jackson2ObjectMapperBuilderCustomizer dataflowObjectMapperBuilderCustomizer() { return (builder) -> { builder.dateFormat(new ISO8601DateFormatWithMilliSeconds(TimeZone.getDefault(), Locale.getDefault(), true)); - // apply SCDF Batch Mixins to - // ignore the JobExecution in StepExecution to prevent infinite loop. - // https://github.com/spring-projects/spring-hateoas/issues/333 - builder.mixIn(StepExecution.class, StepExecutionJacksonMixIn.class); - builder.mixIn(ExecutionContext.class, ExecutionContextJacksonMixIn.class); - builder.modules(new JavaTimeModule(), new Jdk8Module()); + builder.modules(new JavaTimeModule(), new Jdk8Module(), new Jackson2DataflowModule()); }; }
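The customizer change above swaps individually registered Batch mix-ins for a single Jackson2DataflowModule. As a rough sketch of that packaging approach (with a hypothetical module name standing in for the real Jackson2DataflowModule), the mix-ins move into a Jackson SimpleModule and the Boot customizer registers the module as one unit:

```java
import com.fasterxml.jackson.databind.module.SimpleModule;

import org.springframework.boot.autoconfigure.jackson.Jackson2ObjectMapperBuilderCustomizer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration(proxyBeanMethods = false)
class JacksonCustomizationSketch {

	// Hypothetical stand-in for Jackson2DataflowModule: a SimpleModule can carry
	// mix-ins, serializers and deserializers as a single registerable unit.
	static class DataflowStyleModule extends SimpleModule {
		DataflowStyleModule() {
			super("dataflow-style-sketch");
			// e.g. setMixInAnnotation(StepExecution.class, StepExecutionJacksonMixIn.class);
		}
	}

	@Bean
	Jackson2ObjectMapperBuilderCustomizer dataflowSketchCustomizer() {
		// Boot applies every customizer to the auto-configured ObjectMapper builder.
		// Note: builder.modules(..) replaces the configured module list, which is why
		// the hunk above re-registers JavaTimeModule and Jdk8Module alongside the new module.
		return builder -> builder.modules(new DataflowStyleModule());
	}
}
```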
@@ -174,9 +138,6 @@ public void onApplicationEvent(ContextClosedEvent event) { this.longTaskSample.stop(); this.longTaskSample = null; } - if (this.server != null) { - this.server.stop(); - logger.info("Embedded H2 server stopped!"); - } } + } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AboutController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AboutController.java index cefdd6a464..0df73f449d 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AboutController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AboutController.java @@ -17,14 +17,25 @@ import java.util.ArrayList; import java.util.List; - -import org.apache.http.conn.ssl.NoopHostnameVerifier; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; +import java.util.Map; + +import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; +import org.apache.hc.client5.http.impl.classic.HttpClients; +import org.apache.hc.client5.http.impl.io.BasicHttpClientConnectionManager; +import org.apache.hc.client5.http.socket.ConnectionSocketFactory; +import org.apache.hc.client5.http.socket.PlainConnectionSocketFactory; +import org.apache.hc.client5.http.ssl.NoopHostnameVerifier; +import org.apache.hc.client5.http.ssl.SSLConnectionSocketFactory; +import org.apache.hc.core5.http.config.Lookup; +import org.apache.hc.core5.http.config.RegistryBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.ObjectProvider; import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.actuate.info.BuildInfoContributor; +import org.springframework.boot.actuate.info.GitInfoContributor; +import org.springframework.boot.actuate.info.Info; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.cloud.common.security.support.SecurityStateBean; import org.springframework.cloud.dataflow.core.Launcher; @@ -37,6 +48,7 @@ import org.springframework.cloud.dataflow.rest.resource.about.RuntimeEnvironmentDetails; import org.springframework.cloud.dataflow.rest.resource.about.SecurityInfo; import org.springframework.cloud.dataflow.rest.resource.about.VersionInfo; +import org.springframework.cloud.dataflow.rest.util.HttpUtils; import org.springframework.cloud.dataflow.server.config.DataflowMetricsProperties; import org.springframework.cloud.dataflow.server.config.VersionInfoProperties; import org.springframework.cloud.dataflow.server.config.features.FeaturesProperties; @@ -45,7 +57,6 @@ import org.springframework.cloud.deployer.spi.core.RuntimeEnvironmentInfo; import org.springframework.cloud.deployer.spi.task.TaskLauncher; import org.springframework.hateoas.server.ExposesResourceFor; -import org.springframework.hateoas.server.mvc.WebMvcLinkBuilder; import org.springframework.http.HttpMethod; import org.springframework.http.HttpStatus; import 
org.springframework.http.ResponseEntity; @@ -54,15 +65,18 @@ import org.springframework.security.core.Authentication; import org.springframework.security.core.GrantedAuthority; import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; import org.springframework.web.client.HttpClientErrorException; import org.springframework.web.client.ResourceAccessException; import org.springframework.web.client.RestTemplate; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; + /** * REST controller that provides meta information regarding the dataflow server and its * deployers. @@ -70,6 +84,7 @@ * @author Gunnar Hillert * @author Glenn Renfro * @author Ilayaperumal Gopinathan + * @author Felipe Gutierrez */ @RestController @RequestMapping("/about") @@ -100,14 +115,21 @@ public class AboutController { private DataflowMetricsProperties dataflowMetricsProperties; + private ObjectProvider<GitInfoContributor> gitInfoContributor; + + private ObjectProvider<BuildInfoContributor> buildInfoContributor; + public AboutController(StreamDeployer streamDeployer, LauncherRepository launcherRepository, FeaturesProperties featuresProperties, - VersionInfoProperties versionInfoProperties, SecurityStateBean securityStateBean, DataflowMetricsProperties monitoringProperties) { + VersionInfoProperties versionInfoProperties, SecurityStateBean securityStateBean, DataflowMetricsProperties monitoringProperties, + ObjectProvider<GitInfoContributor> gitInfoContributor, ObjectProvider<BuildInfoContributor> buildInfoContributor) { this.streamDeployer = streamDeployer; this.launcherRepository = launcherRepository; this.featuresProperties = featuresProperties; this.versionInfoProperties = versionInfoProperties; this.securityStateBean = securityStateBean; this.dataflowMetricsProperties = monitoringProperties; + this.gitInfoContributor = gitInfoContributor; + this.buildInfoContributor = buildInfoContributor; } /** @@ -116,7 +138,7 @@ public AboutController(StreamDeployer streamDeployer, LauncherRepository launche * @return Detailed information about the enabled features, versions of implementation * libraries, and security configuration */ - @RequestMapping(method = RequestMethod.GET) + @GetMapping @ResponseStatus(HttpStatus.OK) public AboutResource getAboutResource() { final AboutResource aboutResource = new AboutResource(); @@ -221,7 +243,9 @@ else if (dashboard.getType() == MonitoringDashboardType.WAVEFRONT) { aboutResource.setMonitoringDashboardInfo(monitoringDashboardInfo); } - aboutResource.add(WebMvcLinkBuilder.linkTo(AboutController.class).withSelfRel()); + aboutResource.add(linkTo(AboutController.class).withSelfRel()); + + addGitAndBuildInfoIfAvailable(aboutResource); return aboutResource; } @@ -255,8 +279,14 @@ private String getChecksum(String defaultValue, String url, String version) { String result = defaultValue; if (result == null && StringUtils.hasText(url)) { + ConnectionSocketFactory sslsf = new SSLConnectionSocketFactory(HttpUtils.buildCertificateIgnoringSslContext(), NoopHostnameVerifier.INSTANCE); + + Lookup<ConnectionSocketFactory> connSocketFactoryLookup = RegistryBuilder.<ConnectionSocketFactory>
create() + .register("https", sslsf) + .register("http", new PlainConnectionSocketFactory()) + .build(); CloseableHttpClient httpClient = HttpClients.custom() - .setSSLHostnameVerifier(new NoopHostnameVerifier()) + .setConnectionManager(new BasicHttpClientConnectionManager(connSocketFactoryLookup)) .build(); HttpComponentsClientHttpRequestFactory requestFactory = new HttpComponentsClientHttpRequestFactory(); @@ -298,10 +328,9 @@ private String constructUrl(String url, String version) { } private String repoSelector(String version) { - final String REPO_SNAPSHOT_ROOT = "https://repo.spring.io/libs-snapshot"; - final String REPO_MILESTONE_ROOT = "https://repo.spring.io/libs-milestone"; - final String REPO_RELEASE_ROOT = "https://repo.spring.io/libs-release"; - final String MAVEN_ROOT = "https://repo1.maven.org/maven2"; + final String REPO_SNAPSHOT_ROOT = "https://repo.spring.io/snapshot"; + final String REPO_MILESTONE_ROOT = "https://repo.spring.io/milestone"; + final String MAVEN_ROOT = "https://repo.maven.apache.org/maven2"; String result = MAVEN_ROOT; if (version.endsWith("-SNAPSHOT")) { @@ -313,9 +342,17 @@ else if (version.contains(".M")) { else if (version.contains(".RC")) { result = REPO_MILESTONE_ROOT; } - else if (version.contains(".RELEASE")) { - result = REPO_RELEASE_ROOT; - } return result; } + + private void addGitAndBuildInfoIfAvailable(AboutResource aboutResource) { + Info.Builder builder = new Info.Builder(); + gitInfoContributor.ifAvailable(c -> c.contribute(builder)); + buildInfoContributor.ifAvailable(c -> c.contribute(builder)); + Map<String, Object> details = builder.build().getDetails(); + if (!ObjectUtils.isEmpty(details)) { + aboutResource.setGitAndBuildInfo(details); + } + } + }
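The getChecksum() rework above is a byproduct of the Apache HttpClient 4 to 5 migration: custom().setSSLHostnameVerifier(..) no longer exists, so hostname verification now travels with the SSL socket factory, which is registered per scheme on a connection manager. A standalone sketch of that wiring follows; the trust-all SSLContext is built inline here purely for illustration, whereas the controller delegates to HttpUtils.buildCertificateIgnoringSslContext(). This setup is only appropriate for fetching public, checksum-style resources.

```java
import javax.net.ssl.SSLContext;

import org.apache.hc.client5.http.impl.classic.CloseableHttpClient;
import org.apache.hc.client5.http.impl.classic.HttpClients;
import org.apache.hc.client5.http.impl.io.BasicHttpClientConnectionManager;
import org.apache.hc.client5.http.socket.ConnectionSocketFactory;
import org.apache.hc.client5.http.socket.PlainConnectionSocketFactory;
import org.apache.hc.client5.http.ssl.NoopHostnameVerifier;
import org.apache.hc.client5.http.ssl.SSLConnectionSocketFactory;
import org.apache.hc.client5.http.ssl.TrustAllStrategy;
import org.apache.hc.core5.http.config.Lookup;
import org.apache.hc.core5.http.config.RegistryBuilder;
import org.apache.hc.core5.ssl.SSLContextBuilder;

import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
import org.springframework.web.client.RestTemplate;

class ChecksumClientSketch {

	static RestTemplate insecureRestTemplate() throws Exception {
		// Trust-all context built inline for illustration only.
		SSLContext sslContext = SSLContextBuilder.create()
				.loadTrustMaterial(TrustAllStrategy.INSTANCE)
				.build();
		ConnectionSocketFactory sslSocketFactory =
				new SSLConnectionSocketFactory(sslContext, NoopHostnameVerifier.INSTANCE);
		// HttpClient 5 has no setSSLHostnameVerifier(); the verifier travels with
		// the socket factory, registered per scheme on the connection manager.
		Lookup<ConnectionSocketFactory> registry = RegistryBuilder.<ConnectionSocketFactory>create()
				.register("https", sslSocketFactory)
				.register("http", new PlainConnectionSocketFactory())
				.build();
		CloseableHttpClient httpClient = HttpClients.custom()
				.setConnectionManager(new BasicHttpClientConnectionManager(registry))
				.build();
		HttpComponentsClientHttpRequestFactory requestFactory = new HttpComponentsClientHttpRequestFactory();
		requestFactory.setHttpClient(httpClient);
		return new RestTemplate(requestFactory);
	}
}
```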
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/ApiNotSupportedException.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/ApiNotSupportedException.java new file mode 100644 index 0000000000..2cb0a2d214 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/ApiNotSupportedException.java @@ -0,0 +1,8 @@ +package org.springframework.cloud.dataflow.server.controller; + +public class ApiNotSupportedException extends RuntimeException { + + public ApiNotSupportedException(String message) { + super(message); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AppRegistryController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AppRegistryController.java index c97e25d769..b76242d8fa 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AppRegistryController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AppRegistryController.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2020 the original author or authors. + * Copyright 2015-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,7 +20,6 @@ import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; @@ -63,10 +62,12 @@ import org.springframework.hateoas.server.RepresentationModelAssembler; import org.springframework.http.HttpStatus; import org.springframework.util.StringUtils; +import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.PutMapping; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; @@ -82,6 +83,7 @@ * @author Patrick Peralta * @author Thomas Risberg * @author Chris Schaefer + * @author Corneil du Plessis */ @RestController @RequestMapping("/apps") @@ -131,6 +133,7 @@ public AppRegistryController(Optional streamDefiniti * @param type the application type: source, sink, processor, task * @param version optional application version * @param search optional findByTaskNameContains parameter + * @param defaultVersion Indicator to use default version. * @return the list of registered applications */ @GetMapping @ResponseStatus(HttpStatus.OK) public PagedModel<? extends AppRegistrationResource> list( Pageable pageable, PagedResourcesAssembler<AppRegistration> pagedResourcesAssembler, - @RequestParam(value = "type", required = false) ApplicationType type, + @RequestParam(required = false) ApplicationType type, @RequestParam(required = false) String search, @RequestParam(required = false) String version, @RequestParam(required = false) boolean defaultVersion) { @@ -158,21 +161,20 @@ public PagedModel<? extends AppRegistrationResource> list( * @param exhaustive if set to true all properties are returned * @return detailed application information */ - @RequestMapping(value = "/{type}/{name}/{version:.+}", method = RequestMethod.GET) + @GetMapping("/{type}/{name}/{version:.+}") @ResponseStatus(HttpStatus.OK) - public DetailedAppRegistrationResource info(@PathVariable("type") ApplicationType type, - @PathVariable("name") String name, @PathVariable("version") String version, - @RequestParam(required = false, name = "exhaustive") boolean exhaustive) { + public DetailedAppRegistrationResource info(@PathVariable ApplicationType type, + @PathVariable String name, @PathVariable String version, + @RequestParam(required = false) boolean exhaustive) { return getInfo(type, name, version, exhaustive); } - @Deprecated - @RequestMapping(value = "/{type}/{name}", method = RequestMethod.GET) + @GetMapping("/{type}/{name}") @ResponseStatus(HttpStatus.OK) public DetailedAppRegistrationResource info( - @PathVariable("type") ApplicationType type, @PathVariable("name") String name, - @RequestParam(required = false, name = "exhaustive") boolean exhaustive) { + @PathVariable ApplicationType type, @PathVariable String name, + @RequestParam(required = false) boolean exhaustive) { if (!this.appRegistryService.appExist(name, type)) { throw new NoSuchAppRegistrationException(name, type); } @@ -209,6 +211,9 @@ else if (entry.getKey().equals("outbound")) { } } } + Map<String, Set<String>> groupingsMap = this.metadataResolver
.listOptionGroups(this.appRegistryService.getAppMetadataResource(registration)); + result.getOptionGroups().putAll(groupingsMap); return result; } @@ -218,17 +223,21 @@ else if (entry.getKey().equals("outbound")) { * @param type module type * @param name module name * @param version module version + * @param bootVersion module boot version or {@code null} to use the default. Deprecated: bootVersion parameter is ignored. * @param uri URI for the module artifact (e.g. {@literal maven://group:artifact:version}) * @param metadataUri URI for the metadata artifact * @param force if {@code true}, overwrites a pre-existing registration */ - @RequestMapping(value = "/{type}/{name}/{version:.+}", method = RequestMethod.POST) + @PostMapping("/{type}/{name}/{version:.+}") @ResponseStatus(HttpStatus.CREATED) - public void register(@PathVariable("type") ApplicationType type, @PathVariable("name") String name, - @PathVariable("version") String version, - @RequestParam("uri") String uri, @RequestParam(name = "metadata-uri", required = false) String metadataUri, - @RequestParam(value = "force", defaultValue = "false") boolean force) { - + public void register( + @PathVariable ApplicationType type, + @PathVariable String name, + @PathVariable String version, + @RequestParam(required = false) @Deprecated String bootVersion, + @RequestParam String uri, + @RequestParam(name = "metadata-uri", required = false) String metadataUri, + @RequestParam(defaultValue = "false") boolean force) { validateApplicationName(name); appRegistryService.validate(appRegistryService.getDefaultApp(name, type), uri, version); AppRegistration previous = appRegistryService.find(name, type, version); @@ -236,23 +245,39 @@ public void register(@PathVariable("type") ApplicationType type, @PathVariable(" throw new AppAlreadyRegisteredException(previous); } try { - AppRegistration registration = this.appRegistryService.save(name, type, version, new URI(uri), - metadataUri != null ? new URI(metadataUri) : null); - prefetchMetadata(Arrays.asList(registration)); + AppRegistration registration = this.appRegistryService.save( + name, + type, + version, + new URI(uri), + metadataUri != null ? 
new URI(metadataUri) : null + ); + prefetchMetadata(Collections.singletonList(registration)); } catch (URISyntaxException e) { throw new IllegalArgumentException(e); } } - @Deprecated - @RequestMapping(value = "/{type}/{name}", method = RequestMethod.POST) + @PostMapping("/{type}/{name}") @ResponseStatus(HttpStatus.CREATED) - public void register(@PathVariable("type") ApplicationType type, @PathVariable("name") String name, - @RequestParam("uri") String uri, @RequestParam(name = "metadata-uri", required = false) String metadataUri, - @RequestParam(value = "force", defaultValue = "false") boolean force) { + public void register( + @PathVariable ApplicationType type, + @PathVariable String name, + @RequestParam(required = false) String bootVersion, + @RequestParam String uri, + @RequestParam(name = "metadata-uri", required = false) String metadataUri, + @RequestParam(defaultValue = "false") boolean force) { String version = this.appRegistryService.getResourceVersion(uri); - this.register(type, name, version, uri, metadataUri, force); + this.register( + type, + name, + version, + bootVersion, + uri, + metadataUri, + force + ); } /** @@ -262,10 +287,10 @@ public void register(@PathVariable("type") ApplicationType type, @PathVariable(" * @param name module name * @param version module version */ - @RequestMapping(value = "/{type}/{name}/{version:.+}", method = RequestMethod.PUT) + @PutMapping("/{type}/{name}/{version:.+}") @ResponseStatus(HttpStatus.ACCEPTED) - public void makeDefault(@PathVariable("type") ApplicationType type, @PathVariable("name") String name, - @PathVariable("version") String version) { + public void makeDefault(@PathVariable ApplicationType type, @PathVariable String name, + @PathVariable String version) { this.appRegistryService.setDefaultApp(name, type, version); } @@ -277,10 +302,10 @@ public void makeDefault(@PathVariable("type") ApplicationType type, @PathVariabl * @param name the application name * @param version application version */ - @RequestMapping(value = "/{type}/{name}/{version:.+}", method = RequestMethod.DELETE) + @DeleteMapping("/{type}/{name}/{version:.+}") @ResponseStatus(HttpStatus.OK) - public void unregister(@PathVariable("type") ApplicationType type, @PathVariable("name") String name, - @PathVariable("version") String version) { + public void unregister(@PathVariable ApplicationType type, @PathVariable String name, + @PathVariable String version) { if (type != ApplicationType.task) { String streamWithApp = findStreamContainingAppOf(type, name, version); @@ -348,10 +373,9 @@ private String findStreamContainingAppOf(ApplicationType appType, String appName return null; } - @Deprecated - @RequestMapping(value = "/{type}/{name}", method = RequestMethod.DELETE) + @DeleteMapping("/{type}/{name}") @ResponseStatus(HttpStatus.OK) - public void unregister(@PathVariable("type") ApplicationType type, @PathVariable("name") String name) { + public void unregister(@PathVariable ApplicationType type, @PathVariable String name) { if (this.appRegistryService.find(name, type) == null) { throw new NoSuchAppRegistrationException(name, type); } @@ -362,7 +386,7 @@ public void unregister(@PathVariable("type") ApplicationType type, @PathVariable this.unregister(type, name, appRegistration.getVersion()); } - @RequestMapping(method = RequestMethod.DELETE) + @DeleteMapping @ResponseStatus(HttpStatus.OK) public void unregisterAll() { List appRegistrations = appRegistryService.findAll(); @@ -399,14 +423,14 @@ public void unregisterAll() { * @param force if {@code true}, overwrites 
any pre-existing registrations * @return the collection of registered applications */ - @RequestMapping(method = RequestMethod.POST) + @PostMapping @ResponseStatus(HttpStatus.CREATED) public PagedModel registerAll( Pageable pageable, PagedResourcesAssembler pagedResourcesAssembler, - @RequestParam(value = "uri", required = false) String uri, - @RequestParam(value = "apps", required = false) String apps, - @RequestParam(value = "force", defaultValue = "false") boolean force) { + @RequestParam(required = false) String uri, + @RequestParam(required = false) String apps, + @RequestParam(defaultValue = "false") boolean force) { List registrations = new ArrayList<>(); if (StringUtils.hasText(uri)) { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AuditRecordController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AuditRecordController.java index aa8e43918b..424d2836ff 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AuditRecordController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AuditRecordController.java @@ -41,9 +41,9 @@ import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; import org.springframework.util.Assert; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; @@ -91,7 +91,7 @@ public AuditRecordController(AuditRecordService auditRecordService) { * retrieve {@link AuditRecord}s * @return list of audit records */ - @RequestMapping(value = "", method = RequestMethod.GET) + @GetMapping("") @ResponseStatus(HttpStatus.OK) public PagedModel list(Pageable pageable, @RequestParam(required = false) AuditActionType[] actions, @@ -120,9 +120,9 @@ public PagedModel list(Pageable pageable, * @param id the id of an existing audit record (required) * @return the audit record or null if the audit record does not exist */ - @RequestMapping(value = "/{id}", method = RequestMethod.GET) + @GetMapping("/{id}") @ResponseStatus(HttpStatus.OK) - public AuditRecordResource display(@PathVariable("id") Long id) { + public AuditRecordResource display(@PathVariable Long id) { AuditRecord auditRecord = this.auditRecordService.findById(id) .orElseThrow(() -> new NoSuchAuditRecordException(id)); return new Assembler(new PageImpl<>(Collections.singletonList(auditRecord))).toModel(auditRecord); @@ -133,7 +133,7 @@ public AuditRecordResource display(@PathVariable("id") Long id) { * * @return Array of AuditOperationTypes */ - @RequestMapping(value = "/audit-operation-types", method = RequestMethod.GET) + @GetMapping("/audit-operation-types") @ResponseStatus(HttpStatus.OK) public AuditOperationType[] getAuditOperationTypes() { return AuditOperationType.values(); @@ -144,7 +144,7 @@ public AuditOperationType[] getAuditOperationTypes() { * * @return Array of AuditActionTypes */ - @RequestMapping(value = "/audit-action-types", method = RequestMethod.GET) + @GetMapping("/audit-action-types") @ResponseStatus(HttpStatus.OK) 
public AuditActionType[] getAuditActionTypes() { return AuditActionType.values(); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/CompletionController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/CompletionController.java index 3d0a9607c2..18565b81af 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/CompletionController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/CompletionController.java @@ -18,7 +18,7 @@ import java.util.List; -import javax.validation.constraints.Min; +import jakarta.validation.constraints.Min; import org.springframework.cloud.dataflow.completion.CompletionProposal; import org.springframework.cloud.dataflow.completion.StreamCompletionProvider; @@ -73,8 +73,8 @@ public CompletionController(StreamCompletionProvider completionProvider, * @return the list of completion proposals */ @RequestMapping("/stream") - public CompletionProposalsResource completions(@RequestParam("start") String start, - @RequestParam(value = "detailLevel", defaultValue = "1") @Min(value = 1, message = "The provided detail level must be greater than zero.") int detailLevel) { + public CompletionProposalsResource completions(@RequestParam String start, + @RequestParam(defaultValue = "1") @Min(value = 1, message = "The provided detail level must be greater than zero.") int detailLevel) { return assembler.toModel(completionProvider.complete(start, detailLevel)); } @@ -88,8 +88,8 @@ public CompletionProposalsResource completions(@RequestParam("start") String sta * @return the list of completion proposals */ @RequestMapping("/task") - public CompletionProposalsResource taskCompletions(@RequestParam("start") String start, - @RequestParam(value = "detailLevel", defaultValue = "1") @Min(value = 1, message = "The provided detail level must be greater than zero.") int detailLevel) { + public CompletionProposalsResource taskCompletions(@RequestParam String start, + @RequestParam(defaultValue = "1") @Min(value = 1, message = "The provided detail level must be greater than zero.") int detailLevel) { return assembler.toModel(taskCompletionProvider.complete(start, detailLevel)); } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionController.java index 5f631249be..33304ee305 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionController.java @@ -1,5 +1,5 @@ /* - * Copyright 2016 the original author or authors. + * Copyright 2016-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
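Review note: a pattern repeated throughout this patch is dropping explicit names from `@RequestParam`/`@PathVariable` (for example `@RequestParam("start") String start` becomes `@RequestParam String start`). The implicit form binds by the Java parameter name, which Spring Framework 6 only recovers when the code is compiled with `javac -parameters` (Spring Boot's Maven/Gradle plugins enable this by default). A minimal sketch of the two forms; the controller and paths below are hypothetical:

```java
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
class BindingStyleExample {

	// Implicit: the query parameter name "start" is read from the compiled
	// parameter name, so this requires building with -parameters.
	@GetMapping("/implicit")
	String implicit(@RequestParam String start) {
		return start;
	}

	// Explicit: works regardless of compiler flags, at the cost of repetition.
	@GetMapping("/explicit")
	String explicit(@RequestParam("start") String start) {
		return start;
	}
}
```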
@@ -16,9 +16,11 @@ package org.springframework.cloud.dataflow.server.controller; -import java.util.List; import java.util.TimeZone; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.launch.JobExecutionNotRunningException; @@ -32,33 +34,40 @@ import org.springframework.cloud.dataflow.server.batch.JobService; import org.springframework.cloud.dataflow.server.service.TaskJobService; import org.springframework.data.domain.Page; -import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; import org.springframework.data.web.PagedResourcesAssembler; import org.springframework.hateoas.PagedModel; import org.springframework.hateoas.server.ExposesResourceFor; import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; import org.springframework.util.Assert; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PutMapping; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + /** * Controller for operations on {@link org.springframework.batch.core.JobExecution}. This * includes obtaining Job execution information from the job explorer. * * @author Glenn Renfro * @author Gunnar Hillert + * @author Corneil du Plessis */ @RestController @RequestMapping("/jobs/executions") @ExposesResourceFor(JobExecutionResource.class) public class JobExecutionController { + private static final Logger logger = LoggerFactory.getLogger(JobExecutionController.class); + private final Assembler jobAssembler = new Assembler(); private final TaskJobService taskJobService; @@ -68,7 +77,7 @@ public class JobExecutionController { * a the {@link JobService} * * @param taskJobService the service this controller will use for retrieving job execution - * information. Must not be null. + * information. Must not be null. */ public JobExecutionController(TaskJobService taskJobService) { Assert.notNull(taskJobService, "taskJobService must not be null"); @@ -78,32 +87,24 @@ public JobExecutionController(TaskJobService taskJobService) { /** * Retrieve all task job executions with the task name specified * - * @param jobName name of the job. SQL server specific wildcards are enabled (eg.: myJob%, - * m_Job, ...) - * @param pageable page-able collection of {@code TaskJobExecution}s. + * @param jobName name of the job. SQL server specific wildcards are enabled (e.g.: myJob%, + * m_Job, ...) + * @param status Optional status criteria. + * @param pageable page-able collection of {@code TaskJobExecution}s. + * @param assembler for the {@link TaskJobExecution}s * @return list task/job executions with the specified jobName. - * @throws NoSuchJobException if the job with the given name does not exist. + * @throws NoSuchJobException if the job with the given name does not exist.
+ * @throws NoSuchJobExecutionException if the job execution doesn't exist. */ - @RequestMapping(value = "", method = RequestMethod.GET, produces = "application/json") + @GetMapping(value = "", produces = "application/json") @ResponseStatus(HttpStatus.OK) public PagedModel retrieveJobsByParameters( @RequestParam(value = "name", required = false) String jobName, - @RequestParam(value = "status", required = false) BatchStatus status, + @RequestParam(required = false) BatchStatus status, Pageable pageable, PagedResourcesAssembler assembler) throws NoSuchJobException, NoSuchJobExecutionException { - List jobExecutions; - Page page; - - if (jobName == null && status == null) { - jobExecutions = taskJobService.listJobExecutions(pageable); - page = new PageImpl<>(jobExecutions, pageable, taskJobService.countJobExecutions()); - } else { - jobExecutions = taskJobService.listJobExecutionsForJob(pageable, jobName, status); - page = new PageImpl<>(jobExecutions, pageable, - taskJobService.countJobExecutionsForJob(jobName, status)); - } - - return assembler.toModel(page, jobAssembler); + Page jobExecutions = jobName == null && status == null ? taskJobService.listJobExecutions(pageable) + : taskJobService.listJobExecutionsForJob(pageable, jobName, status); + return assembler.toModel(jobExecutions, jobAssembler); } /** @@ -112,14 +113,14 @@ public PagedModel retrieveJobsByParameters( * @param id the id of the requested {@link JobExecution} * @return the {@link JobExecution} * @throws NoSuchJobExecutionException if the specified job execution for the id does not - * exist. + * exist. */ - @RequestMapping(value = "/{id}", method = RequestMethod.GET, produces = "application/json") + @GetMapping(value = "/{id}", produces = "application/json") @ResponseStatus(HttpStatus.OK) - public JobExecutionResource view(@PathVariable("id") long id) throws NoSuchJobExecutionException { + public JobExecutionResource view(@PathVariable long id) throws NoSuchJobExecutionException { TaskJobExecution jobExecution = taskJobService.getJobExecution(id); if (jobExecution == null) { - throw new NoSuchJobExecutionException(String.format("No Job Execution with id of %d exits", id)); + throw new NoSuchJobExecutionException(String.format("No Job Execution with id of %d exists", id)); } return jobAssembler.toModel(jobExecution); } @@ -130,14 +131,15 @@ public JobExecutionResource view(@PathVariable("id") long id) throws NoSuchJobEx * * @param jobExecutionId the executionId of the job execution to stop. * @throws JobExecutionNotRunningException if a stop is requested on a job that is not - * running. - * @throws NoSuchJobExecutionException if the job execution id specified does not exist. + * running. + * @throws NoSuchJobExecutionException if the job execution id specified does not exist. 
*/ - @RequestMapping(value = { "/{executionId}" }, method = RequestMethod.PUT, params = "stop=true") - @ResponseStatus(HttpStatus.OK) - public void stopJobExecution(@PathVariable("executionId") long jobExecutionId) - throws NoSuchJobExecutionException, JobExecutionNotRunningException { + @PutMapping(value = {"/{executionId}"}, params = "stop=true") + public ResponseEntity stopJobExecution( + @PathVariable("executionId") long jobExecutionId) throws NoSuchJobExecutionException, JobExecutionNotRunningException { taskJobService.stopJobExecution(jobExecutionId); + return ResponseEntity.ok().build(); } /** @@ -146,17 +148,25 @@ public void stopJobExecution(@PathVariable("executionId") long jobExecutionId) * * @param jobExecutionId the executionId of the job execution to restart * @throws NoSuchJobExecutionException if the job execution for the jobExecutionId - * specified does not exist. + * specified does not exist. */ - @RequestMapping(value = { "/{executionId}" }, method = RequestMethod.PUT, params = "restart=true") + @PutMapping(value = {"/{executionId}"}, params = "restart=true") @ResponseStatus(HttpStatus.OK) - public void restartJobExecution(@PathVariable("executionId") long jobExecutionId) - throws NoSuchJobExecutionException { - taskJobService.restartJobExecution(jobExecutionId); + public ResponseEntity restartJobExecution( + @PathVariable("executionId") long jobExecutionId, + @RequestParam(required = false) Boolean useJsonJobParameters) + throws NoSuchJobExecutionException { + try { + taskJobService.restartJobExecution(jobExecutionId, useJsonJobParameters); + } catch (NoSuchJobExecutionException e) { + logger.warn(e.getMessage(), e); + throw e; + } + return ResponseEntity.ok().build(); } /** - * {@link org.springframework.hateoas.server.ResourceAssembler} implementation that + * {@link org.springframework.hateoas.server.RepresentationModelAssembler} implementation that * converts {@link JobExecution}s to {@link JobExecutionResource}s. */ private static class Assembler extends RepresentationModelAssemblerSupport { @@ -178,12 +188,25 @@ public void setTimeZone(TimeZone timeZone) { @Override public JobExecutionResource toModel(TaskJobExecution taskJobExecution) { - return createModelWithId(taskJobExecution.getJobExecution().getId(), taskJobExecution); + return instantiateModel(taskJobExecution); } @Override public JobExecutionResource instantiateModel(TaskJobExecution taskJobExecution) { - return new JobExecutionResource(taskJobExecution, timeZone); + JobExecutionResource resource = new JobExecutionResource(taskJobExecution, timeZone); + try { + resource.add(linkTo(methodOn(JobExecutionController.class).view(taskJobExecution.getTaskId())).withSelfRel()); + if (taskJobExecution.getJobExecution().isRunning()) { + resource.add(linkTo(methodOn(JobExecutionController.class).stopJobExecution(taskJobExecution.getJobExecution().getJobId())).withRel("stop")); + } + if (!taskJobExecution.getJobExecution().getStatus().equals(BatchStatus.COMPLETED)) { + // In this case we use null for the useJsonJobParameters parameter, so we use the configured job parameter serialization method specified by dataflow.
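+ // (The methodOn(...) invocation is only recorded to build the href; because useJsonJobParameters is an optional request parameter, passing null keeps it out of the generated link.)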
+ resource.add(linkTo(methodOn(JobExecutionController.class).restartJobExecution(taskJobExecution.getJobExecution().getJobId(), null)).withRel("restart")); + } + } catch (NoSuchJobExecutionException | JobExecutionNotRunningException e) { + throw new RuntimeException(e); + } + return resource; } } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionThinController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionThinController.java index 1609193fcc..a2ce3095c5 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionThinController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionThinController.java @@ -1,5 +1,5 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,10 +17,10 @@ package org.springframework.cloud.dataflow.server.controller; import java.util.Date; -import java.util.List; import java.util.TimeZone; import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.launch.JobExecutionNotRunningException; import org.springframework.batch.core.launch.NoSuchJobException; import org.springframework.batch.core.launch.NoSuchJobExecutionException; import org.springframework.beans.factory.annotation.Autowired; @@ -31,7 +31,6 @@ import org.springframework.cloud.dataflow.server.batch.JobService; import org.springframework.cloud.dataflow.server.service.TaskJobService; import org.springframework.data.domain.Page; -import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; import org.springframework.data.web.PagedResourcesAssembler; import org.springframework.format.annotation.DateTimeFormat; @@ -40,17 +39,21 @@ import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; import org.springframework.util.Assert; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + /** * Controller for retrieving {@link JobExecution}s where the step executions are * not included in the results that are returned. * * @author Glenn Renfro + * @author Corneil du Plessis * * @since 2.0 */ @@ -68,9 +71,8 @@ public class JobExecutionThinController { * from a the {@link JobService} * * @param taskJobService the service this controller will use for retrieving job - * execution information. Must not be null. + * execution information. Must not be null. 
*/ - @Autowired public JobExecutionThinController(TaskJobService taskJobService) { Assert.notNull(taskJobService, "taskJobService must not be null"); this.taskJobService = taskJobService; @@ -80,104 +82,106 @@ public JobExecutionThinController(TaskJobService taskJobService) { * Return a page-able list of {@link JobExecutionThinResource} defined jobs that * do not contain step execution detail. * - * @param pageable page-able collection of {@code TaskJobExecution}s. + * @param pageable page-able collection of {@code TaskJobExecution}s. * @param assembler for the {@link TaskJobExecution}s * @return a list of Task/Job executions(job executions do not contain step executions. * @throws NoSuchJobExecutionException in the event that a job execution id specified - * is not present when looking up stepExecutions for the result. + * is not present when looking up stepExecutions for the result. */ - @RequestMapping(value = "", method = RequestMethod.GET, produces = "application/json") + @GetMapping(value = "", produces = "application/json") @ResponseStatus(HttpStatus.OK) public PagedModel listJobsOnly(Pageable pageable, - PagedResourcesAssembler assembler) throws NoSuchJobExecutionException { - List jobExecutions = taskJobService.listJobExecutionsWithStepCount(pageable); - Page page = new PageImpl<>(jobExecutions, pageable, taskJobService.countJobExecutions()); - return assembler.toModel(page, jobAssembler); + PagedResourcesAssembler assembler) throws NoSuchJobExecutionException { + Page jobExecutions = taskJobService.listJobExecutionsWithStepCount(pageable); + return assembler.toModel(jobExecutions, jobAssembler); } + /** * Retrieve all task job executions with the task name specified * - * @param jobName name of the job - * @param pageable page-able collection of {@code TaskJobExecution}s. + * @param jobName name of the job + * @param pageable page-able collection of {@code TaskJobExecution}s. * @param assembler for the {@link TaskJobExecution}s * @return list task/job executions with the specified jobName. * @throws NoSuchJobException if the job with the given name does not exist. */ - @RequestMapping(value = "", method = RequestMethod.GET, params = "name", produces = "application/json") + @GetMapping(value = "", params = "name", produces = "application/json") @ResponseStatus(HttpStatus.OK) - public PagedModel retrieveJobsByName(@RequestParam("name") String jobName, - Pageable pageable, PagedResourcesAssembler assembler) throws NoSuchJobException { - List jobExecutions = taskJobService.listJobExecutionsForJobWithStepCount(pageable, jobName); - Page page = new PageImpl<>(jobExecutions, pageable, - taskJobService.countJobExecutionsForJob(jobName, null)); - return assembler.toModel(page, jobAssembler); + public PagedModel retrieveJobsByName( + @RequestParam("name") String jobName, + Pageable pageable, + PagedResourcesAssembler assembler) throws NoSuchJobException { + Page jobExecutions = taskJobService.listJobExecutionsForJobWithStepCount(pageable, jobName); + return assembler.toModel(jobExecutions, jobAssembler); } /** * Retrieve all task job executions filtered with the date range specified * - * @param fromDate the date which start date must be greater than. - * @param toDate the date which start date must be less than. - * @param pageable page-able collection of {@code TaskJobExecution}s. + * @param fromDate the date which start date must be greater than. + * @param toDate the date which start date must be less than. + * @param pageable page-able collection of {@code TaskJobExecution}s. 
* @param assembler for the {@link TaskJobExecution}s * @return list task/job executions with the specified jobName. * @throws NoSuchJobException if the job with the given name does not exist. */ - @RequestMapping(value = "", method = RequestMethod.GET, params = { "fromDate", - "toDate" }, produces = "application/json") + @GetMapping(value = "", params = {"fromDate", + "toDate"}, produces = "application/json") @ResponseStatus(HttpStatus.OK) public PagedModel retrieveJobsByDateRange( - @RequestParam("fromDate") @DateTimeFormat(pattern = TimeUtils.DEFAULT_DATAFLOW_DATE_TIME_PARAMETER_FORMAT_PATTERN) Date fromDate, - @RequestParam("toDate") @DateTimeFormat(pattern = TimeUtils.DEFAULT_DATAFLOW_DATE_TIME_PARAMETER_FORMAT_PATTERN) Date toDate, - Pageable pageable, PagedResourcesAssembler assembler) throws NoSuchJobException { - List jobExecutions = taskJobService.listJobExecutionsForJobWithStepCount(pageable, fromDate, - toDate); - Page page = new PageImpl<>(jobExecutions, pageable, jobExecutions.size()); - return assembler.toModel(page, jobAssembler); + @RequestParam @DateTimeFormat(pattern = TimeUtils.DEFAULT_DATAFLOW_DATE_TIME_PARAMETER_FORMAT_PATTERN) Date fromDate, + @RequestParam @DateTimeFormat(pattern = TimeUtils.DEFAULT_DATAFLOW_DATE_TIME_PARAMETER_FORMAT_PATTERN) Date toDate, + Pageable pageable, + PagedResourcesAssembler assembler + ) throws NoSuchJobException { + Page jobExecutions = taskJobService.listJobExecutionsForJobWithStepCount(pageable, fromDate, toDate); + return assembler.toModel(jobExecutions, jobAssembler); } /** * Retrieve all task job executions filtered with the job instance id specified * * @param jobInstanceId the job instance id associated with the execution. - * @param pageable page-able collection of {@code TaskJobExecution}s. - * @param assembler for the {@link TaskJobExecution}s + * @param pageable page-able collection of {@code TaskJobExecution}s. + * @param assembler for the {@link TaskJobExecution}s * @return list task/job executions with the specified jobName. * @throws NoSuchJobException if the job with the given name does not exist. */ - @RequestMapping(value = "", method = RequestMethod.GET, params = "jobInstanceId", produces = "application/json") + @GetMapping(value = "", params = "jobInstanceId", produces = "application/json") @ResponseStatus(HttpStatus.OK) public PagedModel retrieveJobsByJobInstanceId( - @RequestParam("jobInstanceId") int jobInstanceId, Pageable pageable, + @RequestParam int jobInstanceId, + Pageable pageable, PagedResourcesAssembler assembler) throws NoSuchJobException { - List jobExecutions = taskJobService + Page jobExecutions = taskJobService .listJobExecutionsForJobWithStepCountFilteredByJobInstanceId(pageable, jobInstanceId); - Page page = new PageImpl<>(jobExecutions, pageable, jobExecutions.size()); - return assembler.toModel(page, jobAssembler); + return assembler.toModel(jobExecutions, jobAssembler); } /** * Retrieve all task job executions filtered with the task execution id specified * * @param taskExecutionId the task execution id associated with the execution. - * @param pageable page-able collection of {@code TaskJobExecution}s. - * @param assembler for the {@link TaskJobExecution}s + * @param pageable page-able collection of {@code TaskJobExecution}s. + * @param assembler for the {@link TaskJobExecution}s * @return list task/job executions with the specified jobName. * @throws NoSuchJobException if the job with the given name does not exist. 
*/ - @RequestMapping(value = "", method = RequestMethod.GET, params = "taskExecutionId", produces = "application/json") + @GetMapping(value = "", params = "taskExecutionId", produces = "application/json") @ResponseStatus(HttpStatus.OK) public PagedModel retrieveJobsByTaskExecutionId( - @RequestParam("taskExecutionId") int taskExecutionId, Pageable pageable, + @RequestParam int taskExecutionId, + Pageable pageable, PagedResourcesAssembler assembler) throws NoSuchJobException { - List jobExecutions = taskJobService - .listJobExecutionsForJobWithStepCountFilteredByTaskExecutionId(pageable, taskExecutionId); - Page page = new PageImpl<>(jobExecutions, pageable, jobExecutions.size()); - return assembler.toModel(page, jobAssembler); + Page jobExecutions = taskJobService.listJobExecutionsForJobWithStepCountFilteredByTaskExecutionId( + pageable, + taskExecutionId + ); + return assembler.toModel(jobExecutions, jobAssembler); } /** - * {@link org.springframework.hateoas.server.ResourceAssembler} implementation that converts + * {@link org.springframework.hateoas.server.RepresentationModelAssembler} implementation that converts * {@link JobExecution}s to {@link JobExecutionThinResource}s. */ private static class Assembler extends RepresentationModelAssemblerSupport { @@ -199,12 +203,25 @@ public void setTimeZone(TimeZone timeZone) { @Override public JobExecutionThinResource toModel(TaskJobExecution taskJobExecution) { - return createModelWithId(taskJobExecution.getJobExecution().getId(), taskJobExecution); + return instantiateModel(taskJobExecution); } @Override public JobExecutionThinResource instantiateModel(TaskJobExecution taskJobExecution) { - return new JobExecutionThinResource(taskJobExecution, timeZone); + JobExecutionThinResource resource = new JobExecutionThinResource(taskJobExecution, timeZone); + try { + resource.add(linkTo(methodOn(JobExecutionController.class).view(taskJobExecution.getTaskId())).withSelfRel()); + if (taskJobExecution.getJobExecution().isRunning()) { + resource.add(linkTo(methodOn(JobExecutionController.class).stopJobExecution(taskJobExecution.getJobExecution().getJobId())).withRel("stop")); + } + if (taskJobExecution.getJobExecution().getEndTime() != null && !taskJobExecution.getJobExecution().isRunning()) { + // In this case we use null for the useJsonJobParameters parameter so we use the configured job parameter serialization method specified by dataflow. 
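+ // (As above, the recorded methodOn(...) call only shapes the href; a null optional parameter is simply omitted from the generated restart link.)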
+ resource.add(linkTo(methodOn(JobExecutionController.class).restartJobExecution(taskJobExecution.getJobExecution().getJobId(), null)).withRel("restart")); + } + } catch (NoSuchJobExecutionException | JobExecutionNotRunningException e) { + throw new RuntimeException(e); + } + return resource; } } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobInstanceController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobInstanceController.java index c58ccd6f33..9410603418 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobInstanceController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobInstanceController.java @@ -33,7 +33,6 @@ import org.springframework.cloud.dataflow.rest.resource.JobInstanceResource; import org.springframework.cloud.dataflow.server.service.TaskJobService; import org.springframework.data.domain.Page; -import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; import org.springframework.data.web.PagedResourcesAssembler; import org.springframework.hateoas.PagedModel; @@ -41,9 +40,9 @@ import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; import org.springframework.util.Assert; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; @@ -68,9 +67,8 @@ public class JobInstanceController { * Creates a {@code JobInstanceController} that retrieves Job Instance information. * * @param taskJobService the {@link TaskJobService} used for retrieving batch instance - * data. + * data. */ - @Autowired public JobInstanceController(TaskJobService taskJobService) { Assert.notNull(taskJobService, "taskJobService must not be null"); this.taskJobService = taskJobService; @@ -79,20 +77,20 @@ public JobInstanceController(TaskJobService taskJobService) { /** * Return a page-able list of {@link JobInstanceResource} defined jobs. * - * @param jobName the name of the job - * @param pageable page-able collection of {@link JobInstance}s. + * @param jobName the name of the job + * @param pageable page-able collection of {@link JobInstance}s. * @param assembler for the {@link JobInstance}s * @return a list of Job Instance * @throws NoSuchJobException if the job for jobName specified does not exist. 
*/ - @RequestMapping(value = "", method = RequestMethod.GET, params = "name") + @GetMapping(value = "", params = "name") @ResponseStatus(HttpStatus.OK) - public PagedModel list(@RequestParam("name") String jobName, Pageable pageable, + public PagedModel list( + @RequestParam("name") String jobName, + Pageable pageable, PagedResourcesAssembler assembler) throws NoSuchJobException { - List jobInstances = taskJobService.listTaskJobInstancesForJobName(pageable, jobName); - Page page = new PageImpl<>(jobInstances, pageable, - taskJobService.countJobInstances(jobName)); - return assembler.toModel(page, jobAssembler); + Page jobInstances = taskJobService.listTaskJobInstancesForJobName(pageable, jobName); + return assembler.toModel(jobInstances, jobAssembler); } /** @@ -101,17 +99,22 @@ public PagedModel list(@RequestParam("name") String jobName * @param id the id of the requested {@link JobInstance} * @return the {@link JobInstance} * @throws NoSuchJobInstanceException if job instance for the id does not exist. - * @throws NoSuchJobException if the job for the job instance does not exist. + * @throws NoSuchJobException if the job for the job instance does not exist. */ - @RequestMapping(value = "/{id}", method = RequestMethod.GET) + @GetMapping("/{id}") @ResponseStatus(HttpStatus.OK) - public JobInstanceResource view(@PathVariable("id") long id) throws NoSuchJobInstanceException, NoSuchJobException { + public JobInstanceResource view( + @PathVariable long id + ) throws NoSuchJobInstanceException, NoSuchJobException { JobInstanceExecutions jobInstance = taskJobService.getJobInstance(id); + if (jobInstance == null) { + throw new NoSuchJobInstanceException(String.format("No job instance for id '%d'", id)); + } return jobAssembler.toModel(jobInstance); } /** - * {@link org.springframework.hateoas.server.ResourceAssembler} implementation that converts + * {@link RepresentationModelAssemblerSupport} implementation that converts * {@link JobInstance}s to {@link JobInstanceResource}s. */ private static class Assembler extends RepresentationModelAssemblerSupport { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionController.java index 73a700c241..8367fd70cc 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionController.java @@ -1,5 +1,5 @@ /* - * Copyright 2016 the original author or authors. + * Copyright 2016-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
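Review note: the controller hunks above share one refactoring: the `TaskJobService` methods now return `Page<...>` directly instead of a `List` that each controller wrapped in a `PageImpl`. In several of the replaced blocks the total was computed as `jobExecutions.size()`, which reports the current slice as the grand total and breaks paging links whenever more than one page of results exists. A small sketch of the difference, with illustrative names:

```java
import java.util.List;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;

class PagingTotals {

	// Old shape: totalElements collapses to the slice size (e.g. 20),
	// even if the underlying query matched thousands of rows.
	Page<String> sliceSizedTotal(List<String> slice, Pageable pageable) {
		return new PageImpl<>(slice, pageable, slice.size());
	}

	// New shape: the layer that runs the query also supplies the real count,
	// so getTotalElements()/getTotalPages() are trustworthy.
	Page<String> realTotal(List<String> slice, Pageable pageable, long totalMatching) {
		return new PageImpl<>(slice, pageable, totalMatching);
	}
}
```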
@@ -22,7 +22,6 @@ import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.StepExecution; import org.springframework.batch.core.launch.NoSuchJobExecutionException; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.cloud.dataflow.rest.resource.StepExecutionResource; import org.springframework.cloud.dataflow.server.batch.JobService; import org.springframework.cloud.dataflow.server.batch.NoSuchStepExecutionException; @@ -36,14 +35,15 @@ import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; import org.springframework.util.Assert; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; /** * @author Glenn Renfro + * @author Corneil du Plessis */ @RestController @RequestMapping("/jobs/executions/{jobExecutionId}/steps") @@ -51,20 +51,15 @@ public class JobStepExecutionController { private final JobService jobService; - - private final Assembler stepAssembler = new Assembler(); - /** * Creates a {@code JobStepExecutionsController} that retrieves Job Step Execution * information from a the {@link JobService} * - * @param jobService the service this controller will use for retrieving job step - * execution information. + * @param jobService JobService used for this controller */ - @Autowired public JobStepExecutionController(JobService jobService) { - Assert.notNull(jobService, "repository must not be null"); - this.jobService = jobService; + Assert.notNull(jobService, "jobService required"); + this.jobService = jobService; } /** @@ -77,13 +72,17 @@ public JobStepExecutionController(JobService jobService) { * @throws NoSuchJobExecutionException if the job execution for the id specified does * not exist. */ - @RequestMapping(value = { "" }, method = RequestMethod.GET) + @GetMapping({ "" }) @ResponseStatus(HttpStatus.OK) - public PagedModel stepExecutions(@PathVariable("jobExecutionId") long id, - Pageable pageable, PagedResourcesAssembler assembler) throws NoSuchJobExecutionException { - List result; - result = new ArrayList<>(jobService.getStepExecutions(id)); + public PagedModel stepExecutions( + @PathVariable("jobExecutionId") long id, + Pageable pageable, + PagedResourcesAssembler assembler + ) throws NoSuchJobExecutionException { + + List result = new ArrayList<>(jobService.getStepExecutions(id)); Page page = new PageImpl<>(result, pageable, result.size()); + final Assembler stepAssembler = new Assembler(); return assembler.toModel(page, stepAssembler); } @@ -97,27 +96,29 @@ public PagedModel stepExecutions(@PathVariable("jobExecut * @throws NoSuchJobExecutionException if the job execution for the id specified does * not exist. 
*/ - @RequestMapping(value = { "/{stepExecutionId}" }, method = RequestMethod.GET) + @GetMapping({ "/{stepExecutionId}" }) @ResponseStatus(HttpStatus.OK) - public StepExecutionResource getStepExecution(@PathVariable("jobExecutionId") Long id, + public StepExecutionResource getStepExecution( + @PathVariable("jobExecutionId") Long id, @PathVariable("stepExecutionId") Long stepId) throws NoSuchStepExecutionException, NoSuchJobExecutionException { - return stepAssembler.toModel(jobService.getStepExecution(id, stepId)); + StepExecution stepExecution = jobService.getStepExecution(id, stepId); + final Assembler stepAssembler = new Assembler(); + return stepAssembler.toModel(stepExecution); } /** - * {@link org.springframework.hateoas.server.ResourceAssembler} implementation that converts + * {@link org.springframework.hateoas.server.RepresentationModelAssembler} implementation that converts * {@link StepExecution}s to {@link StepExecutionResource}s. */ private static class Assembler extends RepresentationModelAssemblerSupport { - public Assembler() { super(JobStepExecutionController.class, StepExecutionResource.class); } @Override public StepExecutionResource toModel(StepExecution stepExecution) { - return createModelWithId(stepExecution.getId(), stepExecution, stepExecution.getJobExecution().getId()); + return StepExecutionResourceBuilder.toModel(stepExecution); } @Override diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionProgressController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionProgressController.java index 40e18ca258..a32746ea03 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionProgressController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionProgressController.java @@ -1,5 +1,5 @@ /* - * Copyright 2016 the original author or authors. + * Copyright 2016-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
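Review note: in `JobStepExecutionController` above (and in the progress controller that follows), the shared `private final Assembler stepAssembler` field becomes a fresh instance created per request. The assembler exposes `setTimeZone(TimeZone)`, i.e. it is mutable, so a single instance shared across concurrent request threads could leak one caller's time zone into another's response. A sketch of the hazard and the fix, with hypothetical names:

```java
import java.util.TimeZone;

class TimeZoneAwareAssembler {

	private TimeZone timeZone = TimeZone.getTimeZone("UTC");

	// Mutable configuration: unsafe on an instance shared across threads.
	void setTimeZone(TimeZone timeZone) {
		this.timeZone = timeZone;
	}

	String render(long epochMillis) {
		return epochMillis + " @ " + timeZone.getID();
	}
}

class StepHandler {

	String handle(long value, TimeZone requestZone) {
		// Per-request instance: each thread configures and uses its own copy.
		TimeZoneAwareAssembler assembler = new TimeZoneAwareAssembler();
		assembler.setTimeZone(requestZone);
		return assembler.render(value);
	}
}
```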
@@ -19,64 +19,70 @@ import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.StepExecution; import org.springframework.batch.core.launch.NoSuchJobExecutionException; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.cloud.dataflow.rest.job.StepExecutionHistory; import org.springframework.cloud.dataflow.rest.resource.StepExecutionProgressInfoResource; import org.springframework.cloud.dataflow.server.batch.JobService; import org.springframework.cloud.dataflow.server.batch.NoSuchStepExecutionException; import org.springframework.cloud.dataflow.server.job.support.StepExecutionProgressInfo; +import org.springframework.cloud.dataflow.server.service.TaskJobService; import org.springframework.hateoas.server.ExposesResourceFor; import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; -import org.springframework.util.Assert; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + /** * @author Glenn Renfro + * @author Corneil du Plessis */ @RestController @RequestMapping("/jobs/executions/{jobExecutionId}/steps") @ExposesResourceFor(StepExecutionProgressInfoResource.class) public class JobStepExecutionProgressController { - private final JobService jobService; + private final TaskJobService taskJobService; - private final Assembler stepAssembler = new Assembler(); + private final JobService jobService; /** * Creates a {@code JobStepProgressInfoExecutionsController} that retrieves Job Step * Progress Execution information from a the {@link JobService} * - * @param jobService the service this controller will use for retrieving job step - * progress execution information. + * @param jobService The JobService this controller will use for retrieving job step + * progress execution information. + * @param taskJobService Queries both schemas. */ - @Autowired - public JobStepExecutionProgressController(JobService jobService) { - Assert.notNull(jobService, "repository must not be null"); + public JobStepExecutionProgressController(JobService jobService, TaskJobService taskJobService) { + this.taskJobService = taskJobService; this.jobService = jobService; } /** * Get the step execution progress for the given jobExecutions step. * - * @param jobExecutionId Id of the {@link JobExecution}, must not be null + * @param jobExecutionId Id of the {@link JobExecution}, must not be null * @param stepExecutionId Id of the {@link StepExecution}, must not be null * @return {@link StepExecutionProgressInfoResource} that has the progress info on the * given {@link StepExecution}. 
- * @throws NoSuchJobExecutionException Thrown if the respective {@link JobExecution} - * does not exist + * @throws NoSuchJobExecutionException Thrown if the respective {@link JobExecution} + * does not exist * @throws NoSuchStepExecutionException Thrown if the respective {@link StepExecution} - * does not exist + * does not exist */ - @RequestMapping(value = "/{stepExecutionId}/progress", method = RequestMethod.GET) + @GetMapping("/{stepExecutionId}/progress") @ResponseStatus(HttpStatus.OK) - public StepExecutionProgressInfoResource progress(@PathVariable long jobExecutionId, - @PathVariable long stepExecutionId) throws NoSuchStepExecutionException, NoSuchJobExecutionException { + public StepExecutionProgressInfoResource progress( + @PathVariable long jobExecutionId, + @PathVariable long stepExecutionId + ) throws NoSuchStepExecutionException, NoSuchJobExecutionException { try { + StepExecution stepExecution = jobService.getStepExecution(jobExecutionId, stepExecutionId); String stepName = stepExecution.getStepName(); if (stepName.contains(":partition")) { @@ -85,12 +91,11 @@ public StepExecutionProgressInfoResource progress(@PathVariable long jobExecutio } String jobName = stepExecution.getJobExecution().getJobInstance().getJobName(); StepExecutionHistory stepExecutionHistory = computeHistory(jobName, stepName); + final Assembler stepAssembler = new Assembler(); return stepAssembler.toModel(new StepExecutionProgressInfo(stepExecution, stepExecutionHistory)); - } - catch (NoSuchStepExecutionException e) { + } catch (NoSuchStepExecutionException e) { throw new NoSuchStepExecutionException(String.valueOf(stepExecutionId)); - } - catch (NoSuchJobExecutionException e) { + } catch (NoSuchJobExecutionException e) { throw new NoSuchJobExecutionException(String.valueOf(jobExecutionId)); } } @@ -98,7 +103,7 @@ public StepExecutionProgressInfoResource progress(@PathVariable long jobExecutio /** * Compute step execution history for the given jobs step. * - * @param jobName the name of the job + * @param jobName the name of the job * @param stepName the name of the step * @return the step execution history for the given step */ @@ -114,7 +119,7 @@ private StepExecutionHistory computeHistory(String jobName, String stepName) { } /** - * {@link org.springframework.hateoas.server.ResourceAssembler} implementation that converts + * {@link org.springframework.hateoas.server.RepresentationModelAssembler} implementation that converts * {@link StepExecutionProgressInfo}s to a {@link StepExecutionProgressInfoResource}. 
*/ private static class Assembler @@ -122,6 +127,7 @@ private static class Assembler public Assembler() { super(JobStepExecutionProgressController.class, StepExecutionProgressInfoResource.class); + } @Override @@ -132,8 +138,23 @@ public StepExecutionProgressInfoResource toModel(StepExecutionProgressInfo entit @Override protected StepExecutionProgressInfoResource instantiateModel(StepExecutionProgressInfo entity) { - return new StepExecutionProgressInfoResource(entity.getStepExecution(), entity.getStepExecutionHistory(), + StepExecutionProgressInfoResource resource = new StepExecutionProgressInfoResource(entity.getStepExecution(), entity.getStepExecutionHistory(), entity.getEstimatedPercentComplete(), entity.isFinished(), entity.getDuration()); + addLink(resource); + return resource; + } + + private void addLink(StepExecutionProgressInfoResource resource) { + try { + resource.add( + linkTo( + methodOn(JobStepExecutionProgressController.class) + .progress(resource.getStepExecution().getJobExecutionId(), resource.getStepExecution().getId()) + ).withRel("progress") + ); + } catch (NoSuchStepExecutionException | NoSuchJobExecutionException e) { + throw new RuntimeException(e); + } } } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RestControllerAdvice.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RestControllerAdvice.java index eaa91d68dc..7a73b0ee7e 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RestControllerAdvice.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RestControllerAdvice.java @@ -16,9 +16,8 @@ package org.springframework.cloud.dataflow.server.controller; -import javax.validation.ConstraintViolation; -import javax.validation.ConstraintViolationException; - +import jakarta.validation.ConstraintViolation; +import jakarta.validation.ConstraintViolationException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -95,14 +94,20 @@ public VndErrors onException(Exception e) { * * @param e one of the exceptions, {@link AppAlreadyRegisteredException}, * {@link DuplicateStreamDefinitionException}, {@link DuplicateTaskException}, - * {@link StreamAlreadyDeployedException}, {@link StreamAlreadyDeployingException}, or - * {@link StreamAlreadyDeployingException} + * {@link StreamAlreadyDeployedException}, {@link StreamAlreadyDeployingException}, + * {@link StreamAlreadyDeployingException}, or {@link ApiNotSupportedException} * @return the error response in JSON format with media type * application/vnd.error+json */ - @ExceptionHandler({ AppAlreadyRegisteredException.class, DuplicateStreamDefinitionException.class, - DuplicateTaskException.class, StreamAlreadyDeployedException.class, StreamAlreadyDeployingException.class, - UnregisterAppException.class, InvalidCTRLaunchRequestException.class}) + @ExceptionHandler({ + AppAlreadyRegisteredException.class, + DuplicateStreamDefinitionException.class, + DuplicateTaskException.class, + StreamAlreadyDeployedException.class, + StreamAlreadyDeployingException.class, + UnregisterAppException.class, + InvalidCTRLaunchRequestException.class + }) @ResponseStatus(HttpStatus.CONFLICT) @ResponseBody public VndErrors onConflictException(Exception e) { @@ -145,8 +150,8 @@ public VndErrors onUnprocessableEntityException(Exception e) { * {@link NoSuchTaskExecutionException}, {@link 
NoSuchJobExecutionException}, * {@link NoSuchJobInstanceException}, {@link NoSuchJobException}, * {@link NoSuchStepExecutionException}, - * {@link NoSuchAppException}, or - * {@link NoSuchAppInstanceException} + * {@link NoSuchAppException}, + * {@link NoSuchAppInstanceException}, or {@link NoSuchScheduleException} * @return the error response in JSON format with media type * application/vnd.error+json */ @@ -155,7 +160,7 @@ public VndErrors onUnprocessableEntityException(Exception e) { NoSuchTaskDefinitionException.class, NoSuchTaskExecutionException.class, NoSuchJobExecutionException.class, NoSuchJobInstanceException.class, NoSuchJobException.class, NoSuchStepExecutionException.class, NoSuchTaskBatchException.class, NoSuchAppException.class, NoSuchAppInstanceException.class, - NoSuchScheduleException.class }) + NoSuchScheduleException.class}) @ResponseStatus(HttpStatus.NOT_FOUND) @ResponseBody public VndErrors onNotFoundException(Exception e) { @@ -179,7 +184,7 @@ public VndErrors onNotFoundException(Exception e) { * @return the error response in JSON format with media type * application/vnd.error+json */ - @ExceptionHandler({ MissingServletRequestParameterException.class, HttpMessageNotReadableException.class, + @ExceptionHandler({ ApiNotSupportedException.class, MissingServletRequestParameterException.class, HttpMessageNotReadableException.class, UnsatisfiedServletRequestParameterException.class, MethodArgumentTypeMismatchException.class, InvalidDateRangeException.class, CannotDeleteNonParentTaskExecutionException.class, InvalidStreamDefinitionException.class, CreateScheduleException.class, OffsetOutOfBoundsException.class, @@ -193,8 +198,7 @@ public VndErrors onClientGenericBadRequest(Exception e) { } String message = null; - if (e instanceof MethodArgumentTypeMismatchException) { - final MethodArgumentTypeMismatchException methodArgumentTypeMismatchException = (MethodArgumentTypeMismatchException) e; + if (e instanceof MethodArgumentTypeMismatchException methodArgumentTypeMismatchException) { final Class requiredType = methodArgumentTypeMismatchException.getRequiredType(); final Class enumType; diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RootController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RootController.java index 0ef1e22794..cd1826a86a 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RootController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RootController.java @@ -42,10 +42,12 @@ import org.springframework.hateoas.RepresentationModel; import org.springframework.hateoas.server.EntityLinks; import org.springframework.hateoas.server.ExposesResourceFor; -import org.springframework.hateoas.server.mvc.WebMvcLinkBuilder; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + /** * Controller for the root resource of the Data Flow server.
* @@ -54,6 +56,7 @@ * @author Glenn Renfro * @author Mark Fisher * @author Gunnar Hillert + * @author Corneil du Plessis */ @RestController @EnableConfigurationProperties(FeaturesProperties.class) @@ -87,8 +90,8 @@ public RootController(EntityLinks entityLinks) { public RootResource info() { RootResource root = new RootResource(Version.REVISION); - root.add(WebMvcLinkBuilder.linkTo(UiController.class).withRel("dashboard")); - root.add(WebMvcLinkBuilder.linkTo(AuditRecordController.class).withRel("audit-records")); + root.add(linkTo(UiController.class).withRel("dashboard")); + root.add(linkTo(AuditRecordController.class).withRel("audit-records")); if (featuresProperties.isStreamsEnabled()) { root.add(entityLinks.linkToCollectionResource(StreamDefinitionResource.class) @@ -99,47 +102,60 @@ public RootResource info() { root.add(unescapeTemplateVariables(entityLinks.linkToItemResource(StreamAppStatusResource.class, "{name}") .withRel("streams/validation"))); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(RuntimeStreamsController.class).status(null, null, null)).withRel("runtime/streams")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(RuntimeStreamsController.class).streamStatus(null, null, null)).withRel("runtime/streams/{streamNames}")); + root.add(linkTo(methodOn(RuntimeStreamsController.class).status(null, null, null)).withRel("runtime/streams")); + root.add(linkTo(methodOn(RuntimeStreamsController.class).streamStatus(null, null, null)).withRel("runtime/streams/{streamNames}")); + + root.add(linkTo(methodOn(RuntimeAppsController.class).list(null, null)).withRel("runtime/apps")); + root.add(linkTo(methodOn(RuntimeAppsController.class).display(null)).withRel("runtime/apps/{appId}")); + + root.add(linkTo(methodOn(RuntimeAppInstanceController.class).list(null, null, null)).withRel("runtime/apps/{appId}/instances")); + root.add(linkTo(methodOn(RuntimeAppInstanceController.class).display(null, null)).withRel("runtime/apps/{appId}/instances/{instanceId}")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(RuntimeAppsController.class).list(null, null)).withRel("runtime/apps")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(RuntimeAppsController.class).display(null)).withRel("runtime/apps/{appId}")); + root.add(linkTo(methodOn(RuntimeAppInstanceController.class) + .getFromActuator(null, null, null)).withRel("runtime/apps/{appId}/instances/{instanceId}/actuator")); + root.add(linkTo(methodOn(RuntimeAppInstanceController.class) + .postToActuator(null, null, null)).withRel("runtime/apps/{appId}/instances/{instanceId}/actuator")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(RuntimeAppInstanceController.class).list(null, null, null)).withRel("runtime/apps/{appId}/instances")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(RuntimeAppInstanceController.class).display(null, null)).withRel("runtime/apps/{appId}/instances/{instanceId}")); + root.add(linkTo(methodOn(RuntimeAppInstanceController.class) + .postToUrl(null,null, null, null)).withRel("runtime/apps/{appId}/instances/{instanceId}/post")); - root.add(WebMvcLinkBuilder.linkTo(StreamDeploymentController.class).withRel("streams/deployments")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(StreamDeploymentController.class).info(null, false)).withRel("streams/deployments/{name}{?reuse-deployment-properties}")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(StreamDeploymentController.class).deploy(null, 
null)).withRel("streams/deployments/{name}")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(StreamDeploymentController.class).history(null)).withRel("streams/deployments/history/{name}")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(StreamDeploymentController.class).manifest(null, null)).withRel("streams/deployments/manifest/{name}/{version}")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(StreamDeploymentController.class).platformList()).withRel("streams/deployments/platform/list")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(StreamDeploymentController.class).rollback(null, null)).withRel("streams/deployments/rollback/{name}/{version}")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(StreamDeploymentController.class).update(null, null)).withRel("streams/deployments/update/{name}")); + root.add(linkTo(StreamDeploymentController.class).withRel("streams/deployments")); + root.add(linkTo(methodOn(StreamDeploymentController.class).info(null, false)).withRel("streams/deployments/{name}{?reuse-deployment-properties}")); + root.add(linkTo(methodOn(StreamDeploymentController.class).deploy(null, null)).withRel("streams/deployments/{name}")); + root.add(linkTo(methodOn(StreamDeploymentController.class).history(null)).withRel("streams/deployments/history/{name}")); + root.add(linkTo(methodOn(StreamDeploymentController.class).manifest(null, null)).withRel("streams/deployments/manifest/{name}/{version}")); + root.add(linkTo(methodOn(StreamDeploymentController.class).platformList()).withRel("streams/deployments/platform/list")); + root.add(linkTo(methodOn(StreamDeploymentController.class).rollback(null, null)).withRel("streams/deployments/rollback/{name}/{version}")); + root.add(linkTo(methodOn(StreamDeploymentController.class).update(null, null)).withRel("streams/deployments/update/{name}")); root.add(unescapeTemplateVariables(entityLinks.linkToItemResource(StreamDeploymentResource.class, "{name}").withRel("streams/deployments/deployment"))); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(StreamDeploymentController.class).scaleApplicationInstances(null, null, null, null)).withRel("streams/deployments/scale/{streamName}/{appName}/instances/{count}")); - root.add(WebMvcLinkBuilder.linkTo(StreamLogsController.class).withRel("streams/logs")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(StreamLogsController.class).getLog(null)).withRel("streams/logs/{streamName}")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(StreamLogsController.class).getLog(null, null)).withRel("streams/logs/{streamName}/{appName}")); + root.add(linkTo(methodOn(StreamDeploymentController.class).scaleApplicationInstances(null, null, null, null)).withRel("streams/deployments/scale/{streamName}/{appName}/instances/{count}")); + root.add(linkTo(StreamLogsController.class).withRel("streams/logs")); + root.add(linkTo(methodOn(StreamLogsController.class).getLog(null)).withRel("streams/logs/{streamName}")); + root.add(linkTo(methodOn(StreamLogsController.class).getLog(null, null)).withRel("streams/logs/{streamName}/{appName}")); } + if (featuresProperties.isTasksEnabled()) { + root.add(entityLinks.linkToCollectionResource(LauncherResource.class).withRel("tasks/platforms")); root.add(entityLinks.linkToCollectionResource(TaskDefinitionResource.class).withRel("tasks/definitions")); root.add(unescapeTemplateVariables(entityLinks.linkToItemResource(TaskDefinitionResource.class, "{name}") 
.withRel("tasks/definitions/definition"))); root.add(entityLinks.linkToCollectionResource(TaskExecutionResource.class).withRel("tasks/executions")); + root.add(linkTo(methodOn(TaskExecutionController.class).viewByExternal(null,null)).withRel("tasks/executions/external")); + root.add(linkTo(methodOn(TaskExecutionController.class).launchBoot3(null,null,null)).withRel("tasks/executions/launch")); String taskTemplated = entityLinks.linkToCollectionResource(TaskExecutionResource.class).getHref() + "{?name}"; root.add(Link.of(taskTemplated).withRel("tasks/executions/name")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(TaskExecutionController.class) + root.add(linkTo(methodOn(TaskExecutionController.class) .getCurrentTaskExecutionsInfo()).withRel("tasks/executions/current")); - root.add(unescapeTemplateVariables(entityLinks.linkToItemResource(TaskExecutionResource.class, "{id}") - .withRel("tasks/executions/execution"))); + root.add(unescapeTemplateVariables(linkTo(methodOn(TaskExecutionController.class).view(null)).withRel("tasks/executions/execution"))); root.add(unescapeTemplateVariables(entityLinks.linkToItemResource(TaskAppStatusResource.class, "{name}") .withRel("tasks/validation"))); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(TasksInfoController.class).getInfo(null, null)).withRel("tasks/info/executions")); - root.add(WebMvcLinkBuilder.linkTo(WebMvcLinkBuilder.methodOn(TaskLogsController.class).getLog(null, null)).withRel("tasks/logs")); + root.add(linkTo(methodOn(TasksInfoController.class).getInfo(null, null, null)).withRel("tasks/info/executions")); + root.add(linkTo(methodOn(TaskLogsController.class).getLog(null, null)).withRel("tasks/logs")); + root.add(linkTo(methodOn(TaskExecutionThinController.class).listTasks(null, null)).withRel("tasks/thinexecutions")); + root.add(linkTo(methodOn(TaskExecutionThinController.class).retrieveTasksByName(null,null, null)).withRel("tasks/thinexecutions/name")); if (featuresProperties.isSchedulesEnabled()) { root.add(entityLinks.linkToCollectionResource(ScheduleInfoResource.class).withRel("tasks/schedules")); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeAppInstanceController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeAppInstanceController.java index 179e6d4e2f..a0033cdc1b 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeAppInstanceController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeAppInstanceController.java @@ -1,5 +1,5 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,55 +15,80 @@ */ package org.springframework.cloud.dataflow.server.controller; +import java.time.Duration; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; +import java.util.Set; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.cloud.dataflow.rest.resource.AppInstanceStatusResource; +import org.springframework.cloud.dataflow.rest.util.ArgumentSanitizer; import org.springframework.cloud.dataflow.server.controller.support.ControllerUtils; import org.springframework.cloud.dataflow.server.stream.StreamDeployer; import org.springframework.cloud.deployer.spi.app.AppInstanceStatus; import org.springframework.cloud.deployer.spi.app.AppStatus; import org.springframework.cloud.deployer.spi.app.DeploymentState; +import org.springframework.cloud.skipper.domain.ActuatorPostRequest; import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; import org.springframework.data.web.PagedResourcesAssembler; import org.springframework.hateoas.PagedModel; import org.springframework.hateoas.server.ExposesResourceFor; import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpMethod; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.util.CollectionUtils; +import org.springframework.util.StringUtils; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestHeader; import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; +import org.springframework.web.client.RestTemplate; + +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; /** * @author Mark Pollack + * @author Chris Bono */ @RestController @RequestMapping("/runtime/apps/{appId}/instances") @ExposesResourceFor(AppInstanceStatusResource.class) public class RuntimeAppInstanceController { + private final static Logger logger = LoggerFactory.getLogger(RuntimeAppInstanceController.class); - private static final Comparator INSTANCE_SORTER = new Comparator() { - @Override - public int compare(AppInstanceStatus i1, AppInstanceStatus i2) { - return i1.getId().compareTo(i2.getId()); - } - }; + private static final Comparator INSTANCE_SORTER = + (Comparator) (i1, i2) -> i1.getId().compareTo(i2.getId()); private final StreamDeployer streamDeployer; + private final RestTemplate restTemplate; + /** * Construct a new RuntimeAppInstanceController + * * @param streamDeployer the stream deployer to use */ public RuntimeAppInstanceController(StreamDeployer streamDeployer) { this.streamDeployer = streamDeployer; + this.restTemplate = new RestTemplate(); } @RequestMapping public PagedModel list(Pageable pageable, @PathVariable String appId, - PagedResourcesAssembler assembler) { + PagedResourcesAssembler assembler) { AppStatus status = streamDeployer.getAppStatus(appId); if (status.getState().equals(DeploymentState.unknown)) { throw new 
NoSuchAppException(appId); @@ -87,6 +112,89 @@ public AppInstanceStatusResource display(@PathVariable String appId, @PathVariab return new RuntimeAppInstanceController.InstanceAssembler(status).toModel(appInstanceStatus); } + @GetMapping("/{instanceId}/actuator") + public ResponseEntity getFromActuator( + @PathVariable String appId, + @PathVariable String instanceId, + @RequestParam String endpoint) { + return ResponseEntity.ok(streamDeployer.getFromActuator(appId, instanceId, endpoint)); + } + + @PostMapping("/{instanceId}/actuator") + public ResponseEntity postToActuator( + @PathVariable String appId, + @PathVariable String instanceId, + @RequestBody ActuatorPostRequest actuatorPostRequest) { + streamDeployer.postToActuator(appId, instanceId, actuatorPostRequest); + return new ResponseEntity<>(HttpStatus.CREATED); + } + + @PostMapping("/{instanceId}/post") + public ResponseEntity postToUrl( + @PathVariable String appId, + @PathVariable String instanceId, + @RequestBody String data, + @RequestHeader HttpHeaders headers) { + if (logger.isDebugEnabled()) { + ArgumentSanitizer sanitizer = new ArgumentSanitizer(); + logger.debug("postToUrl:{}:{}:{}:{}", appId, instanceId, data, sanitizer.sanitizeHeaders(headers)); + } + AppStatus status = streamDeployer.getAppStatus(appId); + if (status.getState().equals(DeploymentState.unknown)) { + return ResponseEntity.status(HttpStatus.NOT_FOUND).body("appId not found:" + appId); + } + AppInstanceStatus appInstanceStatus = status.getInstances().get(instanceId); + if (appInstanceStatus == null) { + return ResponseEntity.status(HttpStatus.NOT_FOUND).body("instanceId not found:" + instanceId); + } + String port = appInstanceStatus.getAttributes().get("service.external.port"); + if (!StringUtils.hasText(port)) { + port = "8080"; + } + String ip = appInstanceStatus.getAttributes().get("pod.ip"); + if (!StringUtils.hasText(ip)) { + return ResponseEntity.status(HttpStatus.PRECONDITION_REQUIRED).body("url not found on resource"); + } + String url = String.format("http://%s:%s", ip, port); + // TODO determine if some headers need to be removed or added + HttpEntity<String> entity = new HttpEntity<>(data, headers); + if (logger.isDebugEnabled()) { + ArgumentSanitizer sanitizer = new ArgumentSanitizer(); + logger.debug("postToUrl:{}:{}:{}:{}:{}", appId, instanceId, url, data, sanitizer.sanitizeHeaders(headers)); + } + waitForUrl(url, Duration.ofSeconds(30)); + ResponseEntity<String> response = this.restTemplate.exchange(url, HttpMethod.POST, entity, String.class); + return ResponseEntity.status(response.getStatusCode()).body(response.getBody()); + } + + private void waitForUrl(String uri, Duration timeout) { + // Poll the instance with OPTIONS until it answers, treating HTTP 500 as reachable, + // and give up once the timeout elapses. + final long waitUntilMillis = System.currentTimeMillis() + timeout.toMillis(); + do { + try { + Set<HttpMethod> allowed = this.restTemplate.optionsForAllow(uri); + if (!CollectionUtils.isEmpty(allowed)) { + break; + } + } catch (Throwable x) { + logger.trace("waitForUrl:exception:" + x); + final String message = x.getMessage() != null ? x.getMessage() : ""; + if (message.contains("UnknownHostException")) { + // DNS not ready yet; fall through to the sleep below and retry. + logger.trace("waitForUrl:retry:exception:" + x); + } else if (message.contains("500")) { + logger.trace("waitForUrl:accepted:exception:" + x); + break; + } + } + try { + Thread.sleep(2000L); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + } while (System.currentTimeMillis() < waitUntilMillis); + } + static class InstanceAssembler extends RepresentationModelAssemblerSupport { @@ -99,7 +207,22 @@ static class InstanceAssembler @Override public AppInstanceStatusResource
toModel(AppInstanceStatus entity) { - return createModelWithId("/" + entity.getId(), entity, owningApp.getDeploymentId()); + AppInstanceStatusResource resource = createModelWithId("/" + entity.getId(), entity, owningApp.getDeploymentId()); + if (logger.isDebugEnabled()) { + ArgumentSanitizer sanitizer = new ArgumentSanitizer(); + logger.debug("toModel:{}:{}", resource.getInstanceId(), sanitizer.sanitizeProperties(resource.getAttributes())); + } + if (resource.getAttributes() != null && resource.getAttributes().containsKey("url")) { + resource.add(linkTo( + methodOn(RuntimeAppInstanceController.class).postToUrl( + owningApp.getDeploymentId(), + resource.getInstanceId(), + null, + null) + ).withRel("post")); + logger.debug("toModel:resource={}", resource.getLinks()); + } + return resource; } @Override diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeAppsController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeAppsController.java index 5bde7ff959..8efe7c89dd 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeAppsController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeAppsController.java @@ -113,7 +113,7 @@ protected AppStatusResource instantiateModel(AppStatus entity) { for (AppInstanceStatus appInstanceStatus : instanceStatuses) { instanceStatusResources.add(instanceAssembler.toModel(appInstanceStatus)); } - resource.setInstances(new CollectionModel<>(instanceStatusResources)); + resource.setInstances(CollectionModel.of(instanceStatusResources)); return resource; } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeStreamsController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeStreamsController.java index 24b38edb54..462f5c0ecc 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeStreamsController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeStreamsController.java @@ -42,9 +42,9 @@ import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.util.Assert; import org.springframework.util.CollectionUtils; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; @@ -75,14 +75,18 @@ public RuntimeStreamsController(StreamDeployer streamDeployer) { } /** + * @param names The names of streams to include in result. 
* @param pageable the page * @param assembler the resource assembler * * @return a paged model for stream statuses */ - @RequestMapping(method = RequestMethod.GET) - public PagedModel status(@RequestParam(value = "names", required = false) String[] names, Pageable pageable, - PagedResourcesAssembler>> assembler) { + @GetMapping + public PagedModel status( + @RequestParam(required = false) String[] names, + Pageable pageable, + PagedResourcesAssembler>> assembler + ) { List streamNames = (names!= null) ? Arrays.asList(names): new ArrayList<>(); if (streamNames.isEmpty()) { streamNames = this.streamDeployer.getStreams(); @@ -117,9 +121,12 @@ private List>> getStreamStatusList(String[] streamN /** * @param streamNames comma separated list of streams to retrieve the statuses for + * @param pageable Pageable required on subsequent calls. + * @param assembler The resource assembler for the results. + * @return paged results. */ - @RequestMapping(value = "/{streamNames}", method = RequestMethod.GET) - public PagedModel streamStatus(@PathVariable("streamNames") String[] streamNames, Pageable pageable, + @GetMapping("/{streamNames}") + public PagedModel streamStatus(@PathVariable String[] streamNames, Pageable pageable, PagedResourcesAssembler>> assembler) { return assembler.toModel(new PageImpl<>(getStreamStatusList(getPagedStreamNames(pageable, Arrays.asList(streamNames))), pageable, streamNames.length), statusAssembler); @@ -157,7 +164,7 @@ private StreamStatusResource toStreamStatus(String streamName, List a } } } - streamStatusResource.setApplications(new CollectionModel<>(appStatusResources)); + streamStatusResource.setApplications(CollectionModel.of(appStatusResources)); return streamStatusResource; } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDefinitionController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDefinitionController.java index d75f8d046f..c9be10fede 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDefinitionController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDefinitionController.java @@ -20,6 +20,7 @@ import java.util.Collections; import java.util.LinkedList; import java.util.List; +import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -44,10 +45,14 @@ import org.springframework.hateoas.server.ExposesResourceFor; import org.springframework.hateoas.server.RepresentationModelAssembler; import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; import org.springframework.util.Assert; +import org.springframework.web.bind.annotation.DeleteMapping; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; @@ -89,16 +94,16 @@ public class StreamDefinitionController { /** * Create a {@code StreamDefinitionController} that delegates to {@link 
StreamService}. * - * @param streamService the stream service to use - * @param streamDefinitionService the stream definition service to use - * @param appRegistryService the app registry service to use + * @param streamService the stream service to use + * @param streamDefinitionService the stream definition service to use + * @param appRegistryService the app registry service to use * @param streamDefinitionAssemblerProvider the stream definition assembler provider to use - * @param appRegistrationAssemblerProvider the app registry assembler provider to use - * */ + * @param appRegistrationAssemblerProvider the app registry assembler provider to use + */ public StreamDefinitionController(StreamService streamService, StreamDefinitionService streamDefinitionService, - AppRegistryService appRegistryService, - StreamDefinitionAssemblerProvider streamDefinitionAssemblerProvider, - AppRegistrationAssemblerProvider appRegistrationAssemblerProvider) { + AppRegistryService appRegistryService, + StreamDefinitionAssemblerProvider streamDefinitionAssemblerProvider, + AppRegistrationAssemblerProvider appRegistrationAssemblerProvider) { Assert.notNull(streamService, "StreamService must not be null"); Assert.notNull(streamDefinitionService, "StreamDefinitionService must not be null"); Assert.notNull(appRegistryService, "AppRegistryService must not be null"); @@ -114,40 +119,81 @@ public StreamDefinitionController(StreamService streamService, StreamDefinitionS /** * Return a page-able list of {@link StreamDefinitionResource} defined streams. * - * @param pageable Pagination information + * @param pageable Pagination information * @param assembler assembler for {@link StreamDefinition} - * @param search optional findByTaskNameContains parameter + * @param search optional findByTaskNameContains parameter * @return list of stream definitions */ - @RequestMapping(value = "", method = RequestMethod.GET) + @GetMapping("") @ResponseStatus(HttpStatus.OK) - public PagedModel list(Pageable pageable, - @RequestParam(required = false) String search, PagedResourcesAssembler assembler) { + public PagedModel list( + Pageable pageable, + @RequestParam(required = false) String search, + PagedResourcesAssembler assembler + ) { Page streamDefinitions = this.streamService.findDefinitionByNameContains(pageable, search); return assembler.toModel(streamDefinitions, this.streamDefinitionAssemblerProvider.getStreamDefinitionAssembler(streamDefinitions.getContent())); } /** - * Create a new stream. + * Create a new stream and optionally deploy it. + *
+ * Differs from {@link #saveWithDeployProps} by not accepting deployment properties and consuming + * {@link MediaType#APPLICATION_FORM_URLENCODED} request content (required by the Dataflow Shell). * - * @param name stream name - * @param dsl DSL definition for stream - * @param deploy if {@code true}, the stream is deployed upon creation (default is - * {@code false}) + * @param name stream name + * @param dsl DSL definition for stream + * @param deploy if {@code true}, the stream is deployed upon creation (default is + * {@code false}) * @param description description of the stream definition * @return the created stream definition * @throws DuplicateStreamDefinitionException if a stream definition with the same name - * already exists - * @throws InvalidStreamDefinitionException if there errors in parsing the stream DSL, - * resolving the name, or type of applications in the stream + * already exists + * @throws InvalidStreamDefinitionException if there are errors parsing the stream DSL, + * resolving the name, or type of applications in the stream + */ + @PostMapping(value = "", consumes = MediaType.APPLICATION_FORM_URLENCODED_VALUE) + @ResponseStatus(HttpStatus.CREATED) + public StreamDefinitionResource save( + @RequestParam String name, + @RequestParam("definition") String dsl, + @RequestParam(defaultValue = "") String description, + @RequestParam(defaultValue = "false") boolean deploy + ) { + StreamDefinition streamDefinition = this.streamService.createStream(name, dsl, description, deploy, null); + return ((RepresentationModelAssembler) + this.streamDefinitionAssemblerProvider.getStreamDefinitionAssembler(Collections.singletonList(streamDefinition))).toModel(streamDefinition); + } + + /** + * Create a new stream and optionally deploy it. + *
    + * Differs from {@link #save} by accepting deployment properties and consuming + * {@link MediaType#APPLICATION_JSON} request content. + * + * @param name stream name + * @param dsl DSL definition for stream + * @param deploy if {@code true}, the stream is deployed upon creation (default is + * {@code false}) + * @param deploymentProperties the optional deployment properties to use when the stream is deployed upon creation + * @param description description of the stream definition + * @return the created stream definition + * @throws DuplicateStreamDefinitionException if a stream definition with the same name + * already exists + * @throws InvalidStreamDefinitionException if there are errors parsing the stream DSL, + * resolving the name, or type of applications in the stream */ - @RequestMapping(value = "", method = RequestMethod.POST) + @PostMapping(value = "", consumes = MediaType.APPLICATION_JSON_VALUE) @ResponseStatus(HttpStatus.CREATED) - public StreamDefinitionResource save(@RequestParam("name") String name, @RequestParam("definition") String dsl, - @RequestParam(value = "description", defaultValue = "") String description, - @RequestParam(value = "deploy", defaultValue = "false") boolean deploy) { - StreamDefinition streamDefinition = this.streamService.createStream(name, dsl, description, deploy); + public StreamDefinitionResource saveWithDeployProps( + @RequestParam String name, + @RequestParam("definition") String dsl, + @RequestParam(defaultValue = "") String description, + @RequestParam(defaultValue = "false") boolean deploy, + @RequestBody(required = false) Map deploymentProperties + ) { + StreamDefinition streamDefinition = this.streamService.createStream(name, dsl, description, deploy, deploymentProperties); return ((RepresentationModelAssembler) this.streamDefinitionAssemblerProvider.getStreamDefinitionAssembler(Collections.singletonList(streamDefinition))).toModel(streamDefinition); } @@ -157,9 +203,9 @@ public StreamDefinitionResource save(@RequestParam("name") String name, @Request * * @param name the name of an existing stream definition (required) */ - @RequestMapping(value = "/{name}", method = RequestMethod.DELETE) + @DeleteMapping("/{name}") @ResponseStatus(HttpStatus.OK) - public void delete(@PathVariable("name") String name) { + public void delete(@PathVariable String name) { this.streamService.deleteStream(name); } @@ -167,18 +213,20 @@ public void delete(@PathVariable("name") String name) { * Return a list of related stream definition resources based on the given stream name. * Related streams include the main stream and the tap stream(s) on the main stream. 
* - * @param pageable Pagination information - * @param name the name of an existing stream definition (required) - * @param nested if should recursively findByTaskNameContains for related stream definitions + * @param pageable Pagination information + * @param name the name of an existing stream definition (required) + * @param nested if should recursively findByTaskNameContains for related stream definitions * @param assembler resource assembler for stream definition * @return a list of related stream definitions */ - @RequestMapping(value = "/{name}/related", method = RequestMethod.GET) + @GetMapping("/{name}/related") @ResponseStatus(HttpStatus.OK) - public PagedModel listRelated(Pageable pageable, - @PathVariable("name") String name, - @RequestParam(value = "nested", required = false, defaultValue = "false") boolean nested, - PagedResourcesAssembler assembler) { + public PagedModel listRelated( + Pageable pageable, + @PathVariable String name, + @RequestParam(required = false, defaultValue = "false") boolean nested, + PagedResourcesAssembler assembler + ) { List result = this.streamService.findRelatedStreams(name, nested); Page page = new PageImpl<>(result, pageable, result.size()); return assembler.toModel(page, @@ -192,21 +240,21 @@ public PagedModel listRelated(Pageable pagea * @param name the name of an existing stream definition (required) * @return the stream definition */ - @RequestMapping(value = "/{name}", method = RequestMethod.GET) + @GetMapping("/{name}") @ResponseStatus(HttpStatus.OK) - public StreamDefinitionResource display(@PathVariable("name") String name) { + public StreamDefinitionResource display(@PathVariable String name) { StreamDefinition streamDefinition = this.streamService.findOne(name); return this.streamDefinitionAssemblerProvider.getStreamDefinitionAssembler(Collections.singletonList(streamDefinition)).toModel(streamDefinition); } - @RequestMapping(value = "/{name}/applications", method = RequestMethod.GET) + @GetMapping("/{name}/applications") @ResponseStatus(HttpStatus.OK) - public List listApplications(@PathVariable("name") String name) { + public List listApplications(@PathVariable String name) { StreamDefinition definition = this.streamService.findOne(name); LinkedList streamAppDefinitions = this.streamDefinitionService.getAppDefinitions(definition); List appRegistrations = new ArrayList<>(); - for (StreamAppDefinition streamAppDefinition: streamAppDefinitions) { + for (StreamAppDefinition streamAppDefinition : streamAppDefinitions) { AppRegistrationResource appRegistrationResource = this.appRegistryAssembler.toModel(this.appRegistryService.find(streamAppDefinition.getRegisteredAppName(), streamAppDefinition.getApplicationType())); appRegistrationResource.setLabel(streamAppDefinition.getName()); @@ -218,7 +266,7 @@ public List listApplications(@PathVariable("n /** * Request removal of all stream definitions. 
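To make the split between the two creation endpoints above concrete, here is a client-side sketch (not part of the patch). It assumes a local server on port 9393 and invented stream names: save is driven with a form body (as the Dataflow Shell does), saveWithDeployProps with query parameters plus a JSON body of deployment properties.

```java
import java.net.URI;
import java.util.Map;

import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.util.UriComponentsBuilder;

public class StreamCreationClientSketch {
	public static void main(String[] args) {
		RestTemplate rest = new RestTemplate();

		// save(...): form-encoded request, no deployment properties.
		MultiValueMap<String, String> form = new LinkedMultiValueMap<>();
		form.add("name", "ticktock");
		form.add("definition", "time | log");
		form.add("deploy", "false");
		HttpHeaders formHeaders = new HttpHeaders();
		formHeaders.setContentType(MediaType.APPLICATION_FORM_URLENCODED);
		rest.postForEntity("http://localhost:9393/streams/definitions",
				new HttpEntity<>(form, formHeaders), String.class);

		// saveWithDeployProps(...): deployment properties travel in the JSON body,
		// while name/definition/deploy stay as query parameters.
		URI uri = UriComponentsBuilder.fromHttpUrl("http://localhost:9393/streams/definitions")
				.queryParam("name", "ticktock2")
				.queryParam("definition", "time | log")
				.queryParam("deploy", "true")
				.encode()
				.build()
				.toUri();
		HttpHeaders jsonHeaders = new HttpHeaders();
		jsonHeaders.setContentType(MediaType.APPLICATION_JSON);
		rest.postForEntity(uri,
				new HttpEntity<>(Map.of("deployer.log.count", "2"), jsonHeaders), String.class);
	}
}
```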
*/ - @RequestMapping(value = "", method = RequestMethod.DELETE) + @DeleteMapping("") @ResponseStatus(HttpStatus.OK) public void deleteAll() { this.streamService.deleteAll(); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDeploymentController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDeploymentController.java index f57ec71239..d6d2e0829c 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDeploymentController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDeploymentController.java @@ -19,6 +19,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Map; +import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -29,6 +30,7 @@ import org.springframework.cloud.dataflow.rest.UpdateStreamRequest; import org.springframework.cloud.dataflow.rest.resource.DeploymentStateResource; import org.springframework.cloud.dataflow.rest.resource.StreamDeploymentResource; +import org.springframework.cloud.dataflow.rest.util.ArgumentSanitizer; import org.springframework.cloud.dataflow.server.controller.support.ControllerUtils; import org.springframework.cloud.dataflow.server.repository.NoSuchStreamDefinitionException; import org.springframework.cloud.dataflow.server.repository.StreamDefinitionRepository; @@ -42,10 +44,12 @@ import org.springframework.http.ResponseEntity; import org.springframework.util.Assert; import org.springframework.util.StringUtils; +import org.springframework.web.bind.annotation.DeleteMapping; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; @@ -80,16 +84,19 @@ public class StreamDeploymentController { */ private final StreamDefinitionRepository repository; + private final ArgumentSanitizer sanitizer = new ArgumentSanitizer(); + /** * Construct a new UpdatableStreamDeploymentController, given a - * {@link StreamDeploymentController} and {@link StreamService} + * {@link StreamDeploymentController} and {@link StreamService} and {@link StreamDefinitionService} * - * @param repository the repository this controller will use for stream CRUD operations - * @param streamService the underlying UpdatableStreamService to deploy the stream + * @param repository the repository this controller will use for stream CRUD operations + * @param streamService the underlying UpdatableStreamService to deploy the stream + * @param streamDefinitionService the StreamDefinitionService */ public StreamDeploymentController(StreamDefinitionRepository repository, - StreamService streamService, - StreamDefinitionService streamDefinitionService) { + StreamService streamService, + StreamDefinitionService streamDefinitionService) { Assert.notNull(repository, "StreamDefinitionRepository must not be null"); Assert.notNull(streamService, "StreamService must not be null"); @@ -102,50 +109,61 
@@ public StreamDeploymentController(StreamDefinitionRepository repository, /** * Scale application instances in a deployed stream. + * * @param streamName the name of an existing stream definition (required) - * @param appName in stream application name to scale (required) - * @param count number of instances for the selected stream application (required) + * @param appName in stream application name to scale (required) + * @param count number of instances for the selected stream application (required) * @param properties scale deployment specific properties (optional) * @return response without a body */ - @RequestMapping(value = "/scale/{streamName}/{appName}/instances/{count}", method = RequestMethod.POST) + @PostMapping("/scale/{streamName}/{appName}/instances/{count}") public ResponseEntity scaleApplicationInstances( - @PathVariable("streamName") String streamName, - @PathVariable("appName") String appName, - @PathVariable("count") Integer count, + @PathVariable String streamName, + @PathVariable String appName, + @PathVariable Integer count, @RequestBody(required = false) Map properties) { - logger.info(String.format("Scale stream: %s, apps: %s instances to %s", streamName, appName, count)); + logger.info("Scale stream: {}, apps: {} instances to {}", streamName, appName, count); this.streamService.scaleApplicationInstances(streamName, appName, count, properties); return new ResponseEntity<>(HttpStatus.CREATED); } - @RequestMapping(value = "/update/{name}", method = RequestMethod.POST) - public ResponseEntity update(@PathVariable("name") String name, - @RequestBody UpdateStreamRequest updateStreamRequest) { + @PostMapping("/update/{name}") + public ResponseEntity update(@PathVariable String name, + @RequestBody UpdateStreamRequest updateStreamRequest) { this.streamService.updateStream(name, updateStreamRequest); return new ResponseEntity<>(HttpStatus.CREATED); } - @RequestMapping(value = "/rollback/{name}/{version}", method = RequestMethod.POST) - public ResponseEntity rollback(@PathVariable("name") String name, @PathVariable("version") Integer version) { + @PostMapping("/rollback/{name}/{version}") + public ResponseEntity rollback(@PathVariable String name, @PathVariable Integer version) { this.streamService.rollbackStream(name, version); return new ResponseEntity<>(HttpStatus.CREATED); } - @RequestMapping(value = "/manifest/{name}/{version}", method = RequestMethod.GET) - public ResponseEntity manifest(@PathVariable("name") String name, - @PathVariable("version") Integer version) { + @GetMapping("/manifest/{name}/{version}") + public ResponseEntity manifest(@PathVariable String name, + @PathVariable Integer version) { return new ResponseEntity<>(this.streamService.manifest(name, version), HttpStatus.OK); } - @RequestMapping(path = "/history/{name}", method = RequestMethod.GET) + @GetMapping("/history/{name}") @ResponseStatus(HttpStatus.OK) public Collection history(@PathVariable("name") String releaseName) { - return this.streamService.history(releaseName); + return this.streamService.history(releaseName) + .stream() + .map(this::sanitizeRelease) + .collect(Collectors.toList()); + } + + private Release sanitizeRelease(Release release) { + if (release.getConfigValues() != null && StringUtils.hasText(release.getConfigValues().getRaw())) { + release.getConfigValues().setRaw(sanitizer.sanitizeJsonOrYamlString(release.getConfigValues().getRaw())); + } + return release; } - @RequestMapping(path = "/platform/list", method = RequestMethod.GET) + @GetMapping("/platform/list") 
@ResponseStatus(HttpStatus.OK) public Collection platformList() { return this.streamService.platformList(); @@ -157,8 +175,8 @@ public Collection platformList() { * @param name the name of an existing stream (required) * @return response without a body */ - @RequestMapping(value = "/{name}", method = RequestMethod.DELETE) - public ResponseEntity undeploy(@PathVariable("name") String name) { + @DeleteMapping("/{name}") + public ResponseEntity undeploy(@PathVariable String name) { this.repository.findById(name) .orElseThrow(() -> new NoSuchStreamDefinitionException(name)); this.streamService.undeployStream(name); @@ -167,9 +185,10 @@ public ResponseEntity undeploy(@PathVariable("name") String name) { /** * Request un-deployment of all streams. + * * @return instance of {@link ResponseEntity} */ - @RequestMapping(value = "", method = RequestMethod.DELETE) + @DeleteMapping("") public ResponseEntity undeployAll() { for (StreamDefinition stream : this.repository.findAll()) { this.streamService.undeployStream(stream.getName()); @@ -179,13 +198,17 @@ public ResponseEntity undeployAll() { /** * Request deployment of an existing stream definition. - * @param name the name of an existing stream definition (required) + * + * @param name the name of an existing stream definition (required) + * @param reuseDeploymentProperties Indicator to re-use deployment properties. * @return The stream deployment */ - @RequestMapping(value = "/{name}", method = RequestMethod.GET) + @GetMapping("/{name}") @ResponseStatus(HttpStatus.OK) - public StreamDeploymentResource info(@PathVariable("name") String name, - @RequestParam(value = "reuse-deployment-properties", required = false) boolean reuseDeploymentProperties) { + public StreamDeploymentResource info( + @PathVariable String name, + @RequestParam(value = "reuse-deployment-properties", required = false) boolean reuseDeploymentProperties + ) { StreamDefinition streamDefinition = this.repository.findById(name) .orElseThrow(() -> new NoSuchStreamDefinitionException(name)); StreamDeployment streamDeployment = this.streamService.info(name); @@ -203,14 +226,15 @@ public StreamDeploymentResource info(@PathVariable("name") String name, /** * Request deployment of an existing stream definition. - * @param name the name of an existing stream definition (required) + * + * @param name the name of an existing stream definition (required) * @param properties the deployment properties for the stream as a comma-delimited list of - * key=value pairs + * key=value pairs * @return response without a body */ - @RequestMapping(value = "/{name}", method = RequestMethod.POST) - public ResponseEntity deploy(@PathVariable("name") String name, - @RequestBody(required = false) Map properties) { + @PostMapping("/{name}") + public ResponseEntity deploy(@PathVariable String name, + @RequestBody(required = false) Map properties) { this.streamService.deployStream(name, properties); return new ResponseEntity<>(HttpStatus.CREATED); } @@ -241,8 +265,7 @@ public Assembler(String dslText, String description, String status, boolean reus public StreamDeploymentResource toModel(StreamDeployment streamDeployment) { try { return createModelWithId(streamDeployment.getStreamName(), streamDeployment); - } - catch (IllegalStateException e) { + } catch (IllegalStateException e) { logger.warn("Failed to create StreamDeploymentResource. 
" + e.getMessage()); } return null; @@ -255,7 +278,7 @@ public StreamDeploymentResource instantiateModel(StreamDeployment streamDeployme (StringUtils.hasText(streamDeployment.getDeploymentProperties()) && canDisplayDeploymentProperties())) { deploymentProperties = streamDeployment.getDeploymentProperties(); } - return new StreamDeploymentResource(streamDeployment.getStreamName(), + return new StreamDeploymentResource(streamDeployment.getStreamName(), streamDefinitionService.redactDsl(new StreamDefinition(streamDeployment.getStreamName(), this.dslText)), this.description, deploymentProperties, this.status); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamValidationController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamValidationController.java index f51da18b61..750323c8dd 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamValidationController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamValidationController.java @@ -23,9 +23,9 @@ import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; import org.springframework.util.Assert; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; @@ -61,10 +61,10 @@ public StreamValidationController(StreamService streamService) { * @param name name of the stream definition * @return The status for the apps in a stream definition. 
*/ - @RequestMapping(value = "/{name}", method = RequestMethod.GET) + @GetMapping("/{name}") @ResponseStatus(HttpStatus.OK) public StreamAppStatusResource validate( - @PathVariable("name") String name) { + @PathVariable String name) { ValidationStatus result = this.streamService.validateStream(name); return new Assembler().toModel(result); } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskCtrController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskCtrController.java index 8716a59d11..f7b51fcc37 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskCtrController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskCtrController.java @@ -25,8 +25,8 @@ import org.springframework.cloud.dataflow.server.service.impl.TaskConfigurationProperties; import org.springframework.core.io.Resource; import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; @@ -56,14 +56,12 @@ public TaskCtrController(ApplicationConfigurationMetadataResolver metadataResolv this.appResourceCommon = appResourceCommon; } - @RequestMapping(value = "/options", method = RequestMethod.GET) + @GetMapping("/options") @ResponseStatus(HttpStatus.OK) public List options() { URI ctrUri = null; try { - ctrUri = new URI(composedTaskRunnerConfigurationProperties.getUri() != null - ? composedTaskRunnerConfigurationProperties.getUri() - : this.taskConfigurationProperties.getComposedTaskRunnerUri()); + ctrUri = new URI(composedTaskRunnerConfigurationProperties.getUri()); } catch (Exception e) { throw new IllegalStateException("Invalid Compose Task Runner Resource", e); } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskDefinitionController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskDefinitionController.java index 8e1e0207bc..c296c2d866 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskDefinitionController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskDefinitionController.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2020 the original author or authors. + * Copyright 2016-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -20,7 +20,10 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; import org.springframework.cloud.dataflow.core.TaskDefinition; import org.springframework.cloud.dataflow.core.dsl.TaskNode; @@ -34,9 +37,9 @@ import org.springframework.cloud.dataflow.server.service.TaskDeleteService; import org.springframework.cloud.dataflow.server.service.TaskSaveService; import org.springframework.cloud.dataflow.server.service.impl.TaskServiceUtils; +import org.springframework.cloud.dataflow.server.task.DataflowTaskExplorer; import org.springframework.cloud.deployer.spi.task.TaskLauncher; import org.springframework.cloud.task.repository.TaskExecution; -import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.data.web.PagedResourcesAssembler; @@ -45,9 +48,11 @@ import org.springframework.hateoas.server.RepresentationModelAssembler; import org.springframework.http.HttpStatus; import org.springframework.util.Assert; +import org.springframework.web.bind.annotation.DeleteMapping; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; @@ -62,6 +67,7 @@ * @author Gunnar Hillert * @author Daniel Serleg * @author Ilayaperumal Gopinathan + * @author Chris Bono */ @RestController @RequestMapping("/tasks/definitions") @@ -74,7 +80,7 @@ public class TaskDefinitionController { private final TaskDeleteService taskDeleteService; - private final TaskExplorer explorer; + private final DataflowTaskExplorer explorer; private final TaskDefinitionAssemblerProvider taskDefinitionAssemblerProvider; @@ -85,15 +91,15 @@ public class TaskDefinitionController { *
 * <li>task status checks to the provided {@link TaskLauncher}</li> * </ul> * * - * @param taskExplorer used to look up TaskExecutions. - * @param repository the repository this controller will use for task CRUD operations. - * @param taskSaveService handles Task saving related operations. - * @param taskDeleteService handles Task deletion related operations. + * @param taskExplorer used to look up TaskExecutions. + * @param repository the repository this controller will use for task CRUD operations. + * @param taskSaveService handles Task saving related operations. + * @param taskDeleteService handles Task deletion related operations. * @param taskDefinitionAssemblerProvider the task definition assembler provider to use. */ - public TaskDefinitionController(TaskExplorer taskExplorer, TaskDefinitionRepository repository, - TaskSaveService taskSaveService, TaskDeleteService taskDeleteService, - TaskDefinitionAssemblerProvider taskDefinitionAssemblerProvider) { + public TaskDefinitionController(DataflowTaskExplorer taskExplorer, TaskDefinitionRepository repository, + TaskSaveService taskSaveService, TaskDeleteService taskDeleteService, + TaskDefinitionAssemblerProvider taskDefinitionAssemblerProvider) { Assert.notNull(taskExplorer, "taskExplorer must not be null"); Assert.notNull(repository, "repository must not be null"); Assert.notNull(taskSaveService, "taskSaveService must not be null"); @@ -109,14 +115,17 @@ public TaskDefinitionController(TaskExplorer taskExplorer, TaskDefinitionReposit /** * Register a task definition for future execution. * - * @param name name the name of the task - * @param dsl DSL definition for the task + * @param name the name of the task + * @param dsl DSL definition for the task * @param description description of the task definition * @return the task definition */ - @RequestMapping(value = "", method = RequestMethod.POST) - public TaskDefinitionResource save(@RequestParam("name") String name, @RequestParam("definition") String dsl, - @RequestParam(value = "description", defaultValue = "") String description) { + @PostMapping("") + public TaskDefinitionResource save( + @RequestParam String name, + @RequestParam("definition") String dsl, + @RequestParam(defaultValue = "") String description + ) { TaskDefinition taskDefinition = new TaskDefinition(name, dsl, description); taskSaveService.saveTaskDefinition(taskDefinition); return this.taskDefinitionAssemblerProvider.getTaskDefinitionAssembler(false).toModel(new TaskExecutionAwareTaskDefinition(taskDefinition)); @@ -125,20 +134,23 @@ public TaskDefinitionResource save(@RequestParam("name") String name, @RequestPa /** * Delete the task from the repository so that it can no longer be executed. * - * @param name name of the task to be deleted + * @param name name of the task to be deleted + * @param cleanup optional cleanup indicator. */ - @RequestMapping(value = "/{name}", method = RequestMethod.DELETE) + @DeleteMapping("/{name}") @ResponseStatus(HttpStatus.OK) - public void destroyTask(@PathVariable("name") String name, @RequestParam(required = false) Boolean cleanup) { + public void destroyTask( + @PathVariable String name, + @RequestParam(required = false) Boolean cleanup + ) { boolean taskExecutionCleanup = (cleanup != null && cleanup) ? cleanup : false; this.taskDeleteService.deleteTaskDefinition(name, taskExecutionCleanup); } /** * Delete all tasks from the repository.
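A client-side sketch of the reworked definition endpoints above (not part of the patch; host, task name, and definition are invented). Note the optional cleanup flag now handled by destroyTask(...).

```java
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

public class TaskDefinitionClientSketch {
	public static void main(String[] args) {
		RestTemplate rest = new RestTemplate();

		// Register a definition: name/definition/description are simple request params.
		MultiValueMap<String, String> form = new LinkedMultiValueMap<>();
		form.add("name", "my-timestamp");
		form.add("definition", "timestamp");
		HttpHeaders headers = new HttpHeaders();
		headers.setContentType(MediaType.APPLICATION_FORM_URLENCODED);
		rest.postForEntity("http://localhost:9393/tasks/definitions",
				new HttpEntity<>(form, headers), String.class);

		// Delete it and, with cleanup=true, its task executions as well.
		rest.delete("http://localhost:9393/tasks/definitions/{name}?cleanup=true", "my-timestamp");
	}
}
```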
- * */ - @RequestMapping(value = "", method = RequestMethod.DELETE) + @DeleteMapping("") @ResponseStatus(HttpStatus.OK) public void destroyAll() { taskDeleteService.deleteAll(); @@ -147,49 +159,47 @@ public void destroyAll() { /** * Return a page-able list of {@link TaskDefinitionResource} defined tasks. * - * @param pageable page-able collection of {@code TaskDefinitionResource} - * @param search optional findByTaskNameContains parameter - * @param dslText optional findByDslText parameter - * @param manifest optional manifest flag to indicate whether the latest task execution requires task manifest update - * @param assembler assembler for the {@link TaskDefinition} + * @param pageable page-able collection of {@code TaskDefinitionResource} + * @param taskName optional findByTaskNameContains parameter + * @param dslText optional findByDslText parameter + * @param description optional findByDescription parameter + * @param manifest optional manifest flag to indicate whether the latest task execution requires task manifest update + * @param assembler assembler for the {@link TaskDefinition} * @return a list of task definitions */ - @RequestMapping(value = "", method = RequestMethod.GET) + @GetMapping("") @ResponseStatus(HttpStatus.OK) - public PagedModel list(Pageable pageable, @RequestParam(required = false) String search, - @RequestParam(required = false) boolean manifest, @RequestParam(required = false) String dslText, - PagedResourcesAssembler assembler) { - + public PagedModel list( + Pageable pageable, + @RequestParam(required = false) String taskName, + @RequestParam(required = false) String description, + @RequestParam(required = false) boolean manifest, + @RequestParam(required = false) String dslText, + PagedResourcesAssembler assembler + ) { final Page taskDefinitions; - if (search != null) { - if (dslText != null) { - throw new TaskQueryParamException(new String[] {"search", "dslText"}); - } else { - taskDefinitions = repository.findByTaskNameContains(search, pageable); - } - } - else { - if (dslText != null) { - taskDefinitions = repository.findByDslTextContains(dslText, pageable); - } else { - taskDefinitions = repository.findAll(pageable); - } - } - final java.util.HashMap taskDefinitionMap = new java.util.HashMap<>(); + if (Stream.of(taskName, description, dslText).filter(Objects::nonNull).count() > 1L) { + throw new TaskQueryParamException(new String[]{"taskName", "description", "dslText"}); + } - for (TaskDefinition taskDefinition : taskDefinitions) { - taskDefinitionMap.put(taskDefinition.getName(), taskDefinition); + if (taskName != null) { + taskDefinitions = repository.findByTaskNameContains(taskName, pageable); + } else if (description != null) { + taskDefinitions = repository.findByDescriptionContains(description, pageable); + } else if (dslText != null) { + taskDefinitions = repository.findByDslTextContains(dslText, pageable); + } else { + taskDefinitions = repository.findAll(pageable); } - final List taskExecutions; + final Map taskDefinitionMap = taskDefinitions + .stream() + .collect(Collectors.toMap(TaskDefinition::getTaskName, Function.identity())); + List taskExecutions = null; if (!taskDefinitionMap.isEmpty()) { - taskExecutions = this.explorer.getLatestTaskExecutionsByTaskNames( - taskDefinitionMap.keySet().toArray(new String[taskDefinitionMap.size()])); - } - else { - taskExecutions = null; + taskExecutions = this.explorer.getLatestTaskExecutionsByTaskNames(taskDefinitionMap.keySet().toArray(new String[0])); } final Page taskExecutionAwareTaskDefinitions = 
taskDefinitions @@ -204,12 +214,12 @@ public PagedModel list(Pageable pageable, @Req private Collection<TaskDefinitionResource> updateComposedTaskElement(Collection<TaskDefinitionResource> taskDefinitionResources, - Page<TaskDefinition> taskDefinitions) { + Page<TaskDefinition> taskDefinitions) { Map<String, TaskDefinitionResource> taskNameResources = new HashMap<>(); - for (TaskDefinitionResource taskDefinitionResource: taskDefinitionResources) { + for (TaskDefinitionResource taskDefinitionResource : taskDefinitionResources) { taskNameResources.put(taskDefinitionResource.getName(), taskDefinitionResource); } - for (TaskDefinition taskDefinition: taskDefinitions) { + for (TaskDefinition taskDefinition : taskDefinitions) { TaskParser taskParser = new TaskParser(taskDefinition.getName(), taskDefinition.getDslText(), true, true); TaskNode taskNode = taskParser.parse(); if (taskNode.isComposed()) { @@ -226,12 +236,16 @@ private Collection updateComposedTaskElement(C /** * Return a given task definition resource. * - * @param name the name of an existing task definition (required) + * @param name the name of an existing task definition (required) + * @param manifest indicator to include manifest in response. * @return the task definition */ - @RequestMapping(value = "/{name}", method = RequestMethod.GET) + @GetMapping("/{name}") @ResponseStatus(HttpStatus.OK) - public TaskDefinitionResource display(@PathVariable("name") String name, @RequestParam(required = false, name = "manifest") boolean manifest) { + public TaskDefinitionResource display( + @PathVariable String name, + @RequestParam(required = false) boolean manifest + ) { TaskDefinition definition = this.repository.findById(name) .orElseThrow(() -> new NoSuchTaskDefinitionException(name)); final TaskExecution taskExecution = this.explorer.getLatestTaskExecutionForTaskName(name); @@ -240,8 +254,7 @@ public TaskDefinitionResource display(@PathVariable("name") String name, @Reques TaskDefinitionResource taskDefinitionResource; if (taskExecution != null) { taskDefinitionResource = taskAssembler.toModel(new TaskExecutionAwareTaskDefinition(definition, taskExecution)); - } - else { + } else { taskDefinitionResource = taskAssembler.toModel(new TaskExecutionAwareTaskDefinition(definition)); } // Identify if the task definition is a composed task element @@ -271,8 +284,7 @@ public TaskDefinitionConverter(List taskExecutions) { for (TaskExecution taskExecution : taskExecutions) { this.taskExecutions.put(taskExecution.getTaskName(), taskExecution); } - } - else { + } else { this.taskExecutions = null; } } @@ -287,10 +299,11 @@ public TaskExecutionAwareTaskDefinition apply(TaskDefinition source) { if (lastTaskExecution != null) { return new TaskExecutionAwareTaskDefinition(source, lastTaskExecution); - } - else { + } else { return new TaskExecutionAwareTaskDefinition(source); } } - }; + } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionController.java index 9eefb20737..be3047c335 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionController.java @@ -21,17 +21,30 @@ import java.util.Collection; import java.util.HashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; +import
java.util.stream.Collectors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.batch.core.launch.NoSuchJobExecutionException; +import org.springframework.cloud.dataflow.core.LaunchResponse; import org.springframework.cloud.dataflow.core.PlatformTaskExecutionInformation; +import org.springframework.cloud.dataflow.core.TaskDefinition; import org.springframework.cloud.dataflow.core.TaskManifest; +import org.springframework.cloud.dataflow.core.dsl.TaskParser; +import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; import org.springframework.cloud.dataflow.rest.job.TaskJobExecutionRel; import org.springframework.cloud.dataflow.rest.resource.CurrentTaskExecutionsResource; +import org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource; import org.springframework.cloud.dataflow.rest.resource.TaskExecutionResource; import org.springframework.cloud.dataflow.rest.resource.TaskExecutionsInfoResource; +import org.springframework.cloud.dataflow.rest.util.ArgumentSanitizer; import org.springframework.cloud.dataflow.rest.util.DeploymentPropertiesUtils; import org.springframework.cloud.dataflow.rest.util.TaskSanitizer; +import org.springframework.cloud.dataflow.server.config.DataflowAsyncAutoConfiguration; import org.springframework.cloud.dataflow.server.controller.support.TaskExecutionControllerDeleteAction; import org.springframework.cloud.dataflow.server.repository.NoSuchTaskDefinitionException; import org.springframework.cloud.dataflow.server.repository.NoSuchTaskExecutionException; @@ -39,6 +52,8 @@ import org.springframework.cloud.dataflow.server.service.TaskDeleteService; import org.springframework.cloud.dataflow.server.service.TaskExecutionInfoService; import org.springframework.cloud.dataflow.server.service.TaskExecutionService; +import org.springframework.cloud.dataflow.server.service.TaskJobService; +import org.springframework.cloud.dataflow.server.task.DataflowTaskExplorer; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.data.domain.Page; @@ -50,14 +65,20 @@ import org.springframework.hateoas.server.ExposesResourceFor; import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; +import org.springframework.scheduling.annotation.Async; import org.springframework.util.Assert; +import org.springframework.web.bind.annotation.DeleteMapping; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + /** * Controller for operations on * {@link org.springframework.cloud.task.repository.TaskExecution}. 
This includes @@ -69,6 +90,7 @@ * @author Christian Tzolov * @author David Turanski * @author Gunnar Hillert + * @author Corneil du Plessis */ @RestController @RequestMapping("/tasks/executions") @@ -76,60 +98,73 @@ public class TaskExecutionController { private final Assembler taskAssembler = new Assembler(); - + private final LaunchResponseAssembler launcherResponseAssembler = new LaunchResponseAssembler(); private final TaskExecutionService taskExecutionService; private final TaskExecutionInfoService taskExecutionInfoService; private final TaskDeleteService taskDeleteService; - private final TaskExplorer explorer; + private final DataflowTaskExplorer explorer; + + private final TaskJobService taskJobService; private final TaskDefinitionRepository taskDefinitionRepository; private final TaskSanitizer taskSanitizer = new TaskSanitizer(); + private final Logger logger = LoggerFactory.getLogger(TaskExecutionController.class); + + + private final ArgumentSanitizer argumentSanitizer = new ArgumentSanitizer(); + private static final List allowedSorts = Arrays.asList("TASK_EXECUTION_ID", "START_TIME", "END_TIME", "TASK_NAME", "EXIT_CODE", "EXIT_MESSAGE", "ERROR_MESSAGE", "LAST_UPDATED", "EXTERNAL_EXECUTION_ID", - "PARENT_EXECUTION_ID"); + "PARENT_EXECUTION_ID", "SCHEMA_TARGET"); /** * Creates a {@code TaskExecutionController} that retrieves Task Execution information * from a the {@link TaskExplorer} * - * @param explorer the explorer this controller will use for retrieving task execution - * information. - * @param taskExecutionService used to launch tasks - * @param taskDefinitionRepository the task definition repository - * @param taskExecutionInfoService the task execution information service - * @param taskDeleteService the task deletion service + * @param explorer the explorer this controller will use for retrieving task execution + * information. + * @param taskExecutionService used to launch tasks + * @param taskDefinitionRepository the task definition repository + * @param taskExecutionInfoService the task execution information service + * @param taskDeleteService the task deletion service + * @param taskJobService the task job service */ - public TaskExecutionController(TaskExplorer explorer, TaskExecutionService taskExecutionService, - TaskDefinitionRepository taskDefinitionRepository, TaskExecutionInfoService taskExecutionInfoService, - TaskDeleteService taskDeleteService) { + public TaskExecutionController(DataflowTaskExplorer explorer, + TaskExecutionService taskExecutionService, + TaskDefinitionRepository taskDefinitionRepository, + TaskExecutionInfoService taskExecutionInfoService, + TaskDeleteService taskDeleteService, + TaskJobService taskJobService) { Assert.notNull(explorer, "explorer must not be null"); Assert.notNull(taskExecutionService, "taskExecutionService must not be null"); Assert.notNull(taskDefinitionRepository, "taskDefinitionRepository must not be null"); Assert.notNull(taskExecutionInfoService, "taskDefinitionRetriever must not be null"); Assert.notNull(taskDeleteService, "taskDeleteService must not be null"); + Assert.notNull(taskJobService, "taskJobService must not be null"); this.taskExecutionService = taskExecutionService; this.explorer = explorer; this.taskDefinitionRepository = taskDefinitionRepository; this.taskExecutionInfoService = taskExecutionInfoService; this.taskDeleteService = taskDeleteService; + this.taskJobService = taskJobService; } /** * Return a page-able list of {@link TaskExecutionResource} defined tasks. 
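Editor's note: since `validatePageable` (later in this file) rejects sort columns outside the `allowedSorts` whitelist above, clients must sort by one of the listed names. A minimal client-side sketch of paging this list endpoint; the base URL assumes a locally running server on the default port, and is not part of the patch:

```java
import org.springframework.web.client.RestTemplate;

// Sketch only: page through task executions, newest first. The sort column
// must be one of the names in allowedSorts (e.g. START_TIME), otherwise the
// controller throws IllegalArgumentException.
public class ListTaskExecutions {
	public static void main(String[] args) {
		String url = "http://localhost:9393/tasks/executions?page=0&size=20&sort=START_TIME,desc";
		String body = new RestTemplate().getForObject(url, String.class);
		System.out.println(body);
	}
}
```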
* - * @param pageable page-able collection of {@code TaskExecution}s. + * @param pageable page-able collection of {@code TaskExecution}s. * @param assembler for the {@link TaskExecution}s * @return a list of task executions */ - @RequestMapping(value = "", method = RequestMethod.GET) + @GetMapping("") @ResponseStatus(HttpStatus.OK) public PagedModel list(Pageable pageable, - PagedResourcesAssembler assembler) { + PagedResourcesAssembler assembler) { validatePageable(pageable); Page taskExecutions = this.explorer.findAll(pageable); Page result = getPageableRelationships(taskExecutions, pageable); @@ -139,18 +174,23 @@ public PagedModel list(Pageable pageable, /** * Retrieve all task executions with the task name specified * - * @param taskName name of the task - * @param pageable page-able collection of {@code TaskExecution}s. + * @param taskName name of the task + * @param pageable page-able collection of {@code TaskExecution}s. * @param assembler for the {@link TaskExecution}s * @return the paged list of task executions */ - @RequestMapping(value = "", method = RequestMethod.GET, params = "name") + @GetMapping(value = "", params = "name") @ResponseStatus(HttpStatus.OK) - public PagedModel retrieveTasksByName(@RequestParam("name") String taskName, - Pageable pageable, PagedResourcesAssembler assembler) { + public PagedModel retrieveTasksByName( + @RequestParam("name") String taskName, + Pageable pageable, + PagedResourcesAssembler assembler + ) { + long tasks = this.taskDefinitionRepository.countByTaskName(taskName); + if(tasks == 0) { + throw new NoSuchTaskDefinitionException(taskName); + } validatePageable(pageable); - this.taskDefinitionRepository.findById(taskName) - .orElseThrow(() -> new NoSuchTaskDefinitionException(taskName)); Page taskExecutions = this.explorer.findTaskExecutionsByName(taskName, pageable); Page result = getPageableRelationships(taskExecutions, pageable); return assembler.toModel(result, this.taskAssembler); @@ -161,21 +201,35 @@ public PagedModel retrieveTasksByName(@RequestParam("name * if `spring.cloud.dataflow.task.auto-create-task-definitions` is true. * The name must be included in the path. 
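Editor's note: a hedged sketch of invoking the launch endpoint defined in this hunk. The URL, task name, and property values are placeholders; it assumes the standard Spring MVC behavior that `@RequestParam` also binds form-encoded parameters:

```java
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

// Sketch only: launch a task definition. 'properties' is the comma-delimited
// key=value list and 'arguments' the space-delimited command line parsed by
// DeploymentPropertiesUtils; the JSON response body is the task execution id.
public class LaunchTask {
	public static void main(String[] args) {
		MultiValueMap<String, String> form = new LinkedMultiValueMap<>();
		form.add("properties", "app.my-task.logging.level.root=DEBUG");
		form.add("arguments", "--iterations=3");
		Long executionId = new RestTemplate().postForObject(
				"http://localhost:9393/tasks/executions?name=my-task", form, Long.class);
		System.out.println("Launched execution " + executionId);
	}
}
```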
* - * @param taskName the name of the task to be executed (required) + * @param taskName the name of the task to be executed (required) * @param properties the runtime properties for the task, as a comma-delimited list of - * key=value pairs - * @param arguments the runtime commandline arguments + * key=value pairs + * @param arguments the runtime commandline arguments * @return the taskExecutionId for the executed task */ - @RequestMapping(value = "", method = RequestMethod.POST, params = "name") + @PostMapping(value = "", params = "name") @ResponseStatus(HttpStatus.CREATED) - public long launch(@RequestParam("name") String taskName, + public long launch( + @RequestParam("name") String taskName, @RequestParam(required = false) String properties, @RequestParam(required = false) String arguments) { Map propertiesToUse = DeploymentPropertiesUtils.parse(properties); List argumentsToUse = DeploymentPropertiesUtils.parseArgumentList(arguments, " "); - - return this.taskExecutionService.executeTask(taskName, propertiesToUse, argumentsToUse); + LaunchResponse launchResponse = this.taskExecutionService.executeTask(taskName, propertiesToUse, argumentsToUse); + return launchResponse.getExecutionId(); + } + @PostMapping(value = "/launch", params = "name") + @ResponseStatus(HttpStatus.CREATED) + public LaunchResponseResource launchBoot3( + @RequestParam("name") String taskName, + @RequestParam(required = false) String properties, + @RequestParam(required = false) String arguments + ) { + // TODO update docs and root + Map propertiesToUse = DeploymentPropertiesUtils.parse(properties); + List argumentsToUse = DeploymentPropertiesUtils.parseArgumentList(arguments, " "); + LaunchResponse launchResponse = this.taskExecutionService.executeTask(taskName, propertiesToUse, argumentsToUse); + return this.launcherResponseAssembler.toModel(launchResponse); } /** @@ -184,23 +238,47 @@ public long launch(@RequestParam("name") String taskName, * @param id the id of the requested {@link TaskExecution} * @return the {@link TaskExecution} */ - @RequestMapping(value = "/{id}", method = RequestMethod.GET) + @GetMapping("/{id}") @ResponseStatus(HttpStatus.OK) - public TaskExecutionResource view(@PathVariable("id") long id) { - TaskExecution taskExecution = this.explorer.getTaskExecution(id); + public TaskExecutionResource view( + @PathVariable Long id) { + TaskExecution taskExecution = sanitizeTaskExecutionArguments(this.explorer.getTaskExecution(id)); if (taskExecution == null) { throw new NoSuchTaskExecutionException(id); } - taskExecution = this.taskSanitizer.sanitizeTaskExecutionArguments(taskExecution); TaskManifest taskManifest = this.taskExecutionService.findTaskManifestById(id); taskManifest = this.taskSanitizer.sanitizeTaskManifest(taskManifest); + List jobExecutionIds = new ArrayList<>(this.explorer.getJobExecutionIdsByTaskExecutionId(taskExecution.getExecutionId())); TaskJobExecutionRel taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, - new ArrayList<>(this.explorer.getJobExecutionIdsByTaskExecutionId(taskExecution.getExecutionId())), - taskManifest); + jobExecutionIds, + taskManifest, + getCtrTaskJobExecution(taskExecution, jobExecutionIds) + ); + return this.taskAssembler.toModel(taskJobExecutionRel); + } + @GetMapping("/external/{externalExecutionId}") + @ResponseStatus(HttpStatus.OK) + public TaskExecutionResource viewByExternal( + @PathVariable String externalExecutionId, + @RequestParam(required = false) String platform + ) { + TaskExecution taskExecution = 
sanitizeTaskExecutionArguments(this.explorer.getTaskExecutionByExternalExecutionId(externalExecutionId, platform)); + if (taskExecution == null) { + throw new NoSuchTaskExecutionException(externalExecutionId, platform); + } + TaskManifest taskManifest = this.taskExecutionService.findTaskManifestById(taskExecution.getExecutionId()); + taskManifest = this.taskSanitizer.sanitizeTaskManifest(taskManifest); + List jobExecutionIds = new ArrayList<>(this.explorer.getJobExecutionIdsByTaskExecutionId(taskExecution.getExecutionId())); + TaskJobExecutionRel taskJobExecutionRel = new TaskJobExecutionRel( + taskExecution, + jobExecutionIds, + taskManifest, + getCtrTaskJobExecution(taskExecution, jobExecutionIds) + ); return this.taskAssembler.toModel(taskJobExecutionRel); } - @RequestMapping(value = "/current", method = RequestMethod.GET) + @GetMapping("/current") @ResponseStatus(HttpStatus.OK) public Collection getCurrentTaskExecutionsInfo() { List executionInformation = taskExecutionInfoService @@ -209,7 +287,7 @@ public Collection getCurrentTaskExecutionsInfo() executionInformation.forEach(platformTaskExecutionInformation -> { CurrentTaskExecutionsResource currentTaskExecutionsResource = - CurrentTaskExecutionsResource.fromTaskExecutionInformation(platformTaskExecutionInformation); + CurrentTaskExecutionsResource.fromTaskExecutionInformation(platformTaskExecutionInformation); resources.add(currentTaskExecutionsResource); }); @@ -221,13 +299,15 @@ public Collection getCurrentTaskExecutionsInfo() * optional {@code actions} parameter can be used to not only clean up task execution resources, * but can also trigger the deletion of task execution and job data in the persistence store. * - * @param ids The id of the {@link TaskExecution}s to clean up + * @param ids The id of the {@link TaskExecution}s to clean up * @param actions Defaults to "CLEANUP" if not specified */ - @RequestMapping(value = "/{id}", method = RequestMethod.DELETE) + @DeleteMapping("/{id}") @ResponseStatus(HttpStatus.OK) - public void cleanup(@PathVariable("id") Set ids, - @RequestParam(defaultValue = "CLEANUP", name="action") TaskExecutionControllerDeleteAction[] actions) { + public void cleanup( + @PathVariable("id") Set ids, + @RequestParam(defaultValue = "CLEANUP", name = "action") TaskExecutionControllerDeleteAction[] actions + ) { final Set actionsAsSet = new HashSet<>(Arrays.asList(actions)); this.taskDeleteService.cleanupExecutions(actionsAsSet, ids); } @@ -236,31 +316,38 @@ public void cleanup(@PathVariable("id") Set ids, * Cleanup resources associated with one or more task executions. The * optional {@code actions} and {@code completed} parameters can be used to not only clean up task execution resources, * but can also trigger the deletion of task execution and job data in the persistence store. + *
<p>
    + * When the {@code spring.cloud.dataflow.async.enabled} property is set to {@code true} the cleanup will happen + * asynchronously. * - * @param actions Defaults to "CLEANUP" if not specified - * @param completed Defaults to cleanup only completed task executions + * @param actions the actions to perform (default 'CLEANUP') + * @param completed whether to include only completed task executions (default false) + * @param taskName name of the task (default '') + * @param days only include tasks that have ended at least this many days ago (default null) */ - @RequestMapping(method = RequestMethod.DELETE) + @DeleteMapping @ResponseStatus(HttpStatus.OK) + @Async(DataflowAsyncAutoConfiguration.DATAFLOW_ASYNC_EXECUTOR) public void cleanupAll( - @RequestParam(defaultValue = "CLEANUP", name="action") TaskExecutionControllerDeleteAction[] actions, - @RequestParam(defaultValue = "false", name="completed") boolean completed, - @RequestParam(defaultValue = "", name="name") String taskName) { - - this.taskDeleteService.cleanupExecutions(new HashSet<>(Arrays.asList(actions)), - this.taskExecutionService.getAllTaskExecutionIds(completed, taskName)); + @RequestParam(defaultValue = "CLEANUP", name = "action") TaskExecutionControllerDeleteAction[] actions, + @RequestParam(defaultValue = "false") boolean completed, + @RequestParam(defaultValue = "", name = "name") String taskName, + @RequestParam(required = false) Integer days + ) { + this.taskDeleteService.cleanupExecutions(new HashSet<>(Arrays.asList(actions)), taskName, completed, days); } /** * Stop a set of task executions. * - * @param ids the ids of the {@link TaskExecution}s to stop + * @param ids the ids of the {@link TaskExecution}s to stop * @param platform the platform name */ - @RequestMapping(value = "/{id}", method = RequestMethod.POST) + @PostMapping("/{id}") @ResponseStatus(HttpStatus.OK) - public void stop(@PathVariable("id") Set ids, - @RequestParam(defaultValue = "", name="platform") String platform) { + public void stop( + @PathVariable("id") Set ids, + @RequestParam(defaultValue = "") String platform) { this.taskExecutionService.stopTaskExecution(ids, platform); } @@ -272,20 +359,47 @@ private Page getPageableRelationships(Page t List jobExecutionIds = new ArrayList<>( this.explorer.getJobExecutionIdsByTaskExecutionId(taskExecution.getExecutionId())); taskJobExecutionRels - .add(new TaskJobExecutionRel(this.taskSanitizer.sanitizeTaskExecutionArguments(taskExecution), + .add(new TaskJobExecutionRel(sanitizeTaskExecutionArguments(taskExecution), jobExecutionIds, - taskManifest)); + taskManifest, getCtrTaskJobExecution(taskExecution, jobExecutionIds))); } return new PageImpl<>(taskJobExecutionRels, pageable, taskExecutions.getTotalElements()); } + + private TaskExecution sanitizeTaskExecutionArguments(TaskExecution taskExecution) { + if (taskExecution != null) { + List args = taskExecution.getArguments().stream() + .map(this.argumentSanitizer::sanitize).collect(Collectors.toList()); + taskExecution.setArguments(args); + } + return taskExecution; + } + + private TaskJobExecution getCtrTaskJobExecution(TaskExecution taskExecution, List jobExecutionIds) { + TaskJobExecution taskJobExecution = null; + TaskDefinition taskDefinition = this.taskDefinitionRepository.findByTaskName(taskExecution.getTaskName()); + if (taskDefinition != null) { + TaskParser parser = new TaskParser(taskExecution.getTaskName(), taskDefinition.getDslText(), true, false); + if (!jobExecutionIds.isEmpty() && parser.parse().isComposed()) { + try { + taskJobExecution = 
this.taskJobService.getJobExecution(jobExecutionIds.toArray(new Long[0])[0]); + } catch (NoSuchJobExecutionException noSuchJobExecutionException) { + this.logger.warn("Job Execution for Task Execution {} could not be found.", + taskExecution.getExecutionId()); + } + } + } + return taskJobExecution; + } + private static void validatePageable(Pageable pageable) { if (pageable != null) { Sort sort = pageable.getSort(); if (sort != null) { for (Sort.Order order : sort) { String property = order.getProperty(); - if (property != null && !allowedSorts.contains(property.toUpperCase())) { + if (property != null && !allowedSorts.contains(property.toUpperCase(Locale.ROOT))) { throw new IllegalArgumentException("Sorting column " + order.getProperty() + " not allowed"); } } @@ -305,12 +419,26 @@ public Assembler() { @Override public TaskExecutionResource toModel(TaskJobExecutionRel taskJobExecutionRel) { - return createModelWithId(taskJobExecutionRel.getTaskExecution().getExecutionId(), taskJobExecutionRel); + + TaskExecutionResource resource = new TaskExecutionResource(taskJobExecutionRel); + resource.add( + linkTo( + methodOn(TaskLogsController.class) + .getLog(resource.getExternalExecutionId(), resource.getPlatformName()) + ).withRel("tasks/logs") + ); + + resource.add( + linkTo( + methodOn(TaskExecutionController.class) + .view(taskJobExecutionRel.getTaskExecution().getExecutionId()) + ).withSelfRel()); + return resource; } @Override public TaskExecutionResource instantiateModel(TaskJobExecutionRel taskJobExecutionRel) { - return new TaskExecutionResource(taskJobExecutionRel); + return toModel(taskJobExecutionRel); } } @@ -337,5 +465,16 @@ public TaskExecutionsInfoResource instantiateModel(Integer totalExecutions) { return taskExecutionsInfoResource; } } + private static class LaunchResponseAssembler extends RepresentationModelAssemblerSupport { + public LaunchResponseAssembler() { + super(TaskExecutionController.class, LaunchResponseResource.class); + } + @Override + public LaunchResponseResource toModel(LaunchResponse entity) { + LaunchResponseResource resource = new LaunchResponseResource(entity.getExecutionId()); + resource.add(linkTo(methodOn(TaskExecutionController.class).view(entity.getExecutionId())).withSelfRel()); + return resource; + } + } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionThinController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionThinController.java new file mode 100644 index 0000000000..e96e2c852f --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionThinController.java @@ -0,0 +1,99 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.server.controller; + + +import org.springframework.cloud.dataflow.core.ThinTaskExecution; +import org.springframework.cloud.dataflow.rest.resource.TaskExecutionThinResource; +import org.springframework.cloud.dataflow.server.repository.NoSuchTaskDefinitionException; +import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; +import org.springframework.cloud.dataflow.server.task.DataflowTaskExplorer; +import org.springframework.cloud.task.repository.TaskExecution; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.Pageable; +import org.springframework.data.web.PagedResourcesAssembler; +import org.springframework.hateoas.PagedModel; +import org.springframework.hateoas.server.ExposesResourceFor; +import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; + +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + +/** + * This controller provides for retrieving a thin task execution resource that will satisfy UI paging with embedded links to more detail. + * @author Corneil du Plessis + */ +@RestController +@RequestMapping("/tasks/thinexecutions") +@ExposesResourceFor(TaskExecutionThinResource.class) +public class TaskExecutionThinController { + + private final DataflowTaskExplorer explorer; + private final TaskDefinitionRepository taskDefinitionRepository; + private final TaskExecutionThinResourceAssembler resourceAssembler; + + public TaskExecutionThinController(DataflowTaskExplorer explorer, TaskDefinitionRepository taskDefinitionRepository) { + this.explorer = explorer; + this.taskDefinitionRepository = taskDefinitionRepository; + this.resourceAssembler = new TaskExecutionThinResourceAssembler(); + } + + @GetMapping(produces = "application/json") + @ResponseStatus(HttpStatus.OK) + public PagedModel listTasks(Pageable pageable, PagedResourcesAssembler pagedAssembler) { + Page page = explorer.findAll(pageable); + Page thinTaskExecutions = new PageImpl<>(page.stream().map(ThinTaskExecution::new).toList(), pageable, page.getTotalElements()); + explorer.populateCtrStatus(thinTaskExecutions.getContent()); + return pagedAssembler.toModel(thinTaskExecutions, resourceAssembler); + } + + @GetMapping(value = "", params = "name") + @ResponseStatus(HttpStatus.OK) + public PagedModel retrieveTasksByName( + @RequestParam("name") String taskName, + Pageable pageable, + PagedResourcesAssembler pagedAssembler + ) { + long tasks = this.taskDefinitionRepository.countByTaskName(taskName); + if(tasks == 0) { + throw new NoSuchTaskDefinitionException(taskName); + } + Page page = this.explorer.findTaskExecutionsByName(taskName, pageable); + Page thinTaskExecutions = new PageImpl<>(page.stream().map(ThinTaskExecution::new).toList(), pageable, page.getTotalElements()); + explorer.populateCtrStatus(thinTaskExecutions.getContent()); + return pagedAssembler.toModel(thinTaskExecutions, resourceAssembler); + } + + static class TaskExecutionThinResourceAssembler extends 
RepresentationModelAssemblerSupport { + public TaskExecutionThinResourceAssembler() { + super(TaskExecutionThinController.class, TaskExecutionThinResource.class); + } + @Override + public TaskExecutionThinResource toModel(ThinTaskExecution entity) { + TaskExecutionThinResource resource = new TaskExecutionThinResource(entity); + resource.add(linkTo(methodOn(TaskExecutionController.class).view(resource.getExecutionId())).withSelfRel()); + resource.add(linkTo(methodOn(TaskDefinitionController.class).display(resource.getTaskName(), true)).withRel("tasks/definitions")); + return resource; + } + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskLogsController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskLogsController.java index 4a3e37801c..eb06aa4d2f 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskLogsController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskLogsController.java @@ -20,9 +20,9 @@ import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.util.Assert; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; @@ -55,9 +55,11 @@ public TaskLogsController(TaskExecutionService taskExecutionService) { * @param platformName the platform name * @return the log content represented as String */ - @RequestMapping(value = "/{taskExternalExecutionId}", method = RequestMethod.GET) + @GetMapping("/{taskExternalExecutionId}") @ResponseStatus(HttpStatus.OK) - public ResponseEntity getLog(@PathVariable String taskExternalExecutionId, @RequestParam(required = false, defaultValue = "default") String platformName) { + public ResponseEntity getLog( + @PathVariable String taskExternalExecutionId, + @RequestParam(required = false, defaultValue = "default") String platformName) { return new ResponseEntity<>(this.taskExecutionService.getLog(platformName, taskExternalExecutionId), HttpStatus.OK); } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskPlatformController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskPlatformController.java index cd46b21afe..18ad65ff00 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskPlatformController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskPlatformController.java @@ -15,6 +15,8 @@ */ package org.springframework.cloud.dataflow.server.controller; +import java.util.Locale; + import org.springframework.cloud.dataflow.core.Launcher; import org.springframework.cloud.dataflow.rest.resource.LauncherResource; import org.springframework.cloud.dataflow.server.service.LauncherService; @@ -25,8 +27,8 @@ import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import 
org.springframework.http.HttpStatus; import org.springframework.util.StringUtils; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; @@ -52,16 +54,19 @@ public TaskPlatformController(LauncherService launcherService) { /** * Returns the list of platform accounts available for launching tasks. * @param pageable the Pageable request - * @param assembler the paged resource assembler for Launcher + * @param schedulesEnabled optional criteria to indicate enabled schedules. + * @param assembler the paged resource assembler for Launcher* * @return the paged resources of type {@link LauncherResource} */ - @RequestMapping(value = "", method = RequestMethod.GET) + @GetMapping("") @ResponseStatus(HttpStatus.OK) - public PagedModel list(Pageable pageable, - @RequestParam(value = "schedulesEnabled", required = false) String schedulesEnabled, - PagedResourcesAssembler assembler) { + public PagedModel list( + Pageable pageable, + @RequestParam(required = false) String schedulesEnabled, + PagedResourcesAssembler assembler + ) { PagedModel result; - if(StringUtils.hasText(schedulesEnabled) && schedulesEnabled.toLowerCase().equals("true")) { + if(StringUtils.hasText(schedulesEnabled) && schedulesEnabled.toLowerCase(Locale.ROOT).equals("true")) { result = assembler.toModel(this.launcherService.getLaunchersWithSchedules(pageable), this.launcherAssembler); } else { result = assembler.toModel(this.launcherService.getAllLaunchers(pageable), this.launcherAssembler); @@ -70,7 +75,7 @@ public PagedModel list(Pageable pageable, } /** - * {@link org.springframework.hateoas.server.ResourceAssembler} implementation that converts + * {@link org.springframework.hateoas.server.RepresentationModelAssembler} implementation that converts * {@link Launcher}s to {@link LauncherResource}s. */ private static class Assembler extends RepresentationModelAssemblerSupport { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskSchedulerController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskSchedulerController.java index 3f1cf5866e..f991d83882 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskSchedulerController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskSchedulerController.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2019 the original author or authors. + * Copyright 2018-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -19,8 +19,6 @@ import java.util.List; import java.util.Map; -import org.apache.commons.lang.StringUtils; - import org.springframework.cloud.dataflow.rest.resource.ScheduleInfoResource; import org.springframework.cloud.dataflow.rest.util.DeploymentPropertiesUtils; import org.springframework.cloud.dataflow.server.repository.NoSuchScheduleException; @@ -36,9 +34,11 @@ import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; import org.springframework.util.Assert; +import org.springframework.web.bind.annotation.DeleteMapping; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; @@ -80,11 +80,13 @@ public TaskSchedulerController(SchedulerService schedulerService) { * @param pageable {@link Pageable} to be used * @return a list of Schedules */ - @RequestMapping(value = "", method = RequestMethod.GET) + @GetMapping("") @ResponseStatus(HttpStatus.OK) - public PagedModel list(Pageable pageable, - @RequestParam(value = "platform", required = false) String platform, - PagedResourcesAssembler assembler) { + public PagedModel list( + Pageable pageable, + @RequestParam(required = false) String platform, + PagedResourcesAssembler assembler + ) { List result = this.schedulerService.listForPlatform(platform); return assembler.toModel(new PageImpl<>(result, pageable, result.size()), taskAssembler); } @@ -96,10 +98,12 @@ public PagedModel list(Pageable pageable, * @param platform the name of the platform from which the schedule will be retrieved. * @return a {@link ScheduleInfoResource} instance for the scheduleName specified. */ - @RequestMapping(value = "/{name}", method = RequestMethod.GET) + @GetMapping("/{name}") @ResponseStatus(HttpStatus.OK) - public ScheduleInfoResource getSchedule(@PathVariable("name") String scheduleName, - @RequestParam(value = "platform", required = false) String platform) { + public ScheduleInfoResource getSchedule( + @PathVariable("name") String scheduleName, + @RequestParam(required = false) String platform + ) { ScheduleInfo schedule = this.schedulerService.getSchedule(scheduleName, platform); if (schedule == null) { throw new NoSuchScheduleException(String.format("Schedule [%s] doesn't exist" , scheduleName)); @@ -117,9 +121,11 @@ public ScheduleInfoResource getSchedule(@PathVariable("name") String scheduleNam * @return a list of Schedules. */ @RequestMapping("/instances/{taskDefinitionName}") - public PagedModel filteredList(@PathVariable String taskDefinitionName, - @RequestParam(value = "platform", required = false) String platform, - PagedResourcesAssembler assembler) { + public PagedModel filteredList( + @PathVariable String taskDefinitionName, + @RequestParam(required = false) String platform, + PagedResourcesAssembler assembler + ) { List result = this.schedulerService.list(taskDefinitionName, platform); int resultSize = result.size(); Pageable pageable = PageRequest.of(0, @@ -133,7 +139,7 @@ public PagedModel filteredList(@PathVariable String taskDe * * @param taskDefinitionName the name of the {@link org.springframework.cloud.dataflow.core.TaskDefinition}. 
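Editor's note: the scheduling endpoints in this hunk all follow the same request-parameter style; a sketch of creating a schedule via the save endpoint that follows. The base URL, mapping path, and the cron property key are assumptions, not taken from the patch:

```java
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

// Sketch only. The controller trims the schedule name (scheduleName.strip())
// before delegating to the SchedulerService, so the trailing space below is
// harmless. Property key and path are illustrative.
public class CreateSchedule {
	public static void main(String[] args) {
		MultiValueMap<String, String> form = new LinkedMultiValueMap<>();
		form.add("scheduleName", "nightly-run ");
		form.add("taskDefinitionName", "my-task");
		form.add("properties", "scheduler.cron.expression=0 0 2 * * *");
		new RestTemplate().postForLocation("http://localhost:9393/tasks/schedules", form);
	}
}
```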
*/ - @RequestMapping(value = "/instances/{taskDefinitionName}", method = RequestMethod.DELETE) + @DeleteMapping("/instances/{taskDefinitionName}") @ResponseStatus(HttpStatus.OK) public void deleteSchedulesforDefinition(@PathVariable String taskDefinitionName) { this.schedulerService.unscheduleForTaskDefinition(taskDefinitionName); @@ -150,16 +156,18 @@ public void deleteSchedulesforDefinition(@PathVariable String taskDefinitionName * @param arguments the runtime commandline arguments * @param platform the name of the platform for which the schedule is created. */ - @RequestMapping(value = "", method = RequestMethod.POST) + @PostMapping("") @ResponseStatus(HttpStatus.CREATED) - public void save(@RequestParam("scheduleName") String scheduleName, - @RequestParam("taskDefinitionName") String taskDefinitionName, + public void save( + @RequestParam String scheduleName, + @RequestParam String taskDefinitionName, @RequestParam String properties, @RequestParam(required = false) String arguments, - @RequestParam(value = "platform", required = false) String platform) { + @RequestParam(required = false) String platform + ) { Map propertiesToUse = DeploymentPropertiesUtils.parse(properties); List argumentsToUse = DeploymentPropertiesUtils.parseArgumentList(arguments, " "); - this.schedulerService.schedule(StringUtils.trim(scheduleName), taskDefinitionName, + this.schedulerService.schedule(scheduleName.strip(), taskDefinitionName, propertiesToUse, argumentsToUse, platform); } @@ -169,10 +177,12 @@ public void save(@RequestParam("scheduleName") String scheduleName, * @param scheduleName name of the schedule to be deleted * @param platform name of the platform from which the schedule is deleted. */ - @RequestMapping(value = "/{scheduleName}", method = RequestMethod.DELETE) + @DeleteMapping("/{scheduleName}") @ResponseStatus(HttpStatus.OK) - public void unschedule(@PathVariable("scheduleName") String scheduleName, - @RequestParam(value = "platform", required = false) String platform) { + public void unschedule( + @PathVariable String scheduleName, + @RequestParam(required = false) String platform + ) { schedulerService.unschedule(scheduleName, platform); } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TasksInfoController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TasksInfoController.java index c8d4ebbed8..90c324692f 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TasksInfoController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TasksInfoController.java @@ -22,8 +22,8 @@ import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; import org.springframework.util.Assert; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; @@ -58,11 +58,14 @@ public TasksInfoController(TaskExecutionService taskExecutionService) { this.taskExecutionService = taskExecutionService; } - @RequestMapping(value= "executions", method = RequestMethod.GET) + @GetMapping("executions") 
@ResponseStatus(HttpStatus.OK) - public TaskExecutionsInfoResource getInfo(@RequestParam(required = false, defaultValue = "false", name="completed") String completed, - @RequestParam(required = false, defaultValue = "", name="name") String taskName) { - return this.taskExecutionsAssembler.toModel(this.taskExecutionService.getAllTaskExecutionsCount(Boolean.valueOf(completed), taskName)); + public TaskExecutionsInfoResource getInfo( + @RequestParam(required = false, defaultValue = "false") String completed, + @RequestParam(required = false, defaultValue = "", name="name") String taskName, + @RequestParam(required = false) Integer days + ) { + return this.taskExecutionsAssembler.toModel(this.taskExecutionService.getAllTaskExecutionsCount(Boolean.parseBoolean(completed), taskName, days)); } /** diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/ToolsController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/ToolsController.java index 4eaac7dd0f..7fb5c29851 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/ToolsController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/ToolsController.java @@ -29,9 +29,9 @@ import org.springframework.hateoas.server.ExposesResourceFor; import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.validation.annotation.Validated; +import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RestController; /** @@ -61,7 +61,7 @@ public class ToolsController { * @param definition the map containing the task definition DSL and task name * @return a resource with the graph property set */ - @RequestMapping(value = "/parseTaskTextToGraph", method = RequestMethod.POST) + @PostMapping("/parseTaskTextToGraph") public TaskToolsResource parseTaskTextToGraph(@RequestBody Map definition) { Graph graph = null; List> errors = new ArrayList<>(); @@ -87,7 +87,7 @@ public TaskToolsResource parseTaskTextToGraph(@RequestBody Map d * @param graph the Flo Graph * @return a resource with the dsl property set */ - @RequestMapping(value = "/convertTaskGraphToText", method = RequestMethod.POST) + @PostMapping("/convertTaskGraphToText") public TaskToolsResource convertTaskGraphToText(@RequestBody Graph graph) { String dsl = null; List> errors = new ArrayList<>(); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/UiController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/UiController.java index 1f364f80ff..bf1cc53de1 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/UiController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/UiController.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2017 the original author or authors. + * Copyright 2015-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -27,7 +27,7 @@ * @author Gunnar Hillert */ @Controller -@RequestMapping(UiController.WEB_UI_INDEX_PAGE_ROUTE) +@RequestMapping({ UiController.WEB_UI_INDEX_PAGE_ROUTE, UiController.WEB_UI_INDEX_PAGE_ROUTE + "/" }) public class UiController { public static final String WEB_UI_INDEX_PAGE_ROUTE = "/dashboard"; diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/VisibleProperties.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/VisibleProperties.java index b01ab79d2b..819f0e6417 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/VisibleProperties.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/VisibleProperties.java @@ -22,6 +22,8 @@ import java.util.Map; import java.util.Set; +import org.slf4j.LoggerFactory; + import org.springframework.boot.configurationmetadata.ConfigurationMetadataProperty; import org.springframework.cloud.dataflow.configuration.metadata.ApplicationConfigurationMetadataResolver; import org.springframework.cloud.dataflow.core.RelaxedNames; @@ -71,10 +73,18 @@ public Map qualifyProperties(Map properties, Res String provided = entry.getKey(); if (!allProps.contains(provided)) { List longForms = null; - for (String relaxed : new RelaxedNames(provided)) { - longForms = visible.get(relaxed); - if (longForms != null) { - break; + RelaxedNames relaxedNames = null; + try { + relaxedNames = new RelaxedNames(provided); + } catch (Exception x) { + LoggerFactory.getLogger(getClass()).error("Exception determining relaxed name for " + provided, x); + } + if(relaxedNames != null) { + for (String relaxed : relaxedNames) { + longForms = visible.get(relaxed); + if (longForms != null) { + break; + } } } if (longForms != null) { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultAppRegistrationAssembler.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultAppRegistrationAssembler.java index fb26ef836f..b9a8eba8ce 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultAppRegistrationAssembler.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultAppRegistrationAssembler.java @@ -38,11 +38,23 @@ public R toModel(AppRegistration registration) { @Override protected R instantiateModel(AppRegistration registration) { - AppRegistrationResource appRegistrationResource = (registration.getVersions() == null) ? new AppRegistrationResource(registration.getName(), registration.getType().name(), - registration.getVersion(), registration.getUri().toString(), registration.isDefaultVersion()) : - new AppRegistrationResource(registration.getName(), registration.getType().name(), - registration.getVersion(), registration.getUri().toString(), registration.isDefaultVersion(), - registration.getVersions()); + AppRegistrationResource appRegistrationResource = (registration.getVersions() == null) + ? new AppRegistrationResource( + registration.getName(), + registration.getType().name(), + registration.getVersion(), + registration.getUri().toString(), + registration.getMetadataUri() != null ? 
registration.getMetadataUri().toString() : null, + registration.isDefaultVersion() + ) : new AppRegistrationResource( + registration.getName(), + registration.getType().name(), + registration.getVersion(), + registration.getUri().toString(), + registration.getMetadataUri() != null ? registration.getMetadataUri().toString() : null, + registration.isDefaultVersion(), + registration.getVersions() + ); return (R) appRegistrationResource; } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssembler.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssembler.java index 1f0065fe45..1db2ba2c5d 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssembler.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssembler.java @@ -1,5 +1,5 @@ /* - * Copyright 2020 the original author or authors. + * Copyright 2020-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,8 +15,17 @@ */ package org.springframework.cloud.dataflow.server.controller.assembler; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.batch.core.launch.NoSuchJobExecutionException; import org.springframework.cloud.dataflow.core.TaskDefinition; import org.springframework.cloud.dataflow.core.TaskManifest; +import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; import org.springframework.cloud.dataflow.rest.resource.TaskDefinitionResource; import org.springframework.cloud.dataflow.rest.resource.TaskExecutionResource; import org.springframework.cloud.dataflow.rest.util.ArgumentSanitizer; @@ -24,46 +33,86 @@ import org.springframework.cloud.dataflow.server.controller.TaskDefinitionController; import org.springframework.cloud.dataflow.server.controller.support.TaskExecutionAwareTaskDefinition; import org.springframework.cloud.dataflow.server.service.TaskExecutionService; +import org.springframework.cloud.dataflow.server.service.TaskJobService; import org.springframework.cloud.dataflow.server.service.impl.TaskServiceUtils; +import org.springframework.cloud.dataflow.server.task.DataflowTaskExplorer; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; /** * {@link org.springframework.hateoas.server.RepresentationModelAssembler} implementation that converts * {@link TaskDefinition}s to {@link TaskDefinitionResource}s. 
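Editor's note: for readers unfamiliar with the pattern, a minimal self-contained sketch (all names hypothetical) of how these `RepresentationModelAssemblerSupport` subclasses divide work between `instantiateModel` and `toModel`:

```java
import org.springframework.hateoas.RepresentationModel;
import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

// Illustrative names only; this mirrors the assembler pattern used throughout
// the diff: instantiateModel maps entity to resource, toModel adds links.
@RestController
@RequestMapping("/foos")
class FooController { }

class Foo {
	final long id;
	final String name;
	Foo(long id, String name) { this.id = id; this.name = name; }
}

class FooResource extends RepresentationModel<FooResource> {
	final String name;
	FooResource(String name) { this.name = name; }
}

class FooAssembler extends RepresentationModelAssemblerSupport<Foo, FooResource> {
	FooAssembler() { super(FooController.class, FooResource.class); }

	@Override
	public FooResource toModel(Foo entity) {
		// createModelWithId calls instantiateModel and adds a self link
		// derived from FooController's /foos mapping.
		return createModelWithId(entity.id, entity);
	}

	@Override
	protected FooResource instantiateModel(Foo entity) {
		return new FooResource(entity.name);
	}
}
```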
+ * + * @author Ilayaperumal Gopinathan + * @author Evgeniy Bezdomnikov + * @author Glenn Renfro + * @author Chris Bono */ public class DefaultTaskDefinitionAssembler extends RepresentationModelAssemblerSupport { + private static final Logger logger = LoggerFactory.getLogger(DefaultTaskDefinitionAssembler.class); + private final TaskExecutionService taskExecutionService; + private final TaskJobService taskJobService; + + private final DataflowTaskExplorer taskExplorer; + private final TaskSanitizer taskSanitizer = new TaskSanitizer(); private boolean enableManifest; private final ArgumentSanitizer argumentSanitizer = new ArgumentSanitizer(); - public DefaultTaskDefinitionAssembler(TaskExecutionService taskExecutionService, boolean enableManifest, - Class classType) { + + public DefaultTaskDefinitionAssembler( + TaskExecutionService taskExecutionService, + boolean enableManifest, + Class classType, + TaskJobService taskJobService, + DataflowTaskExplorer taskExplorer) { super(TaskDefinitionController.class, classType); this.taskExecutionService = taskExecutionService; this.enableManifest = enableManifest; + this.taskJobService = taskJobService; + this.taskExplorer = taskExplorer; } TaskDefinitionResource updateTaskExecutionResource( TaskExecutionAwareTaskDefinition taskExecutionAwareTaskDefinition, TaskDefinitionResource taskDefinitionResource, boolean manifest) { - TaskExecution taskExecution = taskExecutionAwareTaskDefinition.getLatestTaskExecution(); - taskExecution = this.taskSanitizer.sanitizeTaskExecutionArguments(taskExecution); - TaskManifest taskManifest = this.taskExecutionService.findTaskManifestById(taskExecution.getExecutionId()); - taskManifest = this.taskSanitizer.sanitizeTaskManifest(taskManifest); + + TaskExecution taskExecution = this.sanitizeTaskExecutionArguments(taskExecutionAwareTaskDefinition.getLatestTaskExecution()); + TaskManifest taskManifest = null; + if (manifest) { + taskManifest = this.taskExecutionService.findTaskManifestById(taskExecution.getExecutionId()); + taskManifest = this.taskSanitizer.sanitizeTaskManifest(taskManifest); + } + TaskJobExecution composedTaskJobExecution = null; + if (taskExecution != null && taskDefinitionResource.isComposed()) { + Set jobExecutionIds = this.taskExplorer.getJobExecutionIdsByTaskExecutionId(taskExecution.getExecutionId()); + if(jobExecutionIds != null && jobExecutionIds.size() > 0) { + try { + composedTaskJobExecution = this.taskJobService.getJobExecution(jobExecutionIds.toArray(new Long[0])[0]); + } catch (NoSuchJobExecutionException noSuchJobExecutionException) { + logger.warn("Job Execution for Task Execution {} could not be found.", + taskExecution.getExecutionId()); + } + } + } TaskExecutionResource taskExecutionResource = (manifest && taskManifest != null) ? 
- new TaskExecutionResource(taskExecution, taskManifest) : - new TaskExecutionResource(taskExecution); + new TaskExecutionResource(taskExecution, taskManifest, composedTaskJobExecution) : + new TaskExecutionResource(taskExecution, composedTaskJobExecution); taskDefinitionResource.setLastTaskExecution(taskExecutionResource); return taskDefinitionResource; } - + private TaskExecution sanitizeTaskExecutionArguments(TaskExecution taskExecution) { + List args = taskExecution.getArguments().stream() + .map(this.argumentSanitizer::sanitize).collect(Collectors.toList()); + taskExecution.setArguments(args); + return taskExecution; + } @Override public R toModel(TaskExecutionAwareTaskDefinition taskExecutionAwareTaskDefinition) { return createModelWithId(taskExecutionAwareTaskDefinition.getTaskDefinition().getName(), diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssemblerProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssemblerProvider.java index 033fd733bb..78449f5489 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssemblerProvider.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssemblerProvider.java @@ -1,5 +1,5 @@ /* - * Copyright 2020 the original author or authors. + * Copyright 2020-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,22 +17,40 @@ import org.springframework.cloud.dataflow.rest.resource.TaskDefinitionResource; import org.springframework.cloud.dataflow.server.service.TaskExecutionService; +import org.springframework.cloud.dataflow.server.service.TaskJobService; +import org.springframework.cloud.dataflow.server.task.DataflowTaskExplorer; +import org.springframework.util.Assert; /** * Default REST resource assembler that returns the {@link TaskDefinitionResource} type. 
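Editor's note: the provider constructor in the hunk below adds `Assert.notNull` guards on every collaborator; the same fail-fast constructor-injection pattern in isolation, with illustrative names:

```java
import org.springframework.util.Assert;

// Sketch of the Assert-based fail-fast constructor: a misconfigured context
// fails at bean creation rather than on first use of the missing dependency.
class AuditedService {
	private final Runnable auditHook;

	AuditedService(Runnable auditHook) {
		Assert.notNull(auditHook, "auditHook required");
		this.auditHook = auditHook;
	}

	void run() {
		auditHook.run();
	}
}
```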
+ * * @author Ilayaperumal Gopinathan + * @author Glenn Renfro */ public class DefaultTaskDefinitionAssemblerProvider implements TaskDefinitionAssemblerProvider { private final TaskExecutionService taskExecutionService; - public DefaultTaskDefinitionAssemblerProvider(TaskExecutionService taskExecutionService) { + private final DataflowTaskExplorer taskExplorer; + + private final TaskJobService taskJobService; + + public DefaultTaskDefinitionAssemblerProvider( + TaskExecutionService taskExecutionService, + TaskJobService taskJobService, + DataflowTaskExplorer taskExplorer + ) { + Assert.notNull(taskExecutionService, "taskExecutionService required"); + Assert.notNull(taskJobService, "taskJobService required"); + Assert.notNull(taskExplorer, "taskExplorer required"); this.taskExecutionService = taskExecutionService; + this.taskJobService = taskJobService; + this.taskExplorer = taskExplorer; } @Override public DefaultTaskDefinitionAssembler getTaskDefinitionAssembler(boolean enableManifest) { return new DefaultTaskDefinitionAssembler(taskExecutionService, enableManifest, - TaskDefinitionResource.class); + TaskDefinitionResource.class, taskJobService, taskExplorer); } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/security/SecurityController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/security/SecurityController.java index c5afcc2dc9..d26faee1e1 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/security/SecurityController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/security/SecurityController.java @@ -20,18 +20,19 @@ import org.springframework.cloud.common.security.support.SecurityStateBean; import org.springframework.cloud.dataflow.rest.resource.security.SecurityInfoResource; import org.springframework.hateoas.server.ExposesResourceFor; -import org.springframework.hateoas.server.mvc.WebMvcLinkBuilder; import org.springframework.http.HttpStatus; import org.springframework.security.authentication.AnonymousAuthenticationToken; import org.springframework.security.core.Authentication; import org.springframework.security.core.GrantedAuthority; import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.ResponseBody; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; + /** * Provides security-related meta information. 
Provides one REST endpoint at present time * {@code /security/info} that provides information such as whether security is enabled @@ -61,7 +62,7 @@ public SecurityController(SecurityStateBean securityStateBean) { * @return the security info */ @ResponseBody - @RequestMapping(method = RequestMethod.GET) + @GetMapping @ResponseStatus(HttpStatus.OK) public SecurityInfoResource getSecurityInfo() { @@ -69,7 +70,7 @@ public SecurityInfoResource getSecurityInfo() { final SecurityInfoResource securityInfo = new SecurityInfoResource(); securityInfo.setAuthenticationEnabled(authenticationEnabled); - securityInfo.add(WebMvcLinkBuilder.linkTo(SecurityController.class).withSelfRel()); + securityInfo.add(linkTo(SecurityController.class).withSelfRel()); if (authenticationEnabled && SecurityContextHolder.getContext() != null) { final Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); @@ -77,9 +78,8 @@ public SecurityInfoResource getSecurityInfo() { securityInfo.setAuthenticated(authentication.isAuthenticated()); securityInfo.setUsername(authentication.getName()); - for (Object authority : authentication.getAuthorities()) { - final GrantedAuthority grantedAuthority = (GrantedAuthority) authority; - securityInfo.addRole(grantedAuthority.getAuthority()); + for (GrantedAuthority authority : authentication.getAuthorities()) { + securityInfo.addRole(authority.getAuthority()); } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/support/TaskExecutionControllerDeleteAction.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/support/TaskExecutionControllerDeleteAction.java index b25f756ea4..faf91f13f0 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/support/TaskExecutionControllerDeleteAction.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/support/TaskExecutionControllerDeleteAction.java @@ -15,10 +15,12 @@ */ package org.springframework.cloud.dataflow.server.controller.support; +import java.util.Set; + import org.springframework.cloud.dataflow.server.controller.TaskExecutionController; /** - * This enum is used by the {@link TaskExecutionController#cleanup(java.util.Set, TaskExecutionControllerDeleteAction[])}. + * This enum is used by the {@link TaskExecutionController#cleanup(Set, TaskExecutionControllerDeleteAction[])}. * * @author Gunnar Hillert * diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractAggregateViewMigration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractAggregateViewMigration.java new file mode 100644 index 0000000000..876555c91f --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractAggregateViewMigration.java @@ -0,0 +1,64 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; + +public abstract class AbstractAggregateViewMigration extends AbstractMigration { + public AbstractAggregateViewMigration() { + super(null); + } + + public final static String CREATE_AGGREGATE_TASK_EXECUTION_VIEW = "CREATE VIEW AGGREGATE_TASK_EXECUTION AS\n" + + " SELECT TASK_EXECUTION_ID, START_TIME, END_TIME, TASK_NAME, EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID, 'boot2' AS SCHEMA_TARGET FROM TASK_EXECUTION\n" + + "UNION ALL\n" + + " SELECT TASK_EXECUTION_ID, START_TIME, END_TIME, TASK_NAME, EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID, 'boot3' AS SCHEMA_TARGET FROM BOOT3_TASK_EXECUTION"; + + public final static String CREATE_AGGREGATE_TASK_EXECUTION_PARAMS_VIEW = "CREATE VIEW AGGREGATE_TASK_EXECUTION_PARAMS AS\n" + + " SELECT TASK_EXECUTION_ID, TASK_PARAM, 'boot2' AS SCHEMA_TARGET FROM TASK_EXECUTION_PARAMS\n" + + "UNION ALL\n" + + " SELECT TASK_EXECUTION_ID, TASK_PARAM, 'boot3' AS SCHEMA_TARGET FROM BOOT3_TASK_EXECUTION_PARAMS"; + public final static String CREATE_AGGREGATE_JOB_EXECUTION_VIEW = "CREATE VIEW AGGREGATE_JOB_EXECUTION AS\n" + + " SELECT JOB_EXECUTION_ID, VERSION, JOB_INSTANCE_ID, CREATE_TIME, START_TIME, END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, LAST_UPDATED, 'boot2' AS SCHEMA_TARGET FROM BATCH_JOB_EXECUTION\n" + + "UNION ALL\n" + + " SELECT JOB_EXECUTION_ID, VERSION, JOB_INSTANCE_ID, CREATE_TIME, START_TIME, END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, LAST_UPDATED, 'boot3' AS SCHEMA_TARGET FROM BOOT3_BATCH_JOB_EXECUTION"; + public final static String CREATE_AGGREGATE_JOB_INSTANCE_VIEW = "CREATE VIEW AGGREGATE_JOB_INSTANCE AS\n" + + " SELECT JOB_INSTANCE_ID, VERSION, JOB_NAME, JOB_KEY, 'boot2' AS SCHEMA_TARGET FROM BATCH_JOB_INSTANCE\n" + + "UNION ALL\n" + + " SELECT JOB_INSTANCE_ID, VERSION, JOB_NAME, JOB_KEY, 'boot3' AS SCHEMA_TARGET FROM BOOT3_BATCH_JOB_INSTANCE"; + public final static String CREATE_AGGREGATE_TASK_BATCH_VIEW = "CREATE VIEW AGGREGATE_TASK_BATCH AS\n" + + " SELECT TASK_EXECUTION_ID, JOB_EXECUTION_ID, 'boot2' AS SCHEMA_TARGET FROM TASK_TASK_BATCH\n" + + "UNION ALL\n" + + " SELECT TASK_EXECUTION_ID, JOB_EXECUTION_ID, 'boot3' AS SCHEMA_TARGET FROM BOOT3_TASK_TASK_BATCH"; + public final static String CREATE_AGGREGATE_STEP_EXECUTION_VIEW = "CREATE VIEW AGGREGATE_STEP_EXECUTION AS\n" + + " SELECT STEP_EXECUTION_ID, VERSION, STEP_NAME, JOB_EXECUTION_ID, START_TIME, END_TIME, STATUS, COMMIT_COUNT, READ_COUNT, FILTER_COUNT, WRITE_COUNT, READ_SKIP_COUNT, WRITE_SKIP_COUNT, PROCESS_SKIP_COUNT, ROLLBACK_COUNT, EXIT_CODE, EXIT_MESSAGE, LAST_UPDATED, 'boot2' AS SCHEMA_TARGET FROM BATCH_STEP_EXECUTION\n" + + "UNION ALL\n" + + " SELECT STEP_EXECUTION_ID, VERSION, STEP_NAME, JOB_EXECUTION_ID, START_TIME, END_TIME, STATUS, COMMIT_COUNT, READ_COUNT, FILTER_COUNT, WRITE_COUNT, 
READ_SKIP_COUNT, WRITE_SKIP_COUNT, PROCESS_SKIP_COUNT, ROLLBACK_COUNT, EXIT_CODE, EXIT_MESSAGE, LAST_UPDATED, 'boot3' AS SCHEMA_TARGET FROM BOOT3_BATCH_STEP_EXECUTION"; + @Override + public List<SqlCommand> getCommands() { + return Arrays.asList( + SqlCommand.from(CREATE_AGGREGATE_TASK_EXECUTION_VIEW), + SqlCommand.from(CREATE_AGGREGATE_TASK_EXECUTION_PARAMS_VIEW), + SqlCommand.from(CREATE_AGGREGATE_TASK_BATCH_VIEW), + SqlCommand.from(CREATE_AGGREGATE_JOB_EXECUTION_VIEW), + SqlCommand.from(CREATE_AGGREGATE_JOB_INSTANCE_VIEW), + SqlCommand.from(CREATE_AGGREGATE_STEP_EXECUTION_VIEW)); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractBoot3InitialSetupMigration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractBoot3InitialSetupMigration.java new file mode 100644 index 0000000000..77fc6bc0b4 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractBoot3InitialSetupMigration.java @@ -0,0 +1,56 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration; + +import java.util.ArrayList; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; + +/** + * Base implementation for the initial Boot 3 schema. + * + * @author Chris Bono + */ +public abstract class AbstractBoot3InitialSetupMigration extends AbstractMigration { + public AbstractBoot3InitialSetupMigration() { + super(null); + } + + @Override + public List<SqlCommand> getCommands() { + List<SqlCommand> commands = new ArrayList<>(); + commands.addAll(createTask3Tables()); + commands.addAll(createBatch5Tables()); + return commands; + } + + /** + * Creates the spring-cloud-task V3 tables. + * + * @return the list of sql commands + */ + public abstract List<SqlCommand> createTask3Tables(); + + /** + * Creates the spring-batch V5 tables. + * + * @return the list of sql commands + */ + public abstract List<SqlCommand> createBatch5Tables(); + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractBootVersionMigration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractBootVersionMigration.java new file mode 100644 index 0000000000..a014eb1a5e --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractBootVersionMigration.java @@ -0,0 +1,41 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration; + +import java.util.Collections; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +/** + * The boot_version column indicates the version of Spring Boot used by the + * application and, by implication, the schema version of the task and batch tables. + * @author Corneil du Plessis + * @since 2.11 + */ +public abstract class AbstractBootVersionMigration extends AbstractMigration { + private static final String ADD_BOOT_VERSION = "alter table app_registration add boot_version varchar(16)"; + + public AbstractBootVersionMigration() { + super(null); + } + + @Override + public List<SqlCommand> getCommands() { + return Collections.singletonList(SqlCommand.from(ADD_BOOT_VERSION)); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCaseSensitiveMigration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCaseSensitiveMigration.java new file mode 100644 index 0000000000..ed9b01321c --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCaseSensitiveMigration.java @@ -0,0 +1,53 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; + +/** + * Converts lower case table names to upper case to ensure the schema works correctly on a MariaDB or MySQL installation with case-sensitive table or column names.
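A vendor-specific migration would compose the rename statements defined just below into its command list. A minimal sketch, assuming a hypothetical MariaDB-flavored migration class (the class name and version number are illustrative):

package org.springframework.cloud.dataflow.server.db.migration.mariadb;

import java.util.Arrays;
import java.util.List;

import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
import org.springframework.cloud.dataflow.server.db.migration.AbstractCaseSensitiveMigration;

// Hypothetical concrete migration: moves the lower-case metadata table and its
// sequence table over to the upper-case names the server expects.
public class V8__RenameLowerCaseTables extends AbstractCaseSensitiveMigration {

	@Override
	public List<SqlCommand> getCommands() {
		return Arrays.asList(
				SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_LC),
				SqlCommand.from(RENAME_TASK_EXECUTION_METADATA),
				SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_SEQ_LC_TBL),
				SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_SEQ_TBL));
	}
}

The two-step rename through the *_lc name exists because a direct rename from task_execution_metadata to TASK_EXECUTION_METADATA is a no-op on installations where table names compare case-insensitively.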
+ * @author Corneil du Plessis + */ +public abstract class AbstractCaseSensitiveMigration extends AbstractMigration { + protected final static String RENAME_TASK_EXECUTION_METADATA_LC = "alter table task_execution_metadata rename to task_execution_metadata_lc"; + + protected final static String RENAME_TASK_EXECUTION_METADATA = "alter table task_execution_metadata_lc rename to TASK_EXECUTION_METADATA"; + + protected final static String RENAME_TASK_EXECUTION_METADATA_SEQ_LC_TBL = "alter table task_execution_metadata_seq rename to task_execution_metadata_seq_lc"; + + protected final static String RENAME_TASK_EXECUTION_METADATA_SEQ_TBL = "alter table task_execution_metadata_seq_lc rename to TASK_EXECUTION_METADATA_SEQ"; + + protected final static String RENAME_TASK_EXECUTION_METADATA_SEQ_LC = "alter sequence task_execution_metadata_seq rename to task_execution_metadata_seq_lc"; + + protected final static String RENAME_TASK_EXECUTION_METADATA_SEQ = "alter sequence task_execution_metadata_seq_lc rename to TASK_EXECUTION_METADATA_SEQ"; + + protected final static String CREATE_SEQUENCE_TASK_EXECUTION_METADATA_SEQ_LC = "CREATE SEQUENCE task_execution_metadata_seq_lc"; + + protected final static String ALTER_SEQUENCE_TASK_EXECUTION_METADATA_SEQ_LC = "select setval(task_execution_metadata_seq_lc, (select nextval(task_execution_metadata_seq)), false)"; + + protected final static String DROP_SEQUENCE_TASK_EXECUTION_METADATA_SEQ = "drop sequence task_execution_metadata_seq"; + + protected final static String CREATE_SEQUENCE_TASK_EXECUTION_METADATA_SEQ = "create sequence TASK_EXECUTION_METADATA_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB"; + + protected final static String ALTER_SEQUENCE_TASK_EXECUTION_METADATA_SEQ = "select setval(TASK_EXECUTION_METADATA_SEQ, (select nextval(task_execution_metadata_seq_lc)), false)"; + + protected final static String DROP_SEQUENCE_TASK_EXECUTION_METADATA_SEQ_LC = "drop sequence task_execution_metadata_seq_lc"; + + public AbstractCaseSensitiveMigration() { + super(null); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCreateBatchIndexesMigration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCreateBatchIndexesMigration.java new file mode 100644 index 0000000000..bedebafbe3 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCreateBatchIndexesMigration.java @@ -0,0 +1,55 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.server.db.migration; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; + +/** + * Provides indexes to improve aggregate view performance. + * @author Corneil du Plessis + */ +public abstract class AbstractCreateBatchIndexesMigration extends AbstractMigration { + protected static final String CREATE_BATCH_STEP_EXECUTION_JOB_EXECUTION_ID_INDEX = + "create index BATCH_STEP_EXECUTION_JOB_EXECUTION_ID_IX on BATCH_STEP_EXECUTION(JOB_EXECUTION_ID)"; + protected static final String CREATE_BOOT3_BATCH_STEP_EXECUTION_JOB_EXECUTION_ID_INDEX = + "create index BOOT3_BATCH_STEP_EXECUTION_JOB_EXECUTION_ID_IX on BOOT3_BATCH_STEP_EXECUTION(JOB_EXECUTION_ID)"; + protected static final String CREATE_BOOT3_TASK_TASK_BATCH_JOB_EXECUTION_ID_INDEX = + "create index BOOT3_TASK_TASK_BATCH_JOB_EXECUTION_ID_IX on BOOT3_TASK_TASK_BATCH(JOB_EXECUTION_ID)"; + protected static final String CREATE_TASK_TASK_BATCH_JOB_EXECUTION_ID_INDEX = + "create index TASK_TASK_BATCH_JOB_EXECUTION_ID_IX on TASK_TASK_BATCH(JOB_EXECUTION_ID)"; + protected static final String CREATE_BATCH_JOB_EXECUTION_START_TIME_INDEX = + "create index BATCH_JOB_EXECUTION_START_TIME_IX on BATCH_JOB_EXECUTION(START_TIME)"; + protected static final String CREATE_BOOT3_BATCH_JOB_EXECUTION_START_TIME_INDEX = + "create index BOOT3_BATCH_JOB_EXECUTION_START_TIME_IX on BOOT3_BATCH_JOB_EXECUTION(START_TIME)"; + + public AbstractCreateBatchIndexesMigration() { + super(null); + } + + @Override + public List<SqlCommand> getCommands() { + return Arrays.asList(SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_JOB_EXECUTION_ID_INDEX), + SqlCommand.from(CREATE_BOOT3_BATCH_STEP_EXECUTION_JOB_EXECUTION_ID_INDEX), + SqlCommand.from(CREATE_BOOT3_TASK_TASK_BATCH_JOB_EXECUTION_ID_INDEX), + SqlCommand.from(CREATE_TASK_TASK_BATCH_JOB_EXECUTION_ID_INDEX), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_START_TIME_INDEX), + SqlCommand.from(CREATE_BOOT3_BATCH_JOB_EXECUTION_START_TIME_INDEX)); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCreateTaskParentIndexMigration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCreateTaskParentIndexMigration.java new file mode 100644 index 0000000000..7d2175f597 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCreateTaskParentIndexMigration.java @@ -0,0 +1,45 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.cloud.dataflow.server.db.migration; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; + +/** + * Provides indexes to improve the performance of finding child tasks. + * @author Corneil du Plessis + */ +public abstract class AbstractCreateTaskParentIndexMigration extends AbstractMigration { + protected static final String CREATE_TASK_PARENT_INDEX = + "create index TASK_EXECUTION_PARENT_IX on TASK_EXECUTION(PARENT_EXECUTION_ID)"; + protected static final String CREATE_BOOT3_TASK_PARENT_INDEX = + "create index BOOT3_TASK_EXECUTION_PARENT_IX on BOOT3_TASK_EXECUTION(PARENT_EXECUTION_ID)"; + + public AbstractCreateTaskParentIndexMigration() { + super(null); + } + + @Override + public List<SqlCommand> getCommands() { + return Arrays.asList( + SqlCommand.from(CREATE_TASK_PARENT_INDEX), + SqlCommand.from(CREATE_BOOT3_TASK_PARENT_INDEX) + ); + } +} \ No newline at end of file diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractRemoveBatch4Task2Tables.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractRemoveBatch4Task2Tables.java new file mode 100644 index 0000000000..eae7e8e71b --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractRemoveBatch4Task2Tables.java @@ -0,0 +1,81 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration; + +import java.util.ArrayList; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; + +/** + * Base implementation for removing the Task v2 and Batch v4 schema. + * It also removes the BOOT3_ prefix from the Batch v5 and Task v3 tables. + * + * @author Glenn Renfro + */ +public abstract class AbstractRemoveBatch4Task2Tables extends AbstractMigration { + public AbstractRemoveBatch4Task2Tables() { + super(null); + } + + @Override + public List<SqlCommand> getCommands() { + List<SqlCommand> commands = new ArrayList<>(); + commands.addAll(dropBoot3Boot2Views()); + commands.addAll(renameTask2Tables()); + commands.addAll(renameBatch4Tables()); + commands.addAll(renameTask3Tables()); + commands.addAll(renameBatch5Tables()); + return commands; + } + + /** + * Renames the spring-cloud-task V3 tables, removing the BOOT3_ prefix. + * + * @return the list of sql commands + */ + public abstract List<SqlCommand> renameTask3Tables(); + + /** + * Renames the spring-batch V5 tables, removing the BOOT3_ prefix. + * + * @return the list of sql commands + */ + public abstract List<SqlCommand> renameBatch5Tables(); + + /** + * Renames the spring-cloud-task V2 tables, adding a V2_ prefix.
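The parent-index migration above targets the lookup the server performs to find child task executions. A minimal sketch of that query path, assuming a plain JdbcTemplate (this ChildTaskLookup class is illustrative, not part of the change):

import java.util.List;

import javax.sql.DataSource;

import org.springframework.jdbc.core.JdbcTemplate;

public class ChildTaskLookup {

	private final JdbcTemplate jdbc;

	public ChildTaskLookup(DataSource dataSource) {
		this.jdbc = new JdbcTemplate(dataSource);
	}

	// With TASK_EXECUTION_PARENT_IX in place this predicate becomes an index lookup
	// rather than a full scan of TASK_EXECUTION.
	public List<Long> childExecutionIds(long parentExecutionId) {
		return jdbc.queryForList(
				"select TASK_EXECUTION_ID from TASK_EXECUTION where PARENT_EXECUTION_ID = ?",
				Long.class, parentExecutionId);
	}
}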
+ * + * @return the list of sql commands + */ + public abstract List<SqlCommand> renameTask2Tables(); + + /** + * Renames the spring-batch V4 tables, adding a V2_ prefix. + * + * @return the list of sql commands + */ + public abstract List<SqlCommand> renameBatch4Tables(); + + /** + * Removes the aggregate views over the Task V2/Batch V4 and Task V3/Batch V5 tables. + * + * @return the list of sql commands + */ + public abstract List<SqlCommand> dropBoot3Boot2Views(); + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/DataFlowFlywayConfigurationCustomizer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/DataFlowFlywayConfigurationCustomizer.java index ba59ef1f0a..463c6fc242 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/DataFlowFlywayConfigurationCustomizer.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/DataFlowFlywayConfigurationCustomizer.java @@ -1,5 +1,5 @@ /* - * Copyright 2019 the original author or authors. + * Copyright 2019-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,36 +18,44 @@ import javax.sql.DataSource; import org.flywaydb.core.api.configuration.FluentConfiguration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.boot.autoconfigure.flyway.FlywayConfigurationCustomizer; import org.springframework.boot.jdbc.DatabaseDriver; +import org.springframework.cloud.dataflow.common.flyway.DatabaseDriverUtils; import org.springframework.cloud.dataflow.server.db.migration.db2.Db2BeforeBaseline; +import org.springframework.cloud.dataflow.server.db.migration.mariadb.MariadbBeforeBaseline; import org.springframework.cloud.dataflow.server.db.migration.mysql.MysqlBeforeBaseline; import org.springframework.cloud.dataflow.server.db.migration.oracle.OracleBeforeBaseline; import org.springframework.cloud.dataflow.server.db.migration.postgresql.PostgresBeforeBaseline; import org.springframework.cloud.dataflow.server.db.migration.sqlserver.MsSqlBeforeBaseline; -import org.springframework.jdbc.support.JdbcUtils; -import org.springframework.jdbc.support.MetaDataAccessException; /** * Flyway {@link FlywayConfigurationCustomizer} bean customizing callbacks per * active db vendor. * * @author Janne Valkealahti - * + * @author Chris Bono */ public class DataFlowFlywayConfigurationCustomizer implements FlywayConfigurationCustomizer { + private static final Logger LOG = LoggerFactory.getLogger(DataFlowFlywayConfigurationCustomizer.class); + @Override public void customize(FluentConfiguration configuration) { - // boot's flyway auto-config doesn't allow to define callbacks per + // Boot's flyway auto-config doesn't allow defining callbacks per // vendor id, so essentially customizing those here.
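The callback is chosen from the vendor detected on the live connection rather than from configuration, which is why MariaDB can now get its own baseline callback separately from MySQL. A tiny standalone probe of Spring Boot's driver detection (illustrative only; the DatabaseDriverUtils.getDatabaseDriver helper used below in the diff resolves the vendor from the DataSource in a similar spirit):

import org.springframework.boot.jdbc.DatabaseDriver;

public class VendorCallbackProbe {

	public static void main(String[] args) {
		// DatabaseDriver maps a JDBC URL onto Boot's vendor enum; the customizer
		// picks the matching *BeforeBaseline callback from that value.
		System.out.println(DatabaseDriver.fromJdbcUrl("jdbc:mariadb://localhost:3306/dataflow")); // MARIADB
		System.out.println(DatabaseDriver.fromJdbcUrl("jdbc:mysql://localhost:3306/dataflow"));   // MYSQL
	}
}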
DataSource dataSource = configuration.getDataSource(); - DatabaseDriver databaseDriver = getDatabaseDriver(dataSource); + DatabaseDriver databaseDriver = DatabaseDriverUtils.getDatabaseDriver(dataSource); + LOG.info("Adding vendor specific Flyway callback for {}", databaseDriver.name()); if (databaseDriver == DatabaseDriver.POSTGRESQL) { configuration.callbacks(new PostgresBeforeBaseline()); } - else if (databaseDriver == DatabaseDriver.MYSQL || databaseDriver == DatabaseDriver.MARIADB) { + else if (databaseDriver == DatabaseDriver.MARIADB) { + configuration.callbacks(new MariadbBeforeBaseline()); + } + else if (databaseDriver == DatabaseDriver.MYSQL) { configuration.callbacks(new MysqlBeforeBaseline()); } else if (databaseDriver == DatabaseDriver.SQLSERVER) { @@ -60,15 +68,4 @@ else if (databaseDriver == DatabaseDriver.DB2) { configuration.callbacks(new Db2BeforeBaseline()); } } - - private DatabaseDriver getDatabaseDriver(DataSource dataSource) { - // copied from boot's flyway auto-config to get matching db vendor id - try { - String url = JdbcUtils.extractDatabaseMetaData(dataSource, "getURL"); - return DatabaseDriver.fromJdbcUrl(url); - } - catch (MetaDataAccessException ex) { - throw new IllegalStateException(ex); - } - } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/DropColumnSqlCommands.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/DropColumnSqlCommands.java new file mode 100644 index 0000000000..570e8a300a --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/DropColumnSqlCommands.java @@ -0,0 +1,114 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Utility class that can be used in the future to drop columns. + * It checks for the existence of the column before dropping it. + * @author Corneil du Plessis + */ +public class DropColumnSqlCommands extends SqlCommand { + private final static Logger logger = LoggerFactory.getLogger(DropColumnSqlCommands.class); + + private final List<String> columnNames = new ArrayList<>(); + + public DropColumnSqlCommands(String...
columnName) { + columnNames.addAll(Arrays.asList(columnName)); + } + + @Override + public void handle(JdbcTemplate jdbcTemplate, Connection connection) { + for(String name : columnNames) { + try { + dropColumn(jdbcTemplate, connection, name); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + } + + @Override + public boolean canHandleInJdbcTemplate() { + return true; + } + + protected void dropColumn(JdbcTemplate jdbcTemplate, Connection connection, String name) throws SQLException { + logger.debug("dropping:{}", name); + String[] parts = StringUtils.split(name, "."); + Assert.notNull(parts, "Expected 2 or more parts from " + name); + Assert.isTrue(parts.length > 1, "Expected 2 or more parts from " + name); + String columnName = parts[parts.length - 1]; + String tableName = parts[parts.length - 2]; + String schemaName = parts.length > 2 ? parts[parts.length - 3] : null; + logger.debug("Searching for {}.{}", tableName, columnName); + if(hasColumn(connection, schemaName, tableName, columnName)) { + String sql = String.format("alter table %s drop column %s", tableName, columnName); + logger.debug("Executing: {}", sql); + jdbcTemplate.execute(sql); + } + } + protected boolean hasColumn(Connection connection, String schemaName, String tableName, String columnName) throws SQLException { + String actualSchemaName = null; + if(StringUtils.hasText(schemaName)) { + try(ResultSet resultSet = connection.getMetaData().getSchemas()) { + while (resultSet.next()) { + String name = resultSet.getString("TABLE_SCHEM"); + // determine the actual name used in specific database metadata. + if(name.equalsIgnoreCase(schemaName)) { + actualSchemaName = name; + break; + } + } + } + } + String actualTableName = tableName; + try(ResultSet resultSet = connection.getMetaData().getTables(null, actualSchemaName, null, new String[] {"TABLE"})) { + while (resultSet.next()) { + String name = resultSet.getString("TABLE_NAME"); + // determine the actual name used in specific database metadata. + if(name.equalsIgnoreCase(tableName)) { + actualTableName = name; + break; + } + } + } + // actual names need to be same case as reported by meta data query for some databases. + try (ResultSet resultSet = connection.getMetaData().getColumns(null, actualSchemaName, actualTableName, null)) { + while (resultSet.next()) { + String foundColumnName = resultSet.getString("COLUMN_NAME"); + if (foundColumnName.equalsIgnoreCase(columnName)) { + return true; + } + } + } + return false; + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/PostgreSQLTextToOID.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/PostgreSQLTextToOID.java new file mode 100644 index 0000000000..fe816f73d2 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/PostgreSQLTextToOID.java @@ -0,0 +1,163 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.db.migration; + +import java.sql.Connection; +import java.sql.JDBCType; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Types; + +import javax.sql.DataSource; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.util.Assert; + +/** + * Provides for converting text or longtext fields in PostgreSQL to OID. + * + * @author Corneil du Plessis + */ +public class PostgreSQLTextToOID { + private final static Logger logger = LoggerFactory.getLogger(PostgreSQLTextToOID.class); + + private final static String ADD_TMP_OID_COL = "alter table %s add column %s oid"; + + private final static String ADD_TMP_TEXT_COL = "alter table %s add column %s text"; + + private final static String UPDATE_TMP_OID_COL = "update %s set %s = lo_from_bytea(0, %s::bytea), %s = null where %s in (select %s from %s where %s is null and %s is not null limit 100)"; + + private final static String UPDATE_TMP_TEXT_COL = "update %s set %s = convert_from(lo_get(cast(%s as bigint)),'UTF8'), %s = null where %s in (select %s from %s where %s is null and %s is not null limit 100)"; + + private final static String DROP_ORIGINAL_COL = "alter table %s drop column %s"; + + private final static String RENAME_TMP_COL = "alter table %s rename column %s to %s"; + + public static void convertColumnToOID(String table, String id, String column, DataSource dataSource) { + + try (Connection connection = dataSource.getConnection()) { + String tableName = table; + try(ResultSet tables = connection.getMetaData().getTables(null, null, null, null)) { + while(tables.next()) { + String name = tables.getString("TABLE_NAME"); + if(name.equalsIgnoreCase(table)) { + tableName = name; + break; + } + } + } + logger.debug("searching:{}", tableName); + try (ResultSet resultSet = connection.getMetaData().getColumns(null, null, tableName, null)) { + int count = 0; + while (resultSet.next()) { + String columnName = resultSet.getString("COLUMN_NAME"); + if(columnName.equalsIgnoreCase(column)) { + count++; + int dataType = resultSet.getInt("DATA_TYPE"); + logger.info("Found {}:{}:{}", table, column, JDBCType.valueOf(dataType)); + if (dataType == Types.BIGINT) { + return; + } + } + } + Assert.isTrue(count > 0, "Cannot find " + table + ":" + column); + } + } catch (SQLException e) { + throw new RuntimeException(e); + } + JdbcTemplate template = new JdbcTemplate(dataSource); + final String tmp_col = column + "_tmp"; + String sqlTmp = String.format(ADD_TMP_OID_COL, table, tmp_col); + logger.debug("Executing:{}", sqlTmp); + template.update(sqlTmp); + int total = 0; + do { + String sql = String.format(UPDATE_TMP_OID_COL, table, tmp_col, column, column, id, id, table, tmp_col, column); + logger.debug("Executing:{}", sql); + int count = template.update(sql); + total += count; + if (count <= 0) { + logger.info("Updated {} rows of {} in {}", total, column, table); + break; + } + } while (true); + String sqlDrop = String.format(DROP_ORIGINAL_COL, table, column); + logger.debug("Executing:{}", sqlDrop); + template.update(sqlDrop); + String sqlRename = String.format(RENAME_TMP_COL, table, tmp_col, column); + logger.debug("Executing:{}", sqlRename); + template.update(sqlRename); + } + + public static void convertColumnFromOID(String table, String id, String column, DataSource dataSource) { + try 
(Connection connection = dataSource.getConnection()) { + String tableName = table; + try(ResultSet tables = connection.getMetaData().getTables(null, null, null, null)) { + while(tables.next()) { + String name = tables.getString("TABLE_NAME"); + if(name.equalsIgnoreCase(table)) { + tableName = name; + break; + } + } + } + logger.debug("searching:{}", tableName); + try (ResultSet resultSet = connection.getMetaData().getColumns(null, null, tableName, null)) { + int count = 0; + while (resultSet.next()) { + String columnName = resultSet.getString("COLUMN_NAME"); + if(columnName.equalsIgnoreCase(column)) { + count++; + int dataType = resultSet.getInt("DATA_TYPE"); + logger.info("Found {}:{}:{}", table, column, JDBCType.valueOf(dataType)); + if (dataType != Types.BIGINT) { + return; + } + } + } + Assert.isTrue(count > 0, "Cannot find " + table + ":" + column); + } + } catch (SQLException e) { + throw new RuntimeException(e); + } + JdbcTemplate template = new JdbcTemplate(dataSource); + final String tmp_col = column + "_tmp"; + String sqlTmp = String.format(ADD_TMP_TEXT_COL, table, tmp_col); + logger.debug("Executing:{}", sqlTmp); + template.update(sqlTmp); + int total = 0; + do { + String sql = String.format(UPDATE_TMP_TEXT_COL, table, tmp_col, column, column, id, id, table, tmp_col, column); + logger.debug("Executing:{}", sql); + int count = template.update(sql); + total += count; + if (count <= 0) { + logger.info("Updated {} rows of {} in {}", total, column, table); + break; + } + } while (true); + String sqlDrop = String.format(DROP_ORIGINAL_COL, table, column); + logger.debug("Executing:{}", sqlDrop); + template.update(sqlDrop); + String sqlRename = String.format(RENAME_TMP_COL, table, tmp_col, column); + logger.debug("Executing:{}", sqlRename); + template.update(sqlRename); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V10__CreateBatchIndexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V10__CreateBatchIndexes.java new file mode 100644 index 0000000000..970fa855f3 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V10__CreateBatchIndexes.java @@ -0,0 +1,22 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.server.db.migration.db2; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateBatchIndexesMigration; + +public class V10__CreateBatchIndexes extends AbstractCreateBatchIndexesMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V11__CreateTaskParentIndex.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V11__CreateTaskParentIndex.java new file mode 100644 index 0000000000..155a7e11c9 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V11__CreateTaskParentIndex.java @@ -0,0 +1,23 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.db.migration.db2; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateTaskParentIndexMigration; + +public class V11__CreateTaskParentIndex extends AbstractCreateTaskParentIndexMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V12__Remove_Task2_Batch4_Support.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V12__Remove_Task2_Batch4_Support.java new file mode 100644 index 0000000000..2895b3452b --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V12__Remove_Task2_Batch4_Support.java @@ -0,0 +1,846 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.db2; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.server.db.migration.AbstractRemoveBatch4Task2Tables; + +/** + * Prefixes the Task V2 and Batch V4 tables with a V2_ prefix and removes the BOOT3_ prefix from the V3 task and V5 batch tables. + * + * @author Glenn Renfro + */ +public class V12__Remove_Task2_Batch4_Support extends AbstractRemoveBatch4Task2Tables { + + /* + * Scripts to remove the views used for Task V2/Batch V4 and Task V3/Batch V5 queries.
+ */ + private final static String DROP_VIEW_AGGREGATE_TASK_EXECUTION = + "DROP VIEW AGGREGATE_TASK_EXECUTION"; + + private final static String DROP_VIEW_AGGREGATE_TASK_EXECUTION_PARAMS = + "DROP VIEW AGGREGATE_TASK_EXECUTION_PARAMS"; + + private final static String DROP_VIEW_AGGREGATE_JOB_EXECUTION = + "DROP VIEW AGGREGATE_JOB_EXECUTION"; + + private final static String DROP_VIEW_AGGREGATE_JOB_INSTANCE = + "DROP VIEW AGGREGATE_JOB_INSTANCE"; + + private final static String DROP_VIEW_AGGREGATE_TASK_BATCH = + "DROP VIEW AGGREGATE_TASK_BATCH"; + + private final static String DROP_VIEW_AGGREGATE_STEP_EXECUTION = + "DROP VIEW AGGREGATE_STEP_EXECUTION"; + + /* + * Scripts to rename the Task V2 tables, adding a V2_ prefix. + */ + private final static String RENAME_TASK_EXECUTION_V2_TABLE = + """ + CREATE TABLE V2_TASK_EXECUTION ( + TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, + START_TIME TIMESTAMP DEFAULT NULL, + END_TIME TIMESTAMP DEFAULT NULL, + TASK_NAME VARCHAR(100), + EXIT_CODE INTEGER, + EXIT_MESSAGE VARCHAR(2500), + ERROR_MESSAGE VARCHAR(2500), + LAST_UPDATED TIMESTAMP, + EXTERNAL_EXECUTION_ID VARCHAR(255), + PARENT_EXECUTION_ID BIGINT + ); + """; + private final static String POPULATE_TASK_EXECUTION_V2_TABLE = + """ + INSERT INTO V2_TASK_EXECUTION + SELECT * FROM TASK_EXECUTION; + """; + + private final static String CLEANUP_TASK_EXECUTION_V2_TABLE = + """ + DROP TABLE TASK_EXECUTION; + """; + private final static String RENAME_TASK_EXECUTION_PARAMS_V2_TABLE = + """ + CREATE TABLE V2_TASK_EXECUTION_PARAMS ( + TASK_EXECUTION_ID BIGINT NOT NULL, + TASK_PARAM VARCHAR(2500), + CONSTRAINT TASK_EXEC_PARAMS_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES V2_TASK_EXECUTION(TASK_EXECUTION_ID) + ) + """; + private final static String POPULATE_TASK_EXECUTION_PARAMS_V2_TABLE = + """ + INSERT INTO V2_TASK_EXECUTION_PARAMS + SELECT * FROM TASK_EXECUTION_PARAMS; + """; + private final static String CLEANUP_TASK_EXECUTION_PARAMS_V2_TABLE = + """ + DROP TABLE TASK_EXECUTION_PARAMS; + """; + + private final static String RENAME_TASK_TASK_BATCH_V2_TABLE = + """ + CREATE TABLE V2_TASK_TASK_BATCH ( + TASK_EXECUTION_ID BIGINT NOT NULL, + JOB_EXECUTION_ID BIGINT NOT NULL, + CONSTRAINT TASK_EXEC_BATCH_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES V2_TASK_EXECUTION(TASK_EXECUTION_ID) + ); + """; + private final static String POPULATE_TASK_TASK_BATCH_V2_TABLE = + """ + INSERT INTO V2_TASK_TASK_BATCH + SELECT * FROM TASK_TASK_BATCH; + """; + + private final static String CLEANUP_TASK_TASK_BATCH_V2_TABLE = + """ + DROP TABLE TASK_TASK_BATCH; + """; + private final static String RENAME_TASK_LOCK_V2_TABLE = + """ + CREATE TABLE V2_TASK_LOCK ( + LOCK_KEY CHAR(36) NOT NULL, + REGION VARCHAR(100) NOT NULL, + CLIENT_ID CHAR(36), + CREATED_DATE TIMESTAMP NOT NULL, + CONSTRAINT LOCK_PK PRIMARY KEY (LOCK_KEY, REGION) + ); + """; + private final static String POPULATE_TASK_LOCK_V2_TABLE = + """ + INSERT INTO V2_TASK_LOCK + SELECT * FROM TASK_LOCK; + """; + + private final static String CLEANUP_TASK_LOCK_V2_TABLE = + """ + DROP TABLE TASK_LOCK; + """; + private final static String RENAME_TASK_V2_SEQ = + """ + begin + declare newSequenceStart int; + set newSequenceStart = next value for TASK_SEQ; + execute immediate 'CREATE sequence V2_TASK_SEQ start with ' || newSequenceStart; + end; + """; + private final static String CLEANUP_TASK_V2_SEQ = + """ + DROP SEQUENCE TASK_SEQ; + """; + + private final static String RENAME_TASK_EXECUTION_METADATA_V2_TABLE = + """ + CREATE TABLE V2_TASK_EXECUTION_METADATA ( + ID BIGINT NOT
NULL, + TASK_EXECUTION_ID BIGINT NOT NULL, + TASK_EXECUTION_MANIFEST CLOB, + PRIMARY KEY (ID), + CONSTRAINT V2_TASK_METADATA_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES V2_TASK_EXECUTION (TASK_EXECUTION_ID) + ); + """; + private final static String POPULATE_TASK_EXECUTION_METADATA_V2_TABLE = + """ + INSERT INTO V2_TASK_EXECUTION_METADATA (ID, TASK_EXECUTION_ID, TASK_EXECUTION_MANIFEST) + SELECT ID, TASK_EXECUTION_ID, TASK_EXECUTION_MANIFEST + FROM TASK_EXECUTION_METADATA; + """; + private final static String CLEANUP_TASK_EXECUTION_METADATA_V2_TABLE = + """ + DROP TABLE TASK_EXECUTION_METADATA; + """; + + private final static String RENAME_TASK_EXECUTION_METADATA_V2_SEQ = + """ + begin + declare newSequenceStart int; + set newSequenceStart = next value for TASK_EXECUTION_METADATA_SEQ; + execute immediate 'CREATE sequence V2_TASK_EXECUTION_METADATA_SEQ start with ' || newSequenceStart; + end; + """; + private final static String CLEANUP_TASK_EXECUTION_METADATA_V2_SEQ = + """ + DROP SEQUENCE TASK_EXECUTION_METADATA_SEQ; + """; + + /* + * Scripts to rename the Batch V4 tables, adding a V2_ prefix. + */ + + private final static String RENAME_BATCH_JOB_INSTANCE_V4_TABLE = + """ + CREATE TABLE V2_BATCH_JOB_INSTANCE ( + JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY, + VERSION BIGINT, + JOB_NAME VARCHAR(100) NOT NULL, + JOB_KEY VARCHAR(32) NOT NULL, + CONSTRAINT JOB_INST_UN UNIQUE (JOB_NAME, JOB_KEY) + ); + """; + private final static String POPULATE_BATCH_JOB_INSTANCE_V4_TABLE = + """ + INSERT INTO V2_BATCH_JOB_INSTANCE + SELECT * FROM BATCH_JOB_INSTANCE; + """; + + private final static String CLEANUP_BATCH_JOB_INSTANCE_V4_TABLE = + """ + DROP TABLE BATCH_JOB_INSTANCE; + """; + private final static String RENAME_BATCH_JOB_EXECUTION_V4_TABLE = + """ + CREATE TABLE V2_BATCH_JOB_EXECUTION ( + JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, + VERSION BIGINT, + JOB_INSTANCE_ID BIGINT NOT NULL, + CREATE_TIME TIMESTAMP NOT NULL, + START_TIME TIMESTAMP DEFAULT NULL, + END_TIME TIMESTAMP DEFAULT NULL, + STATUS VARCHAR(10), + EXIT_CODE VARCHAR(2500), + EXIT_MESSAGE VARCHAR(2500), + LAST_UPDATED TIMESTAMP, + JOB_CONFIGURATION_LOCATION VARCHAR(2500) DEFAULT NULL, + CONSTRAINT JOB_INST_EXEC_FK FOREIGN KEY (JOB_INSTANCE_ID) REFERENCES V2_BATCH_JOB_INSTANCE(JOB_INSTANCE_ID) + ); + """; + private final static String POPULATE_BATCH_JOB_EXECUTION_V4_TABLE = + """ + INSERT INTO V2_BATCH_JOB_EXECUTION + SELECT * FROM BATCH_JOB_EXECUTION; + """; + + private final static String CLEANUP_BATCH_JOB_EXECUTION_V4_TABLE = + """ + DROP TABLE BATCH_JOB_EXECUTION; + """; + + private final static String RENAME_BATCH_JOB_EXECUTION_PARAMS_V4_TABLE = + """ + CREATE TABLE V2_BATCH_JOB_EXECUTION_PARAMS ( + JOB_EXECUTION_ID BIGINT NOT NULL, + TYPE_CD VARCHAR(6) NOT NULL, + KEY_NAME VARCHAR(100) NOT NULL, + STRING_VAL VARCHAR(250), + DATE_VAL TIMESTAMP DEFAULT NULL, + LONG_VAL BIGINT, + DOUBLE_VAL DOUBLE PRECISION, + IDENTIFYING CHAR(1) NOT NULL, + CONSTRAINT JOB_EXEC_PARAMS_FK FOREIGN KEY (JOB_EXECUTION_ID) REFERENCES V2_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) + ); + """; + private final static String POPULATE_BATCH_JOB_EXECUTION_PARAMS_V4_TABLE = + """ + INSERT INTO V2_BATCH_JOB_EXECUTION_PARAMS + SELECT * FROM BATCH_JOB_EXECUTION_PARAMS; + """; + + private final static String CLEANUP_BATCH_JOB_EXECUTION_PARAMS_V4_TABLE = + """ + DROP TABLE BATCH_JOB_EXECUTION_PARAMS; + """; + private final static String RENAME_BATCH_STEP_EXECUTION_V4_TABLE = + """ + CREATE TABLE V2_BATCH_STEP_EXECUTION ( + STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY
KEY, + VERSION BIGINT NOT NULL, + STEP_NAME VARCHAR(100) NOT NULL, + JOB_EXECUTION_ID BIGINT NOT NULL, + START_TIME TIMESTAMP NOT NULL, + END_TIME TIMESTAMP DEFAULT NULL, + STATUS VARCHAR(10), + COMMIT_COUNT BIGINT, + READ_COUNT BIGINT, + FILTER_COUNT BIGINT, + WRITE_COUNT BIGINT, + READ_SKIP_COUNT BIGINT, + WRITE_SKIP_COUNT BIGINT, + PROCESS_SKIP_COUNT BIGINT, + ROLLBACK_COUNT BIGINT, + EXIT_CODE VARCHAR(2500), + EXIT_MESSAGE VARCHAR(2500), + LAST_UPDATED TIMESTAMP, + CONSTRAINT JOB_EXEC_STEP_FK FOREIGN KEY (JOB_EXECUTION_ID) REFERENCES V2_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) + ); + """; + private final static String POPULATE_BATCH_STEP_EXECUTION_V4_TABLE = + """ + INSERT INTO V2_BATCH_STEP_EXECUTION + SELECT * FROM BATCH_STEP_EXECUTION; + """; + + private final static String CLEANUP_BATCH_STEP_EXECUTION_V4_TABLE = + """ + DROP TABLE BATCH_STEP_EXECUTION; + """; + + private final static String RENAME_BATCH_STEP_EXECUTION_CONTEXT_V4_TABLE = + """ + CREATE TABLE V2_BATCH_STEP_EXECUTION_CONTEXT ( + STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, + SHORT_CONTEXT VARCHAR(2500) NOT NULL, + SERIALIZED_CONTEXT CLOB, + CONSTRAINT STEP_EXEC_CTX_FK FOREIGN KEY (STEP_EXECUTION_ID) REFERENCES V2_BATCH_STEP_EXECUTION(STEP_EXECUTION_ID) + ); + """; + private final static String POPULATE_BATCH_STEP_EXECUTION_CONTEXT_V4_TABLE = + """ + INSERT INTO V2_BATCH_STEP_EXECUTION_CONTEXT + SELECT * FROM BATCH_STEP_EXECUTION_CONTEXT; + """; + private final static String CLEANUP_BATCH_STEP_EXECUTION_CONTEXT_V4_TABLE = + """ + DROP TABLE BATCH_STEP_EXECUTION_CONTEXT; + """; + + private final static String RENAME_BATCH_JOB_EXECUTION_CONTEXT_V4_TABLE = + """ + CREATE TABLE V2_BATCH_JOB_EXECUTION_CONTEXT ( + JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, + SHORT_CONTEXT VARCHAR(2500) NOT NULL, + SERIALIZED_CONTEXT CLOB, + CONSTRAINT JOB_EXEC_CTX_FK FOREIGN KEY (JOB_EXECUTION_ID) REFERENCES V2_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID) + ); + """; + private final static String POPULATE_BATCH_JOB_EXECUTION_CONTEXT_V4_TABLE = + """ + INSERT INTO V2_BATCH_JOB_EXECUTION_CONTEXT + SELECT * FROM BATCH_JOB_EXECUTION_CONTEXT; + """; + + private final static String CLEANUP_BATCH_JOB_EXECUTION_CONTEXT_V4_TABLE = + """ + DROP TABLE BATCH_JOB_EXECUTION_CONTEXT; + """; + private final static String RENAME_BATCH_STEP_EXECUTION_V4_SEQ = + """ + begin + declare newSequenceStart int; + set newSequenceStart = next value for BATCH_STEP_EXECUTION_SEQ; + execute immediate 'CREATE sequence V2_BATCH_STEP_EXECUTION_SEQ start with ' || newSequenceStart; + end; + """; + private final static String CLEANUP_BATCH_STEP_EXECUTION_V4_SEQ = + """ + DROP SEQUENCE BATCH_STEP_EXECUTION_SEQ; + """; + private final static String RENAME_BATCH_JOB_EXECUTION_V4_SEQ = + """ + begin + declare newSequenceStart int; + set newSequenceStart = next value for BATCH_JOB_EXECUTION_SEQ; + execute immediate 'CREATE sequence V2_BATCH_JOB_EXECUTION_SEQ start with ' || newSequenceStart; + end; + """; + + private final static String CLEANUP_BATCH_JOB_EXECUTION_V4_SEQ = + """ + DROP SEQUENCE BATCH_JOB_EXECUTION_SEQ; + """; + private final static String RENAME_BATCH_JOB_V4_SEQ = + """ + begin + declare newSequenceStart int; + set newSequenceStart = next value for BATCH_JOB_SEQ; + execute immediate 'CREATE sequence V2_BATCH_JOB_SEQ start with ' || newSequenceStart; + end; + """; + private final static String CLEANUP_BATCH_JOB_V4_SEQ = + """ + DROP SEQUENCE BATCH_JOB_SEQ; + """; + /* + * Scripts to rename the Task V3 tables, removing the BOOT3_ prefix.
+ */ + private final static String RENAME_TASK_EXECUTION_V3_TABLE = + """ + CREATE TABLE TASK_EXECUTION + ( + TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, + START_TIME TIMESTAMP(9) DEFAULT NULL, + END_TIME TIMESTAMP(9) DEFAULT NULL, + TASK_NAME VARCHAR(100), + EXIT_CODE INTEGER, + EXIT_MESSAGE VARCHAR(2500), + ERROR_MESSAGE VARCHAR(2500), + LAST_UPDATED TIMESTAMP(9), + EXTERNAL_EXECUTION_ID VARCHAR(255), + PARENT_EXECUTION_ID BIGINT + ); + """; + + private final static String POPULATE_TASK_EXECUTION_V3_TABLE = + """ + INSERT INTO TASK_EXECUTION + SELECT * FROM BOOT3_TASK_EXECUTION; + """; + + private final static String CLEANUP_TASK_EXECUTION_V3_TABLE = + """ + DROP TABLE BOOT3_TASK_EXECUTION; + """; + + private final static String RENAME_TASK_EXECUTION_PARAMS_V3_TABLE = + """ + CREATE TABLE TASK_EXECUTION_PARAMS + ( + TASK_EXECUTION_ID BIGINT NOT NULL, + TASK_PARAM VARCHAR(2500), + constraint TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID) + references TASK_EXECUTION (TASK_EXECUTION_ID) + ); + """; + private final static String POPULATE_TASK_EXECUTION_PARAMS_V3_TABLE = + """ + INSERT INTO TASK_EXECUTION_PARAMS + SELECT * FROM BOOT3_TASK_EXECUTION_PARAMS; + """; + + private final static String CLEANUP_TASK_EXECUTION_PARAMS_V3_TABLE = + """ + DROP TABLE BOOT3_TASK_EXECUTION_PARAMS; + """; + private final static String RENAME_TASK_TASK_BATCH_V3_TABLE = + """ + CREATE TABLE TASK_TASK_BATCH + ( + TASK_EXECUTION_ID BIGINT NOT NULL, + JOB_EXECUTION_ID BIGINT NOT NULL, + constraint TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID) + references TASK_EXECUTION (TASK_EXECUTION_ID) + ); + """; + + private final static String POPULATE_TASK_TASK_BATCH_V3_TABLE = + """ + INSERT INTO TASK_TASK_BATCH + SELECT * FROM BOOT3_TASK_TASK_BATCH; + """; + + private final static String CLEANUP_TASK_TASK_BATCH_V3_TABLE = + """ + DROP TABLE BOOT3_TASK_TASK_BATCH; + """; + private final static String RENAME_TASK_LOCK_V3_TABLE = + """ + CREATE TABLE TASK_LOCK + ( + LOCK_KEY CHAR(36) NOT NULL, + REGION VARCHAR(100) NOT NULL, + CLIENT_ID CHAR(36), + CREATED_DATE TIMESTAMP(9) NOT NULL, + constraint LOCK_PK primary key (LOCK_KEY, REGION) + ); + """; + + private final static String POPULATE_TASK_LOCK_V3_TABLE = + """ + INSERT INTO TASK_LOCK + SELECT * FROM BOOT3_TASK_LOCK; + """; + + private final static String CLEANUP_TASK_LOCK_V3_TABLE = + """ + DROP TABLE BOOT3_TASK_LOCK; + """; + private final static String RENAME_TASK_V3_SEQ = + """ + begin + declare newSequenceStart int; + set newSequenceStart = next value for BOOT3_TASK_SEQ; + execute immediate 'CREATE sequence TASK_SEQ start with ' || newSequenceStart; + end; + """; + + private final static String CLEANUP_TASK_V3_SEQ = + """ + DROP SEQUENCE BOOT3_TASK_SEQ; + """; + private final static String RENAME_TASK_EXECUTION_METADATA_V3_TABLE = + """ + CREATE TABLE TASK_EXECUTION_METADATA + ( + ID BIGINT NOT NULL, + TASK_EXECUTION_ID BIGINT NOT NULL, + TASK_EXECUTION_MANIFEST CLOB, + primary key (ID), + CONSTRAINT TASK_METADATA_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES TASK_EXECUTION (TASK_EXECUTION_ID) + ); + """; + private final static String POPULATE_TASK_EXECUTION_METADATA_V3_TABLE = + """ + INSERT INTO TASK_EXECUTION_METADATA + SELECT * FROM BOOT3_TASK_EXECUTION_METADATA; + """; + + private final static String CLEANUP_TASK_EXECUTION_METADATA_V3_TABLE = + """ + DROP TABLE BOOT3_TASK_EXECUTION_METADATA; + """; + + private final static String RENAME_TASK_EXECUTION_METADATA_V3_SEQ = + """ + begin + declare newSequenceStart int; + set newSequenceStart = next value
for BOOT3_TASK_EXECUTION_METADATA_SEQ; + execute immediate 'CREATE sequence TASK_EXECUTION_METADATA_SEQ start with ' || newSequenceStart; + end; + """; + + private final static String CLEANUP_TASK_EXECUTION_METADATA_V3_SEQ = + """ + DROP SEQUENCE BOOT3_TASK_EXECUTION_METADATA_SEQ; + """; + /* + * Scripts to rename Batch V5 tables, removing the BOOT3_ prefix. + */ + + private final static String RENAME_BATCH_JOB_INSTANCE_V5_TABLE = + """ + CREATE TABLE BATCH_JOB_INSTANCE + ( + JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY, + VERSION BIGINT, + JOB_NAME VARCHAR(100) NOT NULL, + JOB_KEY VARCHAR(32) NOT NULL, + constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY) + ); + """; + + private final static String POPULATE_BATCH_JOB_INSTANCE_V5_TABLE = + """ + INSERT INTO BATCH_JOB_INSTANCE + SELECT * FROM BOOT3_BATCH_JOB_INSTANCE; + """; + + private final static String CLEANUP_BATCH_JOB_INSTANCE_V5_TABLE = + """ + DROP TABLE BOOT3_BATCH_JOB_INSTANCE; + """; + + private final static String RENAME_BATCH_JOB_EXECUTION_V5_TABLE = + """ + CREATE TABLE BATCH_JOB_EXECUTION + ( + JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, + VERSION BIGINT, + JOB_INSTANCE_ID BIGINT NOT NULL, + CREATE_TIME TIMESTAMP(9) NOT NULL, + START_TIME TIMESTAMP(9) DEFAULT NULL, + END_TIME TIMESTAMP(9) DEFAULT NULL, + STATUS VARCHAR(10), + EXIT_CODE VARCHAR(2500), + EXIT_MESSAGE VARCHAR(2500), + LAST_UPDATED TIMESTAMP(9), + constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID) + references BATCH_JOB_INSTANCE (JOB_INSTANCE_ID) + ); + """; + + private final static String POPULATE_BATCH_JOB_EXECUTION_V5_TABLE = + """ + INSERT INTO BATCH_JOB_EXECUTION + SELECT * FROM BOOT3_BATCH_JOB_EXECUTION; + """; + + private final static String CLEANUP_BATCH_JOB_EXECUTION_V5_TABLE = + """ + DROP TABLE BOOT3_BATCH_JOB_EXECUTION; + """; + + private final static String RENAME_BATCH_JOB_EXECUTION_PARAMS_V5_TABLE = + """ + CREATE TABLE BATCH_JOB_EXECUTION_PARAMS + ( + JOB_EXECUTION_ID BIGINT NOT NULL, + PARAMETER_NAME VARCHAR(100) NOT NULL, + PARAMETER_TYPE VARCHAR(100) NOT NULL, + PARAMETER_VALUE VARCHAR(2500), + IDENTIFYING CHAR(1) NOT NULL, + constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION (JOB_EXECUTION_ID) + ); + """; + + private final static String POPULATE_BATCH_JOB_EXECUTION_PARAMS_V5_TABLE = + """ + INSERT INTO BATCH_JOB_EXECUTION_PARAMS + SELECT * FROM BOOT3_BATCH_JOB_EXECUTION_PARAMS; + """; + + private final static String CLEANUP_BATCH_JOB_EXECUTION_PARAMS_V5_TABLE = + """ + DROP TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS; + """; + + private final static String RENAME_BATCH_STEP_EXECUTION_V5_TABLE = + """ + CREATE TABLE BATCH_STEP_EXECUTION + ( + STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, + VERSION BIGINT NOT NULL, + STEP_NAME VARCHAR(100) NOT NULL, + JOB_EXECUTION_ID BIGINT NOT NULL, + CREATE_TIME TIMESTAMP(9) NOT NULL, + START_TIME TIMESTAMP(9) DEFAULT NULL, + END_TIME TIMESTAMP(9) DEFAULT NULL, + STATUS VARCHAR(10), + COMMIT_COUNT BIGINT, + READ_COUNT BIGINT, + FILTER_COUNT BIGINT, + WRITE_COUNT BIGINT, + READ_SKIP_COUNT BIGINT, + WRITE_SKIP_COUNT BIGINT, + PROCESS_SKIP_COUNT BIGINT, + ROLLBACK_COUNT BIGINT, + EXIT_CODE VARCHAR(2500), + EXIT_MESSAGE VARCHAR(2500), + LAST_UPDATED TIMESTAMP(9), + constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION (JOB_EXECUTION_ID) + ); + """; + + private final static String POPULATE_BATCH_STEP_EXECUTION_V5_TABLE = + """ + INSERT INTO BATCH_STEP_EXECUTION + SELECT * FROM BOOT3_BATCH_STEP_EXECUTION; + """; + + private
final static String CLEANUP_BATCH_STEP_EXECUTION_V5_TABLE = + """ + DROP TABLE BOOT3_BATCH_STEP_EXECUTION; + """; + + private final static String RENAME_BATCH_STEP_EXECUTION_CONTEXT_V5_TABLE = + """ + CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT + ( + STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, + SHORT_CONTEXT VARCHAR(2500) NOT NULL, + SERIALIZED_CONTEXT CLOB, + constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID) + references BATCH_STEP_EXECUTION (STEP_EXECUTION_ID) + ); + """; + + private final static String POPULATE_BATCH_STEP_EXECUTION_CONTEXT_V5_TABLE = + """ + INSERT INTO BATCH_STEP_EXECUTION_CONTEXT + SELECT * FROM BOOT3_BATCH_STEP_EXECUTION_CONTEXT; + """; + + private final static String CLEANUP_BATCH_STEP_EXECUTION_CONTEXT_V5_TABLE = + """ + DROP TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT; + """; + private final static String RENAME_BATCH_JOB_EXECUTION_CONTEXT_V5_TABLE = + """ + CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT + ( + JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY, + SHORT_CONTEXT VARCHAR(2500) NOT NULL, + SERIALIZED_CONTEXT CLOB, + constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID) + references BATCH_JOB_EXECUTION (JOB_EXECUTION_ID) + ); + """; + + private final static String POPULATE_BATCH_JOB_EXECUTION_CONTEXT_V5_TABLE = + """ + INSERT INTO BATCH_JOB_EXECUTION_CONTEXT + SELECT * FROM BOOT3_BATCH_JOB_EXECUTION_CONTEXT; + """; + + private final static String CLEANUP_BATCH_JOB_EXECUTION_CONTEXT_V5_TABLE = + """ + DROP TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT; + """; + + private final static String RENAME_BATCH_STEP_EXECUTION_V5_SEQ = + """ + begin + declare newSequenceStart int; + set newSequenceStart = next value for BOOT3_BATCH_STEP_EXECUTION_SEQ; + execute immediate 'CREATE sequence BATCH_STEP_EXECUTION_SEQ start with ' || newSequenceStart; + end; + """; + + private final static String CLEANUP_BATCH_STEP_EXECUTION_V5_SEQ = + """ + DROP SEQUENCE BOOT3_BATCH_STEP_EXECUTION_SEQ; + """; + + private final static String RENAME_BATCH_JOB_EXECUTION_V5_SEQ = + """ + begin + declare newSequenceStart int; + set newSequenceStart = next value for BOOT3_BATCH_JOB_EXECUTION_SEQ; + execute immediate 'CREATE sequence BATCH_JOB_EXECUTION_SEQ start with ' || newSequenceStart; + end; + """; + + private final static String CLEANUP_BATCH_JOB_EXECUTION_V5_SEQ = + """ + DROP SEQUENCE BOOT3_BATCH_JOB_EXECUTION_SEQ; + """; + + + private final static String RENAME_BATCH_JOB_V5_SEQ = + """ + begin + declare newSequenceStart int; + set newSequenceStart = next value for BOOT3_BATCH_JOB_SEQ; + execute immediate 'CREATE sequence BATCH_JOB_SEQ start with ' || newSequenceStart; + end; + """; + + private final static String CLEANUP_BATCH_JOB_V5_SEQ = + """ + DROP SEQUENCE BOOT3_BATCH_JOB_SEQ; + """; + + @Override + public List dropBoot3Boot2Views() { + return Arrays.asList( + SqlCommand.from(DROP_VIEW_AGGREGATE_TASK_EXECUTION), + SqlCommand.from(DROP_VIEW_AGGREGATE_TASK_EXECUTION_PARAMS), + SqlCommand.from(DROP_VIEW_AGGREGATE_JOB_EXECUTION), + SqlCommand.from(DROP_VIEW_AGGREGATE_JOB_INSTANCE), + SqlCommand.from(DROP_VIEW_AGGREGATE_TASK_BATCH), + SqlCommand.from(DROP_VIEW_AGGREGATE_STEP_EXECUTION) + ); + } + + @Override + public List renameTask3Tables() { + return Arrays.asList( + SqlCommand.from(RENAME_TASK_EXECUTION_V3_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_PARAMS_V3_TABLE), + SqlCommand.from(RENAME_TASK_TASK_BATCH_V3_TABLE), + SqlCommand.from(RENAME_TASK_V3_SEQ), + SqlCommand.from(RENAME_TASK_LOCK_V3_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V3_TABLE), + 
SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V3_SEQ), + SqlCommand.from(POPULATE_TASK_EXECUTION_V3_TABLE), + SqlCommand.from(POPULATE_TASK_EXECUTION_PARAMS_V3_TABLE), + SqlCommand.from(POPULATE_TASK_TASK_BATCH_V3_TABLE), + SqlCommand.from(POPULATE_TASK_LOCK_V3_TABLE), + SqlCommand.from(POPULATE_TASK_EXECUTION_METADATA_V3_TABLE), + SqlCommand.from(CLEANUP_TASK_EXECUTION_V3_TABLE), + SqlCommand.from(CLEANUP_TASK_EXECUTION_PARAMS_V3_TABLE), + SqlCommand.from(CLEANUP_TASK_TASK_BATCH_V3_TABLE), + SqlCommand.from(CLEANUP_TASK_V3_SEQ), + SqlCommand.from(CLEANUP_TASK_LOCK_V3_TABLE), + SqlCommand.from(CLEANUP_TASK_EXECUTION_METADATA_V3_TABLE), + SqlCommand.from(CLEANUP_TASK_EXECUTION_METADATA_V3_SEQ) + ); + } + + @Override + public List renameBatch5Tables() { + return Arrays.asList( + SqlCommand.from(RENAME_BATCH_JOB_INSTANCE_V5_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V5_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_PARAMS_V5_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V5_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_CONTEXT_V5_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_CONTEXT_V5_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V5_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V5_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_V5_SEQ), + + SqlCommand.from(POPULATE_BATCH_JOB_INSTANCE_V5_TABLE), + SqlCommand.from(POPULATE_BATCH_JOB_EXECUTION_V5_TABLE), + SqlCommand.from(POPULATE_BATCH_JOB_EXECUTION_PARAMS_V5_TABLE), + SqlCommand.from(POPULATE_BATCH_STEP_EXECUTION_V5_TABLE), + SqlCommand.from(POPULATE_BATCH_STEP_EXECUTION_CONTEXT_V5_TABLE), + SqlCommand.from(POPULATE_BATCH_JOB_EXECUTION_CONTEXT_V5_TABLE), + + SqlCommand.from(CLEANUP_BATCH_JOB_INSTANCE_V5_TABLE), + SqlCommand.from(CLEANUP_BATCH_JOB_EXECUTION_V5_TABLE), + SqlCommand.from(CLEANUP_BATCH_JOB_EXECUTION_PARAMS_V5_TABLE), + SqlCommand.from(CLEANUP_BATCH_STEP_EXECUTION_V5_TABLE), + SqlCommand.from(CLEANUP_BATCH_STEP_EXECUTION_CONTEXT_V5_TABLE), + SqlCommand.from(CLEANUP_BATCH_JOB_EXECUTION_CONTEXT_V5_TABLE), + SqlCommand.from(CLEANUP_BATCH_STEP_EXECUTION_V5_SEQ), + SqlCommand.from(CLEANUP_BATCH_JOB_EXECUTION_V5_SEQ), + SqlCommand.from(CLEANUP_BATCH_JOB_V5_SEQ) + ); + } + + @Override + public List renameTask2Tables() { + return Arrays.asList( + SqlCommand.from(RENAME_TASK_EXECUTION_V2_TABLE), + SqlCommand.from(POPULATE_TASK_EXECUTION_V2_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_PARAMS_V2_TABLE), + SqlCommand.from(POPULATE_TASK_EXECUTION_PARAMS_V2_TABLE), + SqlCommand.from(RENAME_TASK_TASK_BATCH_V2_TABLE), + SqlCommand.from(POPULATE_TASK_TASK_BATCH_V2_TABLE), + SqlCommand.from(RENAME_TASK_V2_SEQ), + SqlCommand.from(RENAME_TASK_LOCK_V2_TABLE), + SqlCommand.from(POPULATE_TASK_LOCK_V2_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V2_TABLE), + SqlCommand.from(POPULATE_TASK_EXECUTION_METADATA_V2_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V2_SEQ), + SqlCommand.from(CLEANUP_TASK_EXECUTION_PARAMS_V2_TABLE), + SqlCommand.from(CLEANUP_TASK_TASK_BATCH_V2_TABLE), + SqlCommand.from(CLEANUP_TASK_LOCK_V2_TABLE), + SqlCommand.from(CLEANUP_TASK_EXECUTION_METADATA_V2_TABLE), + SqlCommand.from(CLEANUP_TASK_EXECUTION_V2_TABLE), + SqlCommand.from(CLEANUP_TASK_EXECUTION_METADATA_V2_SEQ), + SqlCommand.from(CLEANUP_TASK_V2_SEQ) + ); + } + + @Override + public List renameBatch4Tables() { + return Arrays.asList( + SqlCommand.from(RENAME_BATCH_JOB_INSTANCE_V4_TABLE), + SqlCommand.from(POPULATE_BATCH_JOB_INSTANCE_V4_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V4_TABLE), + 
SqlCommand.from(POPULATE_BATCH_JOB_EXECUTION_V4_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_PARAMS_V4_TABLE), + SqlCommand.from(POPULATE_BATCH_JOB_EXECUTION_PARAMS_V4_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V4_TABLE), + SqlCommand.from(POPULATE_BATCH_STEP_EXECUTION_V4_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_CONTEXT_V4_TABLE), + SqlCommand.from(POPULATE_BATCH_STEP_EXECUTION_CONTEXT_V4_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_CONTEXT_V4_TABLE), + SqlCommand.from(POPULATE_BATCH_JOB_EXECUTION_CONTEXT_V4_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V4_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V4_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_V4_SEQ), + + SqlCommand.from(CLEANUP_BATCH_JOB_INSTANCE_V4_TABLE), + SqlCommand.from(CLEANUP_BATCH_JOB_EXECUTION_V4_TABLE), + SqlCommand.from(CLEANUP_BATCH_JOB_EXECUTION_PARAMS_V4_TABLE), + SqlCommand.from(CLEANUP_BATCH_STEP_EXECUTION_V4_TABLE), + SqlCommand.from(CLEANUP_BATCH_STEP_EXECUTION_CONTEXT_V4_TABLE), + SqlCommand.from(CLEANUP_BATCH_JOB_EXECUTION_CONTEXT_V4_TABLE), + SqlCommand.from(CLEANUP_BATCH_STEP_EXECUTION_V4_SEQ), + SqlCommand.from(CLEANUP_BATCH_JOB_EXECUTION_V4_SEQ), + SqlCommand.from(CLEANUP_BATCH_JOB_V4_SEQ) + + ); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V1__Initial_Setup.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V1__Initial_Setup.java index 7487ee68b5..b2f476bd44 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V1__Initial_Setup.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V1__Initial_Setup.java @@ -111,7 +111,7 @@ public class V1__Initial_Setup extends AbstractInitialSetupMigration { ")"; private final static String CREATE_TASK_SEQ_SEQUENCE = - "CREATE SEQUENCE TASK_SEQ AS BIGINT START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NOCACHE NOCYCLE"; + "CREATE SEQUENCE TASK_SEQ AS BIGINT START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NOCACHE NOCYCLE"; public final static String CREATE_TASK_LOCK_TABLE = "CREATE TABLE TASK_LOCK (\n" + diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V5__Add_Task_Execution_Params_Indexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V5__Add_Task_Execution_Params_Indexes.java new file mode 100644 index 0000000000..17718d3fc0 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V5__Add_Task_Execution_Params_Indexes.java @@ -0,0 +1,47 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.cloud.dataflow.server.db.migration.db2; + +import java.util.Arrays; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner; + +/** + * This migration class adds an index on the TASK_EXECUTION_ID column of TASK_EXECUTION_PARAMS. + * + * @author Claudio Tasso + * + * @since 2.10 + */ +public class V5__Add_Task_Execution_Params_Indexes extends BaseJavaMigration { + + public final static String ADD_INDEX_TO_TASK_EXECUTION_PARAMS = "create index TASK_EXECUTION_ID_IDX on TASK_EXECUTION_PARAMS (TASK_EXECUTION_ID)"; + + private final SqlCommandsRunner runner = new SqlCommandsRunner(); + + @Override + public void migrate(Context context) throws Exception { + runner.execute(context.getConnection(), Arrays.asList( + SqlCommand.from(ADD_INDEX_TO_TASK_EXECUTION_PARAMS))); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V6__Boot3_Boot_Version.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V6__Boot3_Boot_Version.java new file mode 100644 index 0000000000..f711020729 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V6__Boot3_Boot_Version.java @@ -0,0 +1,28 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.db2; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractBootVersionMigration; + +/** + * The boot_version field indicates the version of + * Spring Boot used by the application and, by implication, the + * schema version of the task and batch tables. + * @author Corneil du Plessis + * @since 2.11 + */ +public class V6__Boot3_Boot_Version extends AbstractBootVersionMigration { +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V7__Boot3_Add_Task3_Batch5_Schema.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V7__Boot3_Add_Task3_Batch5_Schema.java new file mode 100644 index 0000000000..23071e63e3 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V7__Boot3_Add_Task3_Batch5_Schema.java @@ -0,0 +1,204 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.db2; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.server.db.migration.AbstractBoot3InitialSetupMigration; + +/** + * Adds the spring-cloud-task V3 + * and spring-batch V5 + * schemas to support Boot3 compatibility. + *
    Schemas have added table prefix of {@code "BOOT3_"}. + * + * @author Chris Bono + */ +public class V7__Boot3_Add_Task3_Batch5_Schema extends AbstractBoot3InitialSetupMigration { + + private final static String CREATE_TASK_EXECUTION_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " START_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + " END_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + " TASK_NAME VARCHAR(100),\n" + + " EXIT_CODE INTEGER,\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " ERROR_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP(9),\n" + + " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + + " PARENT_EXECUTION_ID BIGINT\n" + + ")"; + + private final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_PARAMS (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_PARAM VARCHAR(2500),\n" + + " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ")"; + + private final static String CREATE_TASK_TASK_BATCH = + "CREATE TABLE BOOT3_TASK_TASK_BATCH (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " constraint BOOT3_TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ")"; + + private final static String CREATE_TASK_LOCK_TABLE = + "CREATE TABLE BOOT3_TASK_LOCK (\n" + + " LOCK_KEY CHAR(36) NOT NULL,\n" + + " REGION VARCHAR(100) NOT NULL,\n" + + " CLIENT_ID CHAR(36),\n" + + " CREATED_DATE TIMESTAMP(9) NOT NULL,\n" + + " constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ")"; + + private final static String CREATE_TASK_SEQ_SEQUENCE = + "CREATE SEQUENCE BOOT3_TASK_SEQ AS BIGINT START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NOCACHE NOCYCLE"; + + private final static String CREATE_TASK_EXECUTION_METADATA_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_METADATA (\n" + + " ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_MANIFEST CLOB,\n" + + " primary key (ID),\n" + + " CONSTRAINT BOOT3_TASK_METADATA_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ")"; + + private final static String CREATE_TASK_EXECUTION_METADATA_SEQ = + "CREATE SEQUENCE BOOT3_TASK_EXECUTION_METADATA_SEQ AS BIGINT MAXVALUE 9223372036854775807 NO CYCLE"; + + private final static String CREATE_BATCH_JOB_INSTANCE_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_INSTANCE (\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_KEY VARCHAR(32) NOT NULL,\n" + + " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ")"; + + private final static String CREATE_BATCH_JOB_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + + " CREATE_TIME TIMESTAMP(9) NOT NULL,\n" + + " START_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + " END_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP(9),\n" + + " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + " references BOOT3_BATCH_JOB_INSTANCE (JOB_INSTANCE_ID)\n" + + ")"; + + private final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = + "CREATE TABLE 
BOOT3_BATCH_JOB_EXECUTION_PARAMS (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " PARAMETER_NAME VARCHAR(100) NOT NULL,\n" + + " PARAMETER_TYPE VARCHAR(100) NOT NULL,\n" + + " PARAMETER_VALUE VARCHAR(2500),\n" + + " IDENTIFYING CHAR(1) NOT NULL,\n" + + " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" + + ")"; + + private final static String CREATE_BATCH_STEP_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION (\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT NOT NULL,\n" + + " STEP_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " CREATE_TIME TIMESTAMP(9) NOT NULL,\n" + + " START_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + " END_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " COMMIT_COUNT BIGINT,\n" + + " READ_COUNT BIGINT,\n" + + " FILTER_COUNT BIGINT,\n" + + " WRITE_COUNT BIGINT,\n" + + " READ_SKIP_COUNT BIGINT,\n" + + " WRITE_SKIP_COUNT BIGINT,\n" + + " PROCESS_SKIP_COUNT BIGINT,\n" + + " ROLLBACK_COUNT BIGINT,\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP(9),\n" + + " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" + + ")"; + + private final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT (\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT CLOB,\n" + + " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + + " references BOOT3_BATCH_STEP_EXECUTION (STEP_EXECUTION_ID)\n" + + ")"; + + private final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT CLOB,\n" + + " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" + + ")"; + + private final static String CREATE_BATCH_STEP_EXECUTION_SEQUENCE = + "CREATE SEQUENCE BOOT3_BATCH_STEP_EXECUTION_SEQ AS BIGINT MAXVALUE 9223372036854775807 NO CYCLE"; + + private final static String CREATE_BATCH_JOB_EXECUTION_SEQUENCE = + "CREATE SEQUENCE BOOT3_BATCH_JOB_EXECUTION_SEQ AS BIGINT MAXVALUE 9223372036854775807 NO CYCLE"; + + private final static String CREATE_BATCH_JOB_SEQUENCE = + "CREATE SEQUENCE BOOT3_BATCH_JOB_SEQ AS BIGINT MAXVALUE 9223372036854775807 NO CYCLE"; + + @Override + public List createTask3Tables() { + return Arrays.asList( + SqlCommand.from(CREATE_TASK_EXECUTION_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_TASK_TASK_BATCH), + SqlCommand.from(CREATE_TASK_SEQ_SEQUENCE), + SqlCommand.from(CREATE_TASK_LOCK_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_SEQ) + ); + } + + @Override + public List createBatch5Tables() { + return Arrays.asList( + SqlCommand.from(CREATE_BATCH_JOB_INSTANCE_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE), + 
SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_SEQUENCE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_SEQUENCE), + SqlCommand.from(CREATE_BATCH_JOB_SEQUENCE) + ); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V8__AddAggregateViews.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V8__AddAggregateViews.java new file mode 100644 index 0000000000..bb0b309056 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V8__AddAggregateViews.java @@ -0,0 +1,7 @@ +package org.springframework.cloud.dataflow.server.db.migration.db2; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractAggregateViewMigration; + +public class V8__AddAggregateViews extends AbstractAggregateViewMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V9__DropJobConfigurationLocation.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V9__DropJobConfigurationLocation.java new file mode 100644 index 0000000000..92bd793c85 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V9__DropJobConfigurationLocation.java @@ -0,0 +1,31 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.db2; + +import java.util.Collections; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; +import org.springframework.cloud.dataflow.server.db.migration.DropColumnSqlCommands; + +/** + * Removes extra JOB_CONFIGURATION_LOCATION columns. + * @author Corneil du Plessis + */ +public class V9__DropJobConfigurationLocation extends AbstractMigration { + public V9__DropJobConfigurationLocation() { + super(Collections.singletonList(new DropColumnSqlCommands("BOOT3_BATCH_JOB_EXECUTION.JOB_CONFIGURATION_LOCATION"))); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/MariadbBeforeBaseline.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/MariadbBeforeBaseline.java new file mode 100644 index 0000000000..37427e2ada --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/MariadbBeforeBaseline.java @@ -0,0 +1,198 @@ +/* + * Copyright 2019-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.server.db.migration.AbstractBaselineCallback; + +/** + * Baselining schema setup for {@code mariadb}. + * + * @author Janne Valkealahti + * + */ +public class MariadbBeforeBaseline extends AbstractBaselineCallback { + + public final static String DROP_AUDIT_RECORDS_AUDIT_ACTION_IDX_INDEX = + "drop index AUDIT_RECORDS_AUDIT_ACTION_IDX on AUDIT_RECORDS"; + + public final static String DROP_AUDIT_RECORDS_AUDIT_OPERATION_IDX_INDEX = + "drop index AUDIT_RECORDS_AUDIT_OPERATION_IDX on AUDIT_RECORDS"; + + public final static String DROP_AUDIT_RECORDS_CORRELATION_ID_IDX_INDEX = + "drop index AUDIT_RECORDS_CORRELATION_ID_IDX on AUDIT_RECORDS"; + + public final static String DROP_AUDIT_RECORDS_CREATED_ON_IDX_INDEX = + "drop index AUDIT_RECORDS_CREATED_ON_IDX on AUDIT_RECORDS"; + + public final static String CREATE_APP_REGISTRATION_TMP_TABLE = + V1__Initial_Setup.CREATE_APP_REGISTRATION_TABLE.replaceFirst("app_registration", "app_registration_tmp"); + + public final static String INSERT_APP_REGISTRATION_DATA = + "insert into\n" + + " app_registration_tmp (id, object_version, default_version, metadata_uri, name, type, uri, version) \n" + + " select id, object_Version, default_Version, metadata_Uri, name, type, uri, version\n" + + " from APP_REGISTRATION"; + + public final static String DROP_APP_REGISTRATION_TABLE = + "drop table APP_REGISTRATION"; + + public final static String RENAME_APP_REGISTRATION_TMP_TABLE = + "alter table app_registration_tmp rename to app_registration"; + + public final static String CREATE_STREAM_DEFINITIONS_TMP_TABLE = + V1__Initial_Setup.CREATE_STREAM_DEFINITIONS_TABLE.replaceFirst("stream_definitions", "stream_definitions_tmp"); + + public final static String INSERT_STREAM_DEFINITIONS_DATA = + "insert into\n" + + " stream_definitions_tmp (definition_name, definition) \n" + + " select DEFINITION_NAME, DEFINITION\n" + + " from STREAM_DEFINITIONS"; + + public final static String DROP_STREAM_DEFINITIONS_TABLE = + "drop table STREAM_DEFINITIONS"; + + public final static String RENAME_STREAM_DEFINITIONS_TMP_TABLE = + "alter table stream_definitions_tmp rename to stream_definitions"; + + public final static String CREATE_TASK_DEFINITIONS_TMP_TABLE = + V1__Initial_Setup.CREATE_TASK_DEFINITIONS_TABLE.replaceFirst("task_definitions", "task_definitions_tmp"); + + public final static String INSERT_TASK_DEFINITIONS_DATA = + "insert into\n" + + " task_definitions_tmp (definition_name, definition) \n" + + " select DEFINITION_NAME, DEFINITION\n" + + " from TASK_DEFINITIONS"; + + public final static String DROP_TASK_DEFINITIONS_TABLE = + "drop table TASK_DEFINITIONS"; + + public final static String RENAME_TASK_DEFINITIONS_TMP_TABLE = + "alter table task_definitions_tmp rename to task_definitions"; + + public final static String CREATE_AUDIT_RECORDS_TMP_TABLE = +
V1__Initial_Setup.CREATE_AUDIT_RECORDS_TABLE.replaceFirst("audit_records", "audit_records_tmp"); + + public final static String INSERT_AUDIT_RECORDS_DATA = + "insert into\n" + + " audit_records_tmp (id, audit_action, audit_data, audit_operation, correlation_id, created_by, created_on)\n" + + " select id, audit_Action, audit_data, audit_Operation, correlation_id, created_by, created_On\n" + + " from AUDIT_RECORDS"; + + public final static String DROP_AUDIT_RECORDS_TABLE = + "drop table AUDIT_RECORDS"; + + public final static String RENAME_AUDIT_RECORDS_TMP_TABLE = + "alter table audit_records_tmp rename to audit_records"; + + public final static String CREATE_AUDIT_RECORDS_AUDIT_ACTION_IDX_INDEX = + "create index audit_records_audit_action_idx on audit_records (audit_action)"; + + public final static String CREATE_AUDIT_RECORDS_AUDIT_OPERATION_IDX_INDEX = + "create index audit_records_audit_operation_idx on audit_records (audit_operation)"; + + public final static String CREATE_AUDIT_RECORDS_CORRELATION_ID_IDX_INDEX = + "create index audit_records_correlation_id_idx on audit_records (correlation_id)"; + + public final static String CREATE_AUDIT_RECORDS_CREATED_ON_IDX_INDEX = + "create index audit_records_created_on_idx on audit_records (created_on)"; + + /** + * Instantiates a new mariadb before baseline. + */ + public MariadbBeforeBaseline() { + super(new V1__Initial_Setup()); + } + + @Override + public List dropIndexes() { + return Arrays.asList( + SqlCommand.from(DROP_AUDIT_RECORDS_AUDIT_ACTION_IDX_INDEX), + SqlCommand.from(DROP_AUDIT_RECORDS_AUDIT_OPERATION_IDX_INDEX), + SqlCommand.from(DROP_AUDIT_RECORDS_CORRELATION_ID_IDX_INDEX), + SqlCommand.from(DROP_AUDIT_RECORDS_CREATED_ON_IDX_INDEX)); + } + + @Override + public List changeAppRegistrationTable() { + return Arrays.asList( + SqlCommand.from(CREATE_APP_REGISTRATION_TMP_TABLE), + SqlCommand.from(INSERT_APP_REGISTRATION_DATA), + SqlCommand.from(DROP_APP_REGISTRATION_TABLE), + SqlCommand.from(RENAME_APP_REGISTRATION_TMP_TABLE)); + } + + @Override + public List changeUriRegistryTable() { + // Other db types support migrating app_registration + // and hibernate_sequence from the dataflow 1.7.x line. As + // mariadb is a newly supported db type (beyond previously using it + // as mysql), we should not need these migrations.
+ return Collections.emptyList(); + } + + @Override + public List changeStreamDefinitionsTable() { + return Arrays.asList( + SqlCommand.from(CREATE_STREAM_DEFINITIONS_TMP_TABLE), + SqlCommand.from(INSERT_STREAM_DEFINITIONS_DATA), + SqlCommand.from(DROP_STREAM_DEFINITIONS_TABLE), + SqlCommand.from(RENAME_STREAM_DEFINITIONS_TMP_TABLE)); + } + + @Override + public List changeTaskDefinitionsTable() { + return Arrays.asList( + SqlCommand.from(CREATE_TASK_DEFINITIONS_TMP_TABLE), + SqlCommand.from(INSERT_TASK_DEFINITIONS_DATA), + SqlCommand.from(DROP_TASK_DEFINITIONS_TABLE), + SqlCommand.from(RENAME_TASK_DEFINITIONS_TMP_TABLE)); + } + + @Override + public List changeAuditRecordsTable() { + return Arrays.asList( + SqlCommand.from(CREATE_AUDIT_RECORDS_TMP_TABLE), + SqlCommand.from(INSERT_AUDIT_RECORDS_DATA), + SqlCommand.from(DROP_AUDIT_RECORDS_TABLE), + SqlCommand.from(RENAME_AUDIT_RECORDS_TMP_TABLE)); + } + + @Override + public List createIndexes() { + return Arrays.asList( + SqlCommand.from(CREATE_AUDIT_RECORDS_AUDIT_ACTION_IDX_INDEX), + SqlCommand.from(CREATE_AUDIT_RECORDS_AUDIT_OPERATION_IDX_INDEX), + SqlCommand.from(CREATE_AUDIT_RECORDS_CORRELATION_ID_IDX_INDEX), + SqlCommand.from(CREATE_AUDIT_RECORDS_CREATED_ON_IDX_INDEX)); + } + + @Override + public List createTaskLockTable() { + return Arrays.asList( + SqlCommand.from(V1__Initial_Setup.CREATE_TASK_LOCK_TABLE)); + } + + @Override + public List createTaskDeploymentTable() { + return Arrays.asList(SqlCommand.from( + V1__Initial_Setup.CREATE_TASK_DEPLOYMENT_TABLE)); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V10__DropJobConfigurationLocation.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V10__DropJobConfigurationLocation.java new file mode 100644 index 0000000000..985bfd3141 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V10__DropJobConfigurationLocation.java @@ -0,0 +1,31 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import java.util.Collections; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; +import org.springframework.cloud.dataflow.server.db.migration.DropColumnSqlCommands; + +/** + * Removes extra JOB_CONFIGURATION_LOCATION columns. 
+ * @author Corneil du Plessis + */ +public class V10__DropJobConfigurationLocation extends AbstractMigration { + public V10__DropJobConfigurationLocation() { + super(Collections.singletonList(new DropColumnSqlCommands("BOOT3_BATCH_JOB_EXECUTION.JOB_CONFIGURATION_LOCATION"))); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V11__CreateBatchIndexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V11__CreateBatchIndexes.java new file mode 100644 index 0000000000..ac9e6dfd4a --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V11__CreateBatchIndexes.java @@ -0,0 +1,23 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateBatchIndexesMigration; + +public class V11__CreateBatchIndexes extends AbstractCreateBatchIndexesMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V12__CreateTaskParentIndex.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V12__CreateTaskParentIndex.java new file mode 100644 index 0000000000..b491f8f921 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V12__CreateTaskParentIndex.java @@ -0,0 +1,23 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateTaskParentIndexMigration; + +public class V12__CreateTaskParentIndex extends AbstractCreateTaskParentIndexMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V13__Remove_Task2_Batch4_Support.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V13__Remove_Task2_Batch4_Support.java new file mode 100644 index 0000000000..6152d4c345 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V13__Remove_Task2_Batch4_Support.java @@ -0,0 +1,230 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.server.db.migration.AbstractRemoveBatch4Task2Tables; + +/** + * Prefixes Task V2 tables and Batch V4 tables with a V2_ prefix, and removes the BOOT3_ prefix from Task V3 and Batch V5 tables. + * + * @author Glenn Renfro + */ +public class V13__Remove_Task2_Batch4_Support extends AbstractRemoveBatch4Task2Tables { + + /* + * Scripts to remove views used for Task V2/Batch V4 and Task V3/Batch V5 queries. + */ + private final static String DROP_VIEW_AGGREGATE_TASK_EXECUTION = + "DROP VIEW AGGREGATE_TASK_EXECUTION"; + + private final static String DROP_VIEW_AGGREGATE_TASK_EXECUTION_PARAMS = + "DROP VIEW AGGREGATE_TASK_EXECUTION_PARAMS"; + + private final static String DROP_VIEW_AGGREGATE_JOB_EXECUTION = + "DROP VIEW AGGREGATE_JOB_EXECUTION"; + + private final static String DROP_VIEW_AGGREGATE_JOB_INSTANCE = + "DROP VIEW AGGREGATE_JOB_INSTANCE"; + + private final static String DROP_VIEW_AGGREGATE_TASK_BATCH = + "DROP VIEW AGGREGATE_TASK_BATCH"; + + private final static String DROP_VIEW_AGGREGATE_STEP_EXECUTION = + "DROP VIEW AGGREGATE_STEP_EXECUTION"; + + /* + * Scripts to rename Task V2 tables, adding a V2_ prefix.
+ */ + private final static String RENAME_TASK_EXECUTION_V2_TABLE = + "ALTER TABLE TASK_EXECUTION RENAME TO V2_TASK_EXECUTION"; + + private final static String RENAME_TASK_EXECUTION_PARAMS_V2_TABLE = + "ALTER TABLE TASK_EXECUTION_PARAMS RENAME TO V2_TASK_EXECUTION_PARAMS"; + + private final static String RENAME_TASK_TASK_BATCH_V2_TABLE = + "ALTER TABLE TASK_TASK_BATCH RENAME TO V2_TASK_TASK_BATCH"; + + private final static String RENAME_TASK_LOCK_V2_TABLE = + "ALTER TABLE TASK_LOCK RENAME TO V2_TASK_LOCK"; + + private final static String RENAME_TASK_V2_SEQ = + "ALTER TABLE TASK_SEQ RENAME TO V2_TASK_SEQ"; + + private final static String RENAME_TASK_EXECUTION_METADATA_V2_TABLE = + "ALTER TABLE TASK_EXECUTION_METADATA RENAME TO V2_TASK_EXECUTION_METADATA"; + + private final static String RENAME_TASK_EXECUTION_METADATA_V2_SEQ = + "ALTER TABLE TASK_EXECUTION_METADATA_SEQ RENAME TO V2_TASK_EXECUTION_METADATA_SEQ"; + + /* + * Scripts to rename Batch V4 tables, adding a V2_ prefix. + */ + + private final static String RENAME_BATCH_JOB_INSTANCE_V4_TABLE = + "ALTER TABLE BATCH_JOB_INSTANCE RENAME TO V2_BATCH_JOB_INSTANCE"; + + private final static String RENAME_BATCH_JOB_EXECUTION_V4_TABLE = + "ALTER TABLE BATCH_JOB_EXECUTION RENAME TO V2_BATCH_JOB_EXECUTION"; + + private final static String RENAME_BATCH_JOB_EXECUTION_PARAMS_V4_TABLE = + "ALTER TABLE BATCH_JOB_EXECUTION_PARAMS RENAME TO V2_BATCH_JOB_EXECUTION_PARAMS"; + + private final static String RENAME_BATCH_STEP_EXECUTION_V4_TABLE = + "ALTER TABLE BATCH_STEP_EXECUTION RENAME TO V2_BATCH_STEP_EXECUTION"; + + private final static String RENAME_BATCH_STEP_EXECUTION_CONTEXT_V4_TABLE = + "ALTER TABLE BATCH_STEP_EXECUTION_CONTEXT RENAME TO V2_BATCH_STEP_EXECUTION_CONTEXT"; + + private final static String RENAME_BATCH_JOB_EXECUTION_CONTEXT_V4_TABLE = + "ALTER TABLE BATCH_JOB_EXECUTION_CONTEXT RENAME TO V2_BATCH_JOB_EXECUTION_CONTEXT"; + + private final static String RENAME_BATCH_STEP_EXECUTION_V4_SEQ = + "ALTER TABLE BATCH_STEP_EXECUTION_SEQ RENAME TO V2_BATCH_STEP_EXECUTION_SEQ"; + + private final static String RENAME_BATCH_JOB_EXECUTION_V4_SEQ = + "ALTER TABLE BATCH_JOB_EXECUTION_SEQ RENAME TO V2_BATCH_JOB_EXECUTION_SEQ"; + + private final static String RENAME_BATCH_JOB_V4_SEQ = + "ALTER TABLE BATCH_JOB_SEQ RENAME TO V2_BATCH_JOB_SEQ"; + + /* + * Scripts to rename Task V3 tables, removing the BOOT3_ prefix. + */ + private final static String RENAME_TASK_EXECUTION_V3_TABLE = + "ALTER TABLE BOOT3_TASK_EXECUTION RENAME TO TASK_EXECUTION"; + + private final static String RENAME_TASK_EXECUTION_PARAMS_V3_TABLE = + "ALTER TABLE BOOT3_TASK_EXECUTION_PARAMS RENAME TO TASK_EXECUTION_PARAMS"; + + private final static String RENAME_TASK_TASK_BATCH_V3_TABLE = + "ALTER TABLE BOOT3_TASK_TASK_BATCH RENAME TO TASK_TASK_BATCH"; + + private final static String RENAME_TASK_LOCK_V3_TABLE = + "ALTER TABLE BOOT3_TASK_LOCK RENAME TO TASK_LOCK"; + + private final static String RENAME_TASK_V3_SEQ = + "ALTER TABLE BOOT3_TASK_SEQ RENAME TO TASK_SEQ"; + + private final static String RENAME_TASK_EXECUTION_METADATA_V3_TABLE = + "ALTER TABLE BOOT3_TASK_EXECUTION_METADATA RENAME TO TASK_EXECUTION_METADATA"; + + private final static String RENAME_TASK_EXECUTION_METADATA_V3_SEQ = + "ALTER TABLE BOOT3_TASK_EXECUTION_METADATA_SEQ RENAME TO TASK_EXECUTION_METADATA_SEQ"; + + /* + * Scripts to rename Batch V5 tables, removing the BOOT3_ prefix.
+ */ + + private final static String RENAME_BATCH_JOB_INSTANCE_V5_TABLE = + "ALTER TABLE BOOT3_BATCH_JOB_INSTANCE RENAME TO BATCH_JOB_INSTANCE"; + + private final static String RENAME_BATCH_JOB_EXECUTION_V5_TABLE = + "ALTER TABLE BOOT3_BATCH_JOB_EXECUTION RENAME TO BATCH_JOB_EXECUTION"; + + private final static String RENAME_BATCH_JOB_EXECUTION_PARAMS_V5_TABLE = + "ALTER TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS RENAME TO BATCH_JOB_EXECUTION_PARAMS"; + + private final static String RENAME_BATCH_STEP_EXECUTION_V5_TABLE = + "ALTER TABLE BOOT3_BATCH_STEP_EXECUTION RENAME TO BATCH_STEP_EXECUTION"; + + private final static String RENAME_BATCH_STEP_EXECUTION_CONTEXT_V5_TABLE = + "ALTER TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT RENAME TO BATCH_STEP_EXECUTION_CONTEXT"; + + private final static String RENAME_BATCH_JOB_EXECUTION_CONTEXT_V5_TABLE = + "ALTER TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT RENAME TO BATCH_JOB_EXECUTION_CONTEXT"; + + private final static String RENAME_BATCH_STEP_EXECUTION_V5_SEQ = + "ALTER TABLE BOOT3_BATCH_STEP_EXECUTION_SEQ RENAME TO BATCH_STEP_EXECUTION_SEQ"; + + private final static String RENAME_BATCH_JOB_EXECUTION_V5_SEQ = + "ALTER TABLE BOOT3_BATCH_JOB_EXECUTION_SEQ RENAME TO BATCH_JOB_EXECUTION_SEQ"; + + private final static String RENAME_BATCH_JOB_V5_SEQ = + "ALTER TABLE BOOT3_BATCH_JOB_SEQ RENAME TO BATCH_JOB_SEQ"; + + @Override + public List dropBoot3Boot2Views() { + return Arrays.asList( + SqlCommand.from(DROP_VIEW_AGGREGATE_TASK_EXECUTION), + SqlCommand.from(DROP_VIEW_AGGREGATE_TASK_EXECUTION_PARAMS), + SqlCommand.from(DROP_VIEW_AGGREGATE_JOB_EXECUTION), + SqlCommand.from(DROP_VIEW_AGGREGATE_JOB_INSTANCE), + SqlCommand.from(DROP_VIEW_AGGREGATE_TASK_BATCH), + SqlCommand.from(DROP_VIEW_AGGREGATE_STEP_EXECUTION) + ); + } + + @Override + public List renameTask3Tables() { + return Arrays.asList( + SqlCommand.from(RENAME_TASK_EXECUTION_V3_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_PARAMS_V3_TABLE), + SqlCommand.from(RENAME_TASK_TASK_BATCH_V3_TABLE), + SqlCommand.from(RENAME_TASK_V3_SEQ), + SqlCommand.from(RENAME_TASK_LOCK_V3_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V3_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V3_SEQ) + ); + } + + @Override + public List renameBatch5Tables() { + return Arrays.asList( + SqlCommand.from(RENAME_BATCH_JOB_INSTANCE_V5_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V5_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_PARAMS_V5_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V5_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_CONTEXT_V5_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_CONTEXT_V5_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V5_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V5_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_V5_SEQ) + ); + } + + @Override + public List renameTask2Tables() { + return Arrays.asList( + SqlCommand.from(RENAME_TASK_EXECUTION_V2_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_PARAMS_V2_TABLE), + SqlCommand.from(RENAME_TASK_TASK_BATCH_V2_TABLE), + SqlCommand.from(RENAME_TASK_V2_SEQ), + SqlCommand.from(RENAME_TASK_LOCK_V2_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V2_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V2_SEQ) + ); + } + + @Override + public List renameBatch4Tables() { + return Arrays.asList( + SqlCommand.from(RENAME_BATCH_JOB_INSTANCE_V4_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V4_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_PARAMS_V4_TABLE), + 
SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V4_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_CONTEXT_V4_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_CONTEXT_V4_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V4_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V4_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_V4_SEQ) + ); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V1__Initial_Setup.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V1__Initial_Setup.java new file mode 100644 index 0000000000..5db729f703 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V1__Initial_Setup.java @@ -0,0 +1,313 @@ +/* + * Copyright 2019-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.server.db.migration.AbstractInitialSetupMigration; + +/** + * Initial schema setup for {@code mariadb}.
+ * + * @author Janne Valkealahti + * + */ +public class V1__Initial_Setup extends AbstractInitialSetupMigration { + + public final static String CREATE_HIBERNATE_SEQUENCE = + "create sequence if not exists hibernate_sequence start 1 increment 1"; + + public final static String CREATE_APP_REGISTRATION_TABLE = + "create table app_registration (\n" + + " id bigint not null,\n" + + " object_version bigint,\n" + + " default_version bit,\n" + + " metadata_uri longtext,\n" + + " name varchar(255),\n" + + " type integer,\n" + + " uri longtext,\n" + + " version varchar(255),\n" + + " primary key (id)\n" + + ")"; + + public final static String CREATE_AUDIT_RECORDS_TABLE = + "create table audit_records (\n" + + " id bigint not null,\n" + + " audit_action bigint,\n" + + " audit_data longtext,\n" + + " audit_operation bigint,\n" + + " correlation_id varchar(255),\n" + + " created_by varchar(255),\n" + + " created_on datetime,\n" + + " primary key (id)\n" + + ")"; + + public final static String CREATE_STREAM_DEFINITIONS_TABLE = + "create table stream_definitions (\n" + + " definition_name varchar(255) not null,\n" + + " definition longtext,\n" + + " primary key (definition_name)\n" + + ")"; + + public final static String CREATE_TASK_DEFINITIONS_TABLE = + "create table task_definitions (\n" + + " definition_name varchar(255) not null,\n" + + " definition longtext,\n" + + " primary key (definition_name)\n" + + ")"; + + public final static String CREATE_TASK_DEPLOYMENT_TABLE = + "create table task_deployment (\n" + + " id bigint not null,\n" + + " object_version bigint,\n" + + " task_deployment_id varchar(255) not null,\n" + + " task_definition_name varchar(255) not null,\n" + + " platform_name varchar(255) not null,\n" + + " created_on datetime,\n" + + " primary key (id)\n" + + ")"; + + public final static String CREATE_TASK_EXECUTION_TABLE = + "CREATE TABLE TASK_EXECUTION (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " START_TIME DATETIME DEFAULT NULL,\n" + + " END_TIME DATETIME DEFAULT NULL,\n" + + " TASK_NAME VARCHAR(100),\n" + + " EXIT_CODE INTEGER,\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " ERROR_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP,\n" + + " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + + " PARENT_EXECUTION_ID BIGINT\n" + + ")"; + + public final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = + "CREATE TABLE TASK_EXECUTION_PARAMS (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_PARAM VARCHAR(2500),\n" + + " constraint TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + + " references TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_TASK_TASK_BATCH = + "CREATE TABLE TASK_TASK_BATCH (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " constraint TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; + + private final static String CREATE_TASK_SEQ_SEQUENCE = + "CREATE TABLE TASK_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ")"; + + private final static String INSERT_TASK_SEQ_SEQUENCE = + "INSERT INTO TASK_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp"; + + public final static String CREATE_TASK_LOCK_TABLE = + "CREATE TABLE TASK_LOCK (\n" + + " LOCK_KEY CHAR(36) NOT NULL,\n" + + " REGION VARCHAR(100) NOT NULL,\n" + + " CLIENT_ID CHAR(36),\n" + + " CREATED_DATE DATETIME(6) NOT NULL,\n" + + " 
constraint LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ")"; + + public final static String CREATE_BATCH_JOB_INSTANCE_TABLE = + "CREATE TABLE BATCH_JOB_INSTANCE (\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_KEY VARCHAR(32) NOT NULL,\n" + + " constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ")"; + + public final static String CREATE_BATCH_JOB_EXECUTION_TABLE = + "CREATE TABLE BATCH_JOB_EXECUTION (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + + " CREATE_TIME DATETIME NOT NULL,\n" + + " START_TIME DATETIME DEFAULT NULL,\n" + + " END_TIME DATETIME DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME,\n" + + " JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,\n" + + " constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + " references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + + ")"; + + public final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BATCH_JOB_EXECUTION_PARAMS (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " TYPE_CD VARCHAR(6) NOT NULL,\n" + + " KEY_NAME VARCHAR(100) NOT NULL,\n" + + " STRING_VAL VARCHAR(250),\n" + + " DATE_VAL DATETIME DEFAULT NULL,\n" + + " LONG_VAL BIGINT,\n" + + " DOUBLE_VAL DOUBLE PRECISION,\n" + + " IDENTIFYING CHAR(1) NOT NULL,\n" + + " constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_STEP_EXECUTION_TABLE = + "CREATE TABLE BATCH_STEP_EXECUTION (\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT NOT NULL,\n" + + " STEP_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " START_TIME DATETIME NOT NULL,\n" + + " END_TIME DATETIME DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " COMMIT_COUNT BIGINT,\n" + + " READ_COUNT BIGINT,\n" + + " FILTER_COUNT BIGINT,\n" + + " WRITE_COUNT BIGINT,\n" + + " READ_SKIP_COUNT BIGINT,\n" + + " WRITE_SKIP_COUNT BIGINT,\n" + + " PROCESS_SKIP_COUNT BIGINT,\n" + + " ROLLBACK_COUNT BIGINT,\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME,\n" + + " constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BATCH_STEP_EXECUTION_CONTEXT (\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + + " references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_STEP_EXECUTION_SEQUENCE = + "CREATE TABLE BATCH_STEP_EXECUTION_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ")"; + + 
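A side note on the *_SEQ objects above: unlike the DB2 scripts earlier in this diff, which use native sequences, these mariadb/mysql-style scripts emulate sequences with single-row tables (TASK_SEQ, BATCH_STEP_EXECUTION_SEQ, and so on) seeded by the INSERT statements that follow. A minimal sketch of how such a table is typically advanced at runtime, assuming Spring JDBC's MySQLMaxValueIncrementer and a DataSource wired elsewhere; the class and method names below are illustrative, not part of this diff:

import javax.sql.DataSource;

import org.springframework.jdbc.support.incrementer.MySQLMaxValueIncrementer;

public class TaskSeqSketch {

	// Advances TASK_SEQ roughly via: update TASK_SEQ set ID = last_insert_id(ID + 1),
	// then reads back last_insert_id(), so concurrent callers get distinct values.
	public static long nextTaskExecutionId(DataSource dataSource) {
		MySQLMaxValueIncrementer incrementer =
				new MySQLMaxValueIncrementer(dataSource, "TASK_SEQ", "ID");
		incrementer.setUseNewConnection(true); // keep the bump outside the caller's transaction
		return incrementer.nextLongValue();
	}
}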
public final static String INSERT_BATCH_STEP_EXECUTION_SEQUENCE = + "INSERT INTO BATCH_STEP_EXECUTION_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp where not exists(select * from BATCH_STEP_EXECUTION_SEQ)"; + + public final static String CREATE_BATCH_JOB_EXECUTION_SEQUENCE = + "CREATE TABLE BATCH_JOB_EXECUTION_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ")"; + + public final static String INSERT_BATCH_JOB_EXECUTION_SEQUENCE = + "INSERT INTO BATCH_JOB_EXECUTION_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp where not exists(select * from BATCH_JOB_EXECUTION_SEQ)"; + + public final static String CREATE_BATCH_JOB_SEQUENCE = + "CREATE TABLE BATCH_JOB_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ")"; + + public final static String INSERT_BATCH_JOB_SEQUENCE = + "INSERT INTO BATCH_JOB_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp where not exists(select * from BATCH_JOB_SEQ)"; + + public V1__Initial_Setup() { + super(null); + } + + @Override + public List createHibernateSequence() { + return Arrays.asList( + SqlCommand.from(CREATE_HIBERNATE_SEQUENCE)); + } + + @Override + public List createAppRegistrationTable() { + return Arrays.asList( + SqlCommand.from(CREATE_APP_REGISTRATION_TABLE)); + } + + @Override + public List createTaskDeploymentTable() { + return Arrays.asList( + SqlCommand.from(CREATE_TASK_DEPLOYMENT_TABLE)); + } + + @Override + public List createAuditRecordsTable() { + return Arrays.asList( + SqlCommand.from(CREATE_AUDIT_RECORDS_TABLE)); + } + + @Override + public List createStreamDefinitionsTable() { + return Arrays.asList( + SqlCommand.from(CREATE_STREAM_DEFINITIONS_TABLE)); + } + + @Override + public List createTaskDefinitionsTable() { + return Arrays.asList( + SqlCommand.from(CREATE_TASK_DEFINITIONS_TABLE)); + } + + @Override + public List createTaskTables() { + return Arrays.asList( + SqlCommand.from(CREATE_TASK_EXECUTION_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_TASK_TASK_BATCH), + SqlCommand.from(CREATE_TASK_SEQ_SEQUENCE), + SqlCommand.from(INSERT_TASK_SEQ_SEQUENCE), + SqlCommand.from(CREATE_TASK_LOCK_TABLE)); + } + + @Override + public List createBatchTables() { + return Arrays.asList( + SqlCommand.from(CREATE_BATCH_JOB_INSTANCE_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_SEQUENCE), + SqlCommand.from(INSERT_BATCH_STEP_EXECUTION_SEQUENCE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_SEQUENCE), + SqlCommand.from(INSERT_BATCH_JOB_EXECUTION_SEQUENCE), + SqlCommand.from(CREATE_BATCH_JOB_SEQUENCE), + SqlCommand.from(INSERT_BATCH_JOB_SEQUENCE)); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V2__Add_Descriptions_And_OriginalDefinition.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V2__Add_Descriptions_And_OriginalDefinition.java new file mode 100644 index 0000000000..7d2be50085 --- /dev/null +++ 
b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V2__Add_Descriptions_And_OriginalDefinition.java @@ -0,0 +1,81 @@ +/* + * Copyright 2019-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import java.util.Arrays; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner; + +/** + * This migration class adds description column to stream_definitions and task_definitions + * tables and original_definition column to stream_definitions. + * + * @author Daniel Serleg + * @author Ilayaperumal Gopinathan + * @author Michael Minella + * + * @since 2.3 + */ +public class V2__Add_Descriptions_And_OriginalDefinition extends BaseJavaMigration { + + public final static String ALTER_STREAM_DEFINITION_TABLE_DESC = "alter table stream_definitions add description varchar(255)"; + + public final static String ALTER_STREAM_DEFINITION_TABLE_ORIG_DEF = "alter table stream_definitions add original_definition longtext"; + + public final static String ALTER_TASK_DEFINITION_TABLE = "" + + "alter table task_definitions add description varchar(255)"; + + public final static String UPDATE_STREAM_DEFINITION_TABLE_ORIG_DEF = "update stream_definitions set original_definition=definition"; + + public final static String CREATE_TASK_METADATA_TABLE = + "CREATE TABLE task_execution_metadata (\n" + + " id BIGINT NOT NULL,\n" + + " task_execution_id BIGINT NOT NULL,\n" + + " task_execution_manifest LONGTEXT,\n" + + " primary key (id),\n" + + " CONSTRAINT TASK_METADATA_FK FOREIGN KEY (task_execution_id)\n" + + " REFERENCES TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_TASK_METADATA_SEQUENCE = + "CREATE TABLE task_execution_metadata_seq (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ")"; + + public final static String INSERT_TASK_METADATA_SEQUENCE = + "INSERT INTO task_execution_metadata_seq (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp where not exists(select * from task_execution_metadata_seq)"; + + private final SqlCommandsRunner runner = new SqlCommandsRunner(); + + @Override + public void migrate(Context context) throws Exception { + runner.execute(context.getConnection(), Arrays.asList( + SqlCommand.from(ALTER_STREAM_DEFINITION_TABLE_DESC), + SqlCommand.from(ALTER_STREAM_DEFINITION_TABLE_ORIG_DEF), + SqlCommand.from(ALTER_TASK_DEFINITION_TABLE), + SqlCommand.from(UPDATE_STREAM_DEFINITION_TABLE_ORIG_DEF), + SqlCommand.from(CREATE_TASK_METADATA_TABLE), + SqlCommand.from(CREATE_TASK_METADATA_SEQUENCE), + SqlCommand.from(INSERT_TASK_METADATA_SEQUENCE))); + + } +} diff --git 
a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V3__Add_Platform_To_AuditRecords.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V3__Add_Platform_To_AuditRecords.java new file mode 100644 index 0000000000..ef467e5285 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V3__Add_Platform_To_AuditRecords.java @@ -0,0 +1,44 @@ +/* + * Copyright 2019-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import java.util.Arrays; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner; + +/** + * This migration class adds platformName column to audit_records. + * + * @author Daniel Serleg + * + * @since 2.4 + */ +public class V3__Add_Platform_To_AuditRecords extends BaseJavaMigration { + + public final static String ALTER_AUDIT_RECORDS_TABLE_PLATFORM = "alter table audit_records add platform_name varchar(255)"; + + private final SqlCommandsRunner runner = new SqlCommandsRunner(); + + @Override + public void migrate(Context context) throws Exception { + runner.execute(context.getConnection(), Arrays.asList( + SqlCommand.from(ALTER_AUDIT_RECORDS_TABLE_PLATFORM))); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V4__Add_Step_Name_Indexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V4__Add_Step_Name_Indexes.java new file mode 100644 index 0000000000..86341c3c50 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V4__Add_Step_Name_Indexes.java @@ -0,0 +1,44 @@ +/* + * Copyright 2020-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import java.util.Arrays; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner; + +/** + * This migration class adds index for STEP_NAME on BATCH_STEP_EXECUTION. + * + * @author Glenn Renfro + * + * @since 2.7 + */ +public class V4__Add_Step_Name_Indexes extends BaseJavaMigration { + + public final static String ADD_INDEX_TO_BATCH_STEP_EXECUTION = "create index STEP_NAME_IDX on BATCH_STEP_EXECUTION (STEP_NAME)"; + + private final SqlCommandsRunner runner = new SqlCommandsRunner(); + + @Override + public void migrate(Context context) throws Exception { + runner.execute(context.getConnection(), Arrays.asList( + SqlCommand.from(ADD_INDEX_TO_BATCH_STEP_EXECUTION))); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V5__Add_Task_Execution_Params_Indexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V5__Add_Task_Execution_Params_Indexes.java new file mode 100644 index 0000000000..4966bbd8c6 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V5__Add_Task_Execution_Params_Indexes.java @@ -0,0 +1,47 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import java.util.Arrays; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner; + +/** + * This migration class adds index for TASK_EXECUTION_ID on TASK_EXECUTION_PARAMS. 
+ *
+ * @author Claudio Tasso
+ *
+ * @since 2.10
+ */
+public class V5__Add_Task_Execution_Params_Indexes extends BaseJavaMigration {
+
+	public final static String ADD_INDEX_TO_STEP_EXECUTION_PARAMS = "create index TASK_EXECUTION_ID_IDX on TASK_EXECUTION_PARAMS (TASK_EXECUTION_ID)";
+
+	private final SqlCommandsRunner runner = new SqlCommandsRunner();
+
+	@Override
+	public void migrate(Context context) throws Exception {
+		runner.execute(context.getConnection(), Arrays.asList(
+				SqlCommand.from(ADD_INDEX_TO_STEP_EXECUTION_PARAMS)));
+	}
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V6__Boot3_Add_Task3_Batch5_Schema.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V6__Boot3_Add_Task3_Batch5_Schema.java
new file mode 100644
index 0000000000..de3d5cd3f3
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V6__Boot3_Add_Task3_Batch5_Schema.java
@@ -0,0 +1,206 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration.mariadb;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
+import org.springframework.cloud.dataflow.server.db.migration.AbstractBoot3InitialSetupMigration;
+
+/**
+ * Adds the spring-cloud-task V3
+ * and spring-batch V5
+ * schemas to support Boot3 compatibility.
+ * <p>
    Schemas have added table prefix of {@code "BOOT3_"}. + * + * @author Chris Bono + */ +public class V6__Boot3_Add_Task3_Batch5_Schema extends AbstractBoot3InitialSetupMigration { + + private final static String CREATE_TASK_EXECUTION_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " START_TIME DATETIME(6) DEFAULT NULL,\n" + + " END_TIME DATETIME(6) DEFAULT NULL,\n" + + " TASK_NAME VARCHAR(100),\n" + + " EXIT_CODE INTEGER,\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " ERROR_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP,\n" + + " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + + " PARENT_EXECUTION_ID BIGINT\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_PARAMS (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_PARAM VARCHAR(2500),\n" + + " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_TASK_BATCH = + "CREATE TABLE BOOT3_TASK_TASK_BATCH (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " constraint BOOT3_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_LOCK_TABLE = + "CREATE TABLE BOOT3_TASK_LOCK (\n" + + " LOCK_KEY CHAR(36) NOT NULL,\n" + + " REGION VARCHAR(100) NOT NULL,\n" + + " CLIENT_ID CHAR(36),\n" + + " CREATED_DATE DATETIME(6) NOT NULL,\n" + + " constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_SEQ = + "CREATE SEQUENCE BOOT3_TASK_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB"; + + private final static String CREATE_TASK_EXECUTION_METADATA_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_METADATA (\n" + + " ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_MANIFEST LONGTEXT,\n" + + " primary key (ID),\n" + + " CONSTRAINT BOOT3_TASK_METADATA_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_EXECUTION_METADATA_SEQ = + "CREATE SEQUENCE BOOT3_TASK_EXECUTION_METADATA_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_INSTANCE_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_INSTANCE (\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_KEY VARCHAR(32) NOT NULL,\n" + + " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + + " CREATE_TIME DATETIME(6) NOT NULL,\n" + + " START_TIME DATETIME(6) DEFAULT NULL,\n" + + " END_TIME DATETIME(6) DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME(6),\n" + + " JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,\n" + + " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + " 
references BOOT3_BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " PARAMETER_NAME VARCHAR(100) NOT NULL,\n" + + " PARAMETER_TYPE VARCHAR(100) NOT NULL,\n" + + " PARAMETER_VALUE VARCHAR(2500),\n" + + " IDENTIFYING CHAR(1) NOT NULL,\n" + + " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_STEP_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION (\n" + + "STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY ,\n" + + "VERSION BIGINT NOT NULL,\n" + + "STEP_NAME VARCHAR(100) NOT NULL,\n" + + "JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + "CREATE_TIME DATETIME(6) NOT NULL,\n" + + "START_TIME DATETIME(6) DEFAULT NULL ,\n" + + "END_TIME DATETIME(6) DEFAULT NULL ,\n" + + "STATUS VARCHAR(10) ,\n" + + "COMMIT_COUNT BIGINT ,\n" + + "READ_COUNT BIGINT ,\n" + + "FILTER_COUNT BIGINT ,\n" + + "WRITE_COUNT BIGINT ,\n" + + "READ_SKIP_COUNT BIGINT ,\n" + + "WRITE_SKIP_COUNT BIGINT ,\n" + + "PROCESS_SKIP_COUNT BIGINT ,\n" + + "ROLLBACK_COUNT BIGINT ,\n" + + "EXIT_CODE VARCHAR(2500) ,\n" + + "EXIT_MESSAGE VARCHAR(2500) ,\n" + + "LAST_UPDATED DATETIME(6),\n" + + " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT (\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + + " references BOOT3_BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_STEP_EXECUTION_SEQ = + "CREATE SEQUENCE BOOT3_BATCH_STEP_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_EXECUTION_SEQ = + "CREATE SEQUENCE BOOT3_BATCH_JOB_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_SEQ = + "CREATE SEQUENCE BOOT3_BATCH_JOB_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB"; + + + @Override + public List createTask3Tables() { + return Arrays.asList( + SqlCommand.from(CREATE_TASK_EXECUTION_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_TASK_TASK_BATCH), + SqlCommand.from(CREATE_TASK_SEQ), + SqlCommand.from(CREATE_TASK_LOCK_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_SEQ) + ); + } + + @Override + public List createBatch5Tables() { + return Arrays.asList( + 
SqlCommand.from(CREATE_BATCH_JOB_INSTANCE_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_SEQ), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_SEQ), + SqlCommand.from(CREATE_BATCH_JOB_SEQ) + ); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V7__Boot3_Boot_Version.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V7__Boot3_Boot_Version.java new file mode 100644 index 0000000000..f773634f32 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V7__Boot3_Boot_Version.java @@ -0,0 +1,28 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.mariadb; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractBootVersionMigration; + +/** + * The boot_version field will provide for indicating the version of + * Spring Boot used by the application and by implication the + * schema version of task and batch tables. + * @author Corneil du Plessis + * @since 2.11 + */ +public class V7__Boot3_Boot_Version extends AbstractBootVersionMigration { +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V8__RenameLowerCaseTables.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V8__RenameLowerCaseTables.java new file mode 100644 index 0000000000..d7099b8a13 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V8__RenameLowerCaseTables.java @@ -0,0 +1,41 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.springframework.cloud.dataflow.server.db.migration.mariadb;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
+import org.springframework.cloud.dataflow.server.db.migration.AbstractCaseSensitiveMigration;
+
+/**
+ * Since MariaDB treats table and column names case-sensitively, the prefixed TASK_ tables that were created in lowercase need to be renamed to uppercase.
+ *
+ * @author Corneil du Plessis
+ */
+public class V8__RenameLowerCaseTables extends AbstractCaseSensitiveMigration {
+
+	@Override
+	public List getCommands() {
+		return Arrays.asList(
+				SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_LC),
+				SqlCommand.from(RENAME_TASK_EXECUTION_METADATA),
+				SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_SEQ_LC_TBL),
+				SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_SEQ_TBL)
+		);
+	}
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V9__AddAggregateViews.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V9__AddAggregateViews.java
new file mode 100644
index 0000000000..7152079689
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V9__AddAggregateViews.java
@@ -0,0 +1,7 @@
+package org.springframework.cloud.dataflow.server.db.migration.mariadb;
+
+import org.springframework.cloud.dataflow.server.db.migration.AbstractAggregateViewMigration;
+
+public class V9__AddAggregateViews extends AbstractAggregateViewMigration {
+
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/MysqlBeforeBaseline.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/MysqlBeforeBaseline.java
index 444bfc5384..c72851deb4 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/MysqlBeforeBaseline.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/MysqlBeforeBaseline.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019 the original author or authors.
+ * Copyright 2019-2022 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -25,7 +25,7 @@ * Baselining schema setup for {@code postgres}.
* * @author Janne Valkealahti - * + * @author Chris Bono */ public class MysqlBeforeBaseline extends AbstractBaselineCallback { @@ -46,9 +46,9 @@ public class MysqlBeforeBaseline extends AbstractBaselineCallback { public final static String INSERT_APP_REGISTRATION_DATA = "insert into\n" + - " app_registration_tmp (id, object_version, default_version, metadata_uri, name, type, uri, version) \n" + - " select id, object_Version, default_Version, metadata_Uri, name, type, uri, version\n" + - " from APP_REGISTRATION"; + " app_registration_tmp (id, object_version, default_version, metadata_uri, name, type, uri, version) \n" + + " select id, object_Version, default_Version, metadata_Uri, name, type, uri, version\n" + + " from APP_REGISTRATION"; public final static String DROP_APP_REGISTRATION_TABLE = "drop table APP_REGISTRATION"; @@ -61,9 +61,9 @@ public class MysqlBeforeBaseline extends AbstractBaselineCallback { public final static String INSERT_STREAM_DEFINITIONS_DATA = "insert into\n" + - " stream_definitions_tmp (definition_name, definition) \n" + - " select DEFINITION_NAME, DEFINITION\n" + - " from STREAM_DEFINITIONS"; + " stream_definitions_tmp (definition_name, definition) \n" + + " select DEFINITION_NAME, DEFINITION\n" + + " from STREAM_DEFINITIONS"; public final static String DROP_STREAM_DEFINITIONS_TABLE = "drop table STREAM_DEFINITIONS"; @@ -76,9 +76,9 @@ public class MysqlBeforeBaseline extends AbstractBaselineCallback { public final static String INSERT_TASK_DEFINITIONS_DATA = "insert into\n" + - " task_definitions_tmp (definition_name, definition) \n" + - " select DEFINITION_NAME, DEFINITION\n" + - " from TASK_DEFINITIONS"; + " task_definitions_tmp (definition_name, definition) \n" + + " select DEFINITION_NAME, DEFINITION\n" + + " from TASK_DEFINITIONS"; public final static String DROP_TASK_DEFINITIONS_TABLE = "drop table TASK_DEFINITIONS"; @@ -91,9 +91,9 @@ public class MysqlBeforeBaseline extends AbstractBaselineCallback { public final static String INSERT_AUDIT_RECORDS_DATA = "insert into\n" + - " audit_records_tmp (id, audit_action, audit_data, audit_operation, correlation_id, created_by, created_on)\n" + - " select id, audit_Action, audit_data, audit_Operation, correlation_id, created_by, created_On\n" + - " from AUDIT_RECORDS"; + " audit_records_tmp (id, audit_action, audit_data, audit_operation, correlation_id, created_by, created_on)\n" + + " select id, audit_Action, audit_data, audit_Operation, correlation_id, created_by, created_On\n" + + " from AUDIT_RECORDS"; public final static String DROP_AUDIT_RECORDS_TABLE = "drop table AUDIT_RECORDS"; diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/MysqlMigrateUriRegistrySqlCommand.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/MysqlMigrateUriRegistrySqlCommand.java index 8c542a0753..8aa810add8 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/MysqlMigrateUriRegistrySqlCommand.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/MysqlMigrateUriRegistrySqlCommand.java @@ -1,5 +1,5 @@ /* - * Copyright 2019 the original author or authors. + * Copyright 2019-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -27,7 +27,7 @@ * {@code URI_REGISTRY} into {@code app_registration}. * * @author Janne Valkealahti - * + * @author Chris Bono */ public class MysqlMigrateUriRegistrySqlCommand extends AbstractMigrateUriRegistrySqlCommand { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V10__DropJobConfigurationLocation.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V10__DropJobConfigurationLocation.java new file mode 100644 index 0000000000..c745ac2b4c --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V10__DropJobConfigurationLocation.java @@ -0,0 +1,31 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.mysql; + +import java.util.Collections; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; +import org.springframework.cloud.dataflow.server.db.migration.DropColumnSqlCommands; + +/** + * Removes extra JOB_CONFIGURATION_LOCATION columns. + * @author Corneil du Plessis + */ +public class V10__DropJobConfigurationLocation extends AbstractMigration { + public V10__DropJobConfigurationLocation() { + super(Collections.singletonList(new DropColumnSqlCommands("BOOT3_BATCH_JOB_EXECUTION.JOB_CONFIGURATION_LOCATION"))); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V11__CreateBatchIndexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V11__CreateBatchIndexes.java new file mode 100644 index 0000000000..98924a10e1 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V11__CreateBatchIndexes.java @@ -0,0 +1,22 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.springframework.cloud.dataflow.server.db.migration.mysql;
+
+import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateBatchIndexesMigration;
+
+public class V11__CreateBatchIndexes extends AbstractCreateBatchIndexesMigration {
+
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V12__CreateTaskParentIndex.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V12__CreateTaskParentIndex.java
new file mode 100644
index 0000000000..b6f3f4321e
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V12__CreateTaskParentIndex.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2024 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.cloud.dataflow.server.db.migration.mysql;
+
+import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateTaskParentIndexMigration;
+
+public class V12__CreateTaskParentIndex extends AbstractCreateTaskParentIndexMigration {
+
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V13__Remove_Task2_Batch4_Support.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V13__Remove_Task2_Batch4_Support.java
new file mode 100644
index 0000000000..3147bab810
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V13__Remove_Task2_Batch4_Support.java
@@ -0,0 +1,230 @@
+/*
+ * Copyright 2024 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration.mysql;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
+import org.springframework.cloud.dataflow.server.db.migration.AbstractRemoveBatch4Task2Tables;
+
+/**
+ * Prefixes the Task V2 and Batch V4 tables with a V2_ prefix, and removes the
+ * BOOT3_ prefix from the Task V3 and Batch V5 tables.
+ *
+ * @author Glenn Renfro
+ */
+public class V13__Remove_Task2_Batch4_Support extends AbstractRemoveBatch4Task2Tables {
+
+	/*
+	 * Scripts to remove the views used to query across Task V2/Batch V4 and Task V3/Batch V5 tables.
+	 */
+	private final static String DROP_VIEW_AGGREGATE_TASK_EXECUTION =
+		"DROP VIEW AGGREGATE_TASK_EXECUTION";
+
+	private final static String DROP_VIEW_AGGREGATE_TASK_EXECUTION_PARAMS =
+		"DROP VIEW AGGREGATE_TASK_EXECUTION_PARAMS";
+
+	private final static String DROP_VIEW_AGGREGATE_JOB_EXECUTION =
+		"DROP VIEW AGGREGATE_JOB_EXECUTION";
+
+	private final static String DROP_VIEW_AGGREGATE_JOB_INSTANCE =
+		"DROP VIEW AGGREGATE_JOB_INSTANCE";
+
+	private final static String DROP_VIEW_AGGREGATE_TASK_BATCH =
+		"DROP VIEW AGGREGATE_TASK_BATCH";
+
+	private final static String DROP_VIEW_AGGREGATE_STEP_EXECUTION =
+		"DROP VIEW AGGREGATE_STEP_EXECUTION";
+
+	/*
+	 * Scripts to rename Task V2 tables, adding a V2_ prefix.
+	 */
+	private final static String RENAME_TASK_EXECUTION_V2_TABLE =
+		"ALTER TABLE TASK_EXECUTION RENAME TO V2_TASK_EXECUTION";
+
+	private final static String RENAME_TASK_EXECUTION_PARAMS_V2_TABLE =
+		"ALTER TABLE TASK_EXECUTION_PARAMS RENAME TO V2_TASK_EXECUTION_PARAMS";
+
+	private final static String RENAME_TASK_TASK_BATCH_V2_TABLE =
+		"ALTER TABLE TASK_TASK_BATCH RENAME TO V2_TASK_TASK_BATCH";
+
+	private final static String RENAME_TASK_LOCK_V2_TABLE =
+		"ALTER TABLE TASK_LOCK RENAME TO V2_TASK_LOCK";
+
+	private final static String RENAME_TASK_V2_SEQ =
+		"ALTER TABLE TASK_SEQ RENAME TO V2_TASK_SEQ";
+
+	private final static String RENAME_TASK_EXECUTION_METADATA_V2_TABLE =
+		"ALTER TABLE TASK_EXECUTION_METADATA RENAME TO V2_TASK_EXECUTION_METADATA";
+
+	private final static String RENAME_TASK_EXECUTION_METADATA_V2_SEQ =
+		"ALTER TABLE TASK_EXECUTION_METADATA_SEQ RENAME TO V2_TASK_EXECUTION_METADATA_SEQ";
+
+	/*
+	 * Scripts to rename Batch V4 tables, adding a V2_ prefix.
+	 */
+
+	private final static String RENAME_BATCH_JOB_INSTANCE_V4_TABLE =
+		"ALTER TABLE BATCH_JOB_INSTANCE RENAME TO V2_BATCH_JOB_INSTANCE";
+
+	private final static String RENAME_BATCH_JOB_EXECUTION_V4_TABLE =
+		"ALTER TABLE BATCH_JOB_EXECUTION RENAME TO V2_BATCH_JOB_EXECUTION";
+
+	private final static String RENAME_BATCH_JOB_EXECUTION_PARAMS_V4_TABLE =
+		"ALTER TABLE BATCH_JOB_EXECUTION_PARAMS RENAME TO V2_BATCH_JOB_EXECUTION_PARAMS";
+
+	private final static String RENAME_BATCH_STEP_EXECUTION_V4_TABLE =
+		"ALTER TABLE BATCH_STEP_EXECUTION RENAME TO V2_BATCH_STEP_EXECUTION";
+
+	private final static String RENAME_BATCH_STEP_EXECUTION_CONTEXT_V4_TABLE =
+		"ALTER TABLE BATCH_STEP_EXECUTION_CONTEXT RENAME TO V2_BATCH_STEP_EXECUTION_CONTEXT";
+
+	private final static String RENAME_BATCH_JOB_EXECUTION_CONTEXT_V4_TABLE =
+		"ALTER TABLE BATCH_JOB_EXECUTION_CONTEXT RENAME TO V2_BATCH_JOB_EXECUTION_CONTEXT";
+
+	private final static String RENAME_BATCH_STEP_EXECUTION_V4_SEQ =
+		"ALTER TABLE BATCH_STEP_EXECUTION_SEQ RENAME TO V2_BATCH_STEP_EXECUTION_SEQ";
+
+	private final static String RENAME_BATCH_JOB_EXECUTION_V4_SEQ =
+		"ALTER TABLE BATCH_JOB_EXECUTION_SEQ RENAME TO V2_BATCH_JOB_EXECUTION_SEQ";
+
+	private final static String RENAME_BATCH_JOB_V4_SEQ =
+		"ALTER TABLE BATCH_JOB_SEQ RENAME TO V2_BATCH_JOB_SEQ";
+
+	/*
+	 * Scripts to rename Task V3 tables, removing the BOOT3_ prefix.
+	 */
+	private final static String RENAME_TASK_EXECUTION_V3_TABLE =
+		"ALTER TABLE BOOT3_TASK_EXECUTION RENAME TO TASK_EXECUTION";
+
+	private final static String RENAME_TASK_EXECUTION_PARAMS_V3_TABLE =
+		"ALTER TABLE BOOT3_TASK_EXECUTION_PARAMS RENAME TO TASK_EXECUTION_PARAMS";
+
+	private final static String RENAME_TASK_TASK_BATCH_V3_TABLE =
+		"ALTER TABLE BOOT3_TASK_TASK_BATCH RENAME TO TASK_TASK_BATCH";
+
+	private final static String RENAME_TASK_LOCK_V3_TABLE =
+		"ALTER TABLE BOOT3_TASK_LOCK RENAME TO TASK_LOCK";
+
+	private final static String RENAME_TASK_V3_SEQ =
+		"ALTER TABLE BOOT3_TASK_SEQ RENAME TO TASK_SEQ";
+
+	private final static String RENAME_TASK_EXECUTION_METADATA_V3_TABLE =
+		"ALTER TABLE BOOT3_TASK_EXECUTION_METADATA RENAME TO TASK_EXECUTION_METADATA";
+
+	private final static String RENAME_TASK_EXECUTION_METADATA_V3_SEQ =
+		"ALTER TABLE BOOT3_TASK_EXECUTION_METADATA_SEQ RENAME TO TASK_EXECUTION_METADATA_SEQ";
+
+	/*
+	 * Scripts to rename Batch V5 tables, removing the BOOT3_ prefix.
+	 */
+
+	private final static String RENAME_BATCH_JOB_INSTANCE_V5_TABLE =
+		"ALTER TABLE BOOT3_BATCH_JOB_INSTANCE RENAME TO BATCH_JOB_INSTANCE";
+
+	private final static String RENAME_BATCH_JOB_EXECUTION_V5_TABLE =
+		"ALTER TABLE BOOT3_BATCH_JOB_EXECUTION RENAME TO BATCH_JOB_EXECUTION";
+
+	private final static String RENAME_BATCH_JOB_EXECUTION_PARAMS_V5_TABLE =
+		"ALTER TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS RENAME TO BATCH_JOB_EXECUTION_PARAMS";
+
+	private final static String RENAME_BATCH_STEP_EXECUTION_V5_TABLE =
+		"ALTER TABLE BOOT3_BATCH_STEP_EXECUTION RENAME TO BATCH_STEP_EXECUTION";
+
+	private final static String RENAME_BATCH_STEP_EXECUTION_CONTEXT_V5_TABLE =
+		"ALTER TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT RENAME TO BATCH_STEP_EXECUTION_CONTEXT";
+
+	private final static String RENAME_BATCH_JOB_EXECUTION_CONTEXT_V5_TABLE =
+		"ALTER TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT RENAME TO BATCH_JOB_EXECUTION_CONTEXT";
+
+	private final static String RENAME_BATCH_STEP_EXECUTION_V5_SEQ =
+		"ALTER TABLE BOOT3_BATCH_STEP_EXECUTION_SEQ RENAME TO BATCH_STEP_EXECUTION_SEQ";
+
+	private final static String RENAME_BATCH_JOB_EXECUTION_V5_SEQ =
+		"ALTER TABLE BOOT3_BATCH_JOB_EXECUTION_SEQ RENAME TO BATCH_JOB_EXECUTION_SEQ";
+
+	private final static String RENAME_BATCH_JOB_V5_SEQ =
+		"ALTER TABLE BOOT3_BATCH_JOB_SEQ RENAME TO BATCH_JOB_SEQ";
+
+	@Override
+	public List dropBoot3Boot2Views() {
+		return Arrays.asList(
+				SqlCommand.from(DROP_VIEW_AGGREGATE_TASK_EXECUTION),
+				SqlCommand.from(DROP_VIEW_AGGREGATE_TASK_EXECUTION_PARAMS),
+				SqlCommand.from(DROP_VIEW_AGGREGATE_JOB_EXECUTION),
+				SqlCommand.from(DROP_VIEW_AGGREGATE_JOB_INSTANCE),
+				SqlCommand.from(DROP_VIEW_AGGREGATE_TASK_BATCH),
+				SqlCommand.from(DROP_VIEW_AGGREGATE_STEP_EXECUTION)
+		);
+	}
+
+	@Override
+	public List renameTask3Tables() {
+		return Arrays.asList(
+				SqlCommand.from(RENAME_TASK_EXECUTION_V3_TABLE),
+				SqlCommand.from(RENAME_TASK_EXECUTION_PARAMS_V3_TABLE),
+				SqlCommand.from(RENAME_TASK_TASK_BATCH_V3_TABLE),
+				SqlCommand.from(RENAME_TASK_V3_SEQ),
+				SqlCommand.from(RENAME_TASK_LOCK_V3_TABLE),
+				SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V3_TABLE),
+				SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V3_SEQ)
+		);
+	}
+
+	@Override
+	public List renameBatch5Tables() {
+		return Arrays.asList(
+				SqlCommand.from(RENAME_BATCH_JOB_INSTANCE_V5_TABLE),
+				SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V5_TABLE),
+				SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_PARAMS_V5_TABLE),
+				SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V5_TABLE),
SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_CONTEXT_V5_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_CONTEXT_V5_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V5_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V5_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_V5_SEQ) + ); + } + + @Override + public List renameTask2Tables() { + return Arrays.asList( + SqlCommand.from(RENAME_TASK_EXECUTION_V2_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_PARAMS_V2_TABLE), + SqlCommand.from(RENAME_TASK_TASK_BATCH_V2_TABLE), + SqlCommand.from(RENAME_TASK_V2_SEQ), + SqlCommand.from(RENAME_TASK_LOCK_V2_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V2_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V2_SEQ) + ); + } + + @Override + public List renameBatch4Tables() { + return Arrays.asList( + SqlCommand.from(RENAME_BATCH_JOB_INSTANCE_V4_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V4_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_PARAMS_V4_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V4_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_CONTEXT_V4_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_CONTEXT_V4_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V4_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V4_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_V4_SEQ) + ); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V1__Initial_Setup.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V1__Initial_Setup.java index 323926bee3..e10ac0ad31 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V1__Initial_Setup.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V1__Initial_Setup.java @@ -1,5 +1,5 @@ /* - * Copyright 2019 the original author or authors. + * Copyright 2019-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,227 +25,227 @@ * Initial schema setup for {@code mysql}. 
* * @author Janne Valkealahti - * + * @author Chris Bono */ public class V1__Initial_Setup extends AbstractInitialSetupMigration { public final static String CREATE_HIBERNATE_SEQUENCE_TABLE = "create table if not exists hibernate_sequence (\n" + - " next_val bigint\n" + - ")"; + " next_val bigint\n" + + ")"; public final static String INSERT_HIBERNATE_SEQUENCE_TABLE = "insert into hibernate_sequence (next_val)\n" + - " select * from (select 1 as next_val) as temp\n" + - " where not exists(select * from hibernate_sequence)"; + " select * from (select 1 as next_val) as temp\n" + + " where not exists(select * from hibernate_sequence)"; public final static String CREATE_APP_REGISTRATION_TABLE = "create table app_registration (\n" + - " id bigint not null,\n" + - " object_version bigint,\n" + - " default_version bit,\n" + - " metadata_uri longtext,\n" + - " name varchar(255),\n" + - " type integer,\n" + - " uri longtext,\n" + - " version varchar(255),\n" + - " primary key (id)\n" + - ")"; + " id bigint not null,\n" + + " object_version bigint,\n" + + " default_version bit,\n" + + " metadata_uri longtext,\n" + + " name varchar(255),\n" + + " type integer,\n" + + " uri longtext,\n" + + " version varchar(255),\n" + + " primary key (id)\n" + + ")"; public final static String CREATE_AUDIT_RECORDS_TABLE = "create table audit_records (\n" + - " id bigint not null,\n" + - " audit_action bigint,\n" + - " audit_data longtext,\n" + - " audit_operation bigint,\n" + - " correlation_id varchar(255),\n" + - " created_by varchar(255),\n" + - " created_on datetime,\n" + - " primary key (id)\n" + - ")"; + " id bigint not null,\n" + + " audit_action bigint,\n" + + " audit_data longtext,\n" + + " audit_operation bigint,\n" + + " correlation_id varchar(255),\n" + + " created_by varchar(255),\n" + + " created_on datetime,\n" + + " primary key (id)\n" + + ")"; public final static String CREATE_STREAM_DEFINITIONS_TABLE = "create table stream_definitions (\n" + - " definition_name varchar(255) not null,\n" + - " definition longtext,\n" + - " primary key (definition_name)\n" + - ")"; + " definition_name varchar(255) not null,\n" + + " definition longtext,\n" + + " primary key (definition_name)\n" + + ")"; public final static String CREATE_TASK_DEFINITIONS_TABLE = "create table task_definitions (\n" + - " definition_name varchar(255) not null,\n" + - " definition longtext,\n" + - " primary key (definition_name)\n" + - ")"; + " definition_name varchar(255) not null,\n" + + " definition longtext,\n" + + " primary key (definition_name)\n" + + ")"; public final static String CREATE_TASK_DEPLOYMENT_TABLE = "create table task_deployment (\n" + - " id bigint not null,\n" + - " object_version bigint,\n" + - " task_deployment_id varchar(255) not null,\n" + - " task_definition_name varchar(255) not null,\n" + - " platform_name varchar(255) not null,\n" + - " created_on datetime,\n" + - " primary key (id)\n" + - ")"; + " id bigint not null,\n" + + " object_version bigint,\n" + + " task_deployment_id varchar(255) not null,\n" + + " task_definition_name varchar(255) not null,\n" + + " platform_name varchar(255) not null,\n" + + " created_on datetime,\n" + + " primary key (id)\n" + + ")"; public final static String CREATE_TASK_EXECUTION_TABLE = "CREATE TABLE TASK_EXECUTION (\n" + - " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " START_TIME DATETIME DEFAULT NULL,\n" + - " END_TIME DATETIME DEFAULT NULL,\n" + - " TASK_NAME VARCHAR(100),\n" + - " EXIT_CODE INTEGER,\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " ERROR_MESSAGE 
VARCHAR(2500),\n" + - " LAST_UPDATED TIMESTAMP,\n" + - " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + - " PARENT_EXECUTION_ID BIGINT\n" + - ")"; + " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " START_TIME DATETIME DEFAULT NULL,\n" + + " END_TIME DATETIME DEFAULT NULL,\n" + + " TASK_NAME VARCHAR(100),\n" + + " EXIT_CODE INTEGER,\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " ERROR_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP,\n" + + " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + + " PARENT_EXECUTION_ID BIGINT\n" + + ")"; public final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = "CREATE TABLE TASK_EXECUTION_PARAMS (\n" + - " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + - " TASK_PARAM VARCHAR(2500),\n" + - " constraint TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + - " references TASK_EXECUTION(TASK_EXECUTION_ID)\n" + - ")"; + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_PARAM VARCHAR(2500),\n" + + " constraint TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + + " references TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; public final static String CREATE_TASK_TASK_BATCH = "CREATE TABLE TASK_TASK_BATCH (\n" + - " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " constraint TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + - " references TASK_EXECUTION(TASK_EXECUTION_ID)\n" + - ")"; + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " constraint TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; private final static String CREATE_TASK_SEQ_SEQUENCE = "CREATE TABLE TASK_SEQ (\n" + - " ID BIGINT NOT NULL,\n" + - " UNIQUE_KEY CHAR(1) NOT NULL,\n" + - " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + - ")"; + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ")"; private final static String INSERT_TASK_SEQ_SEQUENCE = "INSERT INTO TASK_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp"; public final static String CREATE_TASK_LOCK_TABLE = "CREATE TABLE TASK_LOCK (\n" + - " LOCK_KEY CHAR(36) NOT NULL,\n" + - " REGION VARCHAR(100) NOT NULL,\n" + - " CLIENT_ID CHAR(36),\n" + - " CREATED_DATE DATETIME(6) NOT NULL,\n" + - " constraint LOCK_PK primary key (LOCK_KEY, REGION)\n" + - ")"; + " LOCK_KEY CHAR(36) NOT NULL,\n" + + " REGION VARCHAR(100) NOT NULL,\n" + + " CLIENT_ID CHAR(36),\n" + + " CREATED_DATE DATETIME(6) NOT NULL,\n" + + " constraint LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ")"; public final static String CREATE_BATCH_JOB_INSTANCE_TABLE = "CREATE TABLE BATCH_JOB_INSTANCE (\n" + - " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT,\n" + - " JOB_NAME VARCHAR(100) NOT NULL,\n" + - " JOB_KEY VARCHAR(32) NOT NULL,\n" + - " constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + - ")"; + " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_KEY VARCHAR(32) NOT NULL,\n" + + " constraint JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ")"; public final static String CREATE_BATCH_JOB_EXECUTION_TABLE = "CREATE TABLE BATCH_JOB_EXECUTION (\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT,\n" + - " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + - " CREATE_TIME DATETIME NOT NULL,\n" + - " START_TIME DATETIME DEFAULT NULL,\n" + - " END_TIME DATETIME DEFAULT NULL,\n" + - " STATUS VARCHAR(10),\n" + - " EXIT_CODE 
VARCHAR(2500),\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " LAST_UPDATED DATETIME,\n" + - " JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,\n" + - " constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + - " references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + - ")"; + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + + " CREATE_TIME DATETIME NOT NULL,\n" + + " START_TIME DATETIME DEFAULT NULL,\n" + + " END_TIME DATETIME DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME,\n" + + " JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,\n" + + " constraint JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + " references BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + + ")"; public final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = "CREATE TABLE BATCH_JOB_EXECUTION_PARAMS (\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " TYPE_CD VARCHAR(6) NOT NULL,\n" + - " KEY_NAME VARCHAR(100) NOT NULL,\n" + - " STRING_VAL VARCHAR(250),\n" + - " DATE_VAL DATETIME DEFAULT NULL,\n" + - " LONG_VAL BIGINT,\n" + - " DOUBLE_VAL DOUBLE PRECISION,\n" + - " IDENTIFYING CHAR(1) NOT NULL,\n" + - " constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + - ")"; + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " TYPE_CD VARCHAR(6) NOT NULL,\n" + + " KEY_NAME VARCHAR(100) NOT NULL,\n" + + " STRING_VAL VARCHAR(250),\n" + + " DATE_VAL DATETIME DEFAULT NULL,\n" + + " LONG_VAL BIGINT,\n" + + " DOUBLE_VAL DOUBLE PRECISION,\n" + + " IDENTIFYING CHAR(1) NOT NULL,\n" + + " constraint JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; public final static String CREATE_BATCH_STEP_EXECUTION_TABLE = "CREATE TABLE BATCH_STEP_EXECUTION (\n" + - " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT NOT NULL,\n" + - " STEP_NAME VARCHAR(100) NOT NULL,\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " START_TIME DATETIME NOT NULL,\n" + - " END_TIME DATETIME DEFAULT NULL,\n" + - " STATUS VARCHAR(10),\n" + - " COMMIT_COUNT BIGINT,\n" + - " READ_COUNT BIGINT,\n" + - " FILTER_COUNT BIGINT,\n" + - " WRITE_COUNT BIGINT,\n" + - " READ_SKIP_COUNT BIGINT,\n" + - " WRITE_SKIP_COUNT BIGINT,\n" + - " PROCESS_SKIP_COUNT BIGINT,\n" + - " ROLLBACK_COUNT BIGINT,\n" + - " EXIT_CODE VARCHAR(2500),\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " LAST_UPDATED DATETIME,\n" + - " constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + - ")"; + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT NOT NULL,\n" + + " STEP_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " START_TIME DATETIME NOT NULL,\n" + + " END_TIME DATETIME DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " COMMIT_COUNT BIGINT,\n" + + " READ_COUNT BIGINT,\n" + + " FILTER_COUNT BIGINT,\n" + + " WRITE_COUNT BIGINT,\n" + + " READ_SKIP_COUNT BIGINT,\n" + + " WRITE_SKIP_COUNT BIGINT,\n" + + " PROCESS_SKIP_COUNT BIGINT,\n" + + " ROLLBACK_COUNT BIGINT,\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME,\n" + + " constraint JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; public final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = "CREATE 
TABLE BATCH_STEP_EXECUTION_CONTEXT (\n" + - " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + - " SERIALIZED_CONTEXT TEXT,\n" + - " constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + - " references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + - ")"; + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + + " references BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + + ")"; public final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = "CREATE TABLE BATCH_JOB_EXECUTION_CONTEXT (\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + - " SERIALIZED_CONTEXT TEXT,\n" + - " constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + - ")"; + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; public final static String CREATE_BATCH_STEP_EXECUTION_SEQUENCE = "CREATE TABLE BATCH_STEP_EXECUTION_SEQ (\n" + - " ID BIGINT NOT NULL,\n" + - " UNIQUE_KEY CHAR(1) NOT NULL,\n" + - " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + - ")"; + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ")"; public final static String INSERT_BATCH_STEP_EXECUTION_SEQUENCE = "INSERT INTO BATCH_STEP_EXECUTION_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp where not exists(select * from BATCH_STEP_EXECUTION_SEQ)"; public final static String CREATE_BATCH_JOB_EXECUTION_SEQUENCE = "CREATE TABLE BATCH_JOB_EXECUTION_SEQ (\n" + - " ID BIGINT NOT NULL,\n" + - " UNIQUE_KEY CHAR(1) NOT NULL,\n" + - " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + - ")"; + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ")"; public final static String INSERT_BATCH_JOB_EXECUTION_SEQUENCE = "INSERT INTO BATCH_JOB_EXECUTION_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp where not exists(select * from BATCH_JOB_EXECUTION_SEQ)"; public final static String CREATE_BATCH_JOB_SEQUENCE = "CREATE TABLE BATCH_JOB_SEQ (\n" + - " ID BIGINT NOT NULL,\n" + - " UNIQUE_KEY CHAR(1) NOT NULL,\n" + - " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + - ")"; + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ")"; public final static String INSERT_BATCH_JOB_SEQUENCE = "INSERT INTO BATCH_JOB_SEQ (ID, UNIQUE_KEY) select * from (select 0 as ID, '0' as UNIQUE_KEY) as tmp where not exists(select * from BATCH_JOB_SEQ)"; diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V2__Add_Descriptions_And_OriginalDefinition.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V2__Add_Descriptions_And_OriginalDefinition.java index ae6119c0a1..de92311001 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V2__Add_Descriptions_And_OriginalDefinition.java +++ 
b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V2__Add_Descriptions_And_OriginalDefinition.java @@ -1,5 +1,5 @@ /* - * Copyright 2019 the original author or authors. + * Copyright 2019-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -30,6 +30,7 @@ * @author Daniel Serleg * @author Ilayaperumal Gopinathan * @author Michael Minella + * @author Chris Bono * * @since 2.3 */ diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V3__Add_Platform_To_AuditRecords.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V3__Add_Platform_To_AuditRecords.java index cc76048696..0583e0fb18 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V3__Add_Platform_To_AuditRecords.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V3__Add_Platform_To_AuditRecords.java @@ -1,5 +1,5 @@ /* - * Copyright 2019 the original author or authors. + * Copyright 2019-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -27,6 +27,7 @@ * This migration class adds platformName column to audit_records. * * @author Daniel Serleg + * @author Chris Bono * * @since 2.4 */ diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V4__Add_Step_Name_Indexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V4__Add_Step_Name_Indexes.java index cea57d5dae..e13053ced2 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V4__Add_Step_Name_Indexes.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V4__Add_Step_Name_Indexes.java @@ -1,5 +1,5 @@ /* - * Copyright 2020 the original author or authors. + * Copyright 2020-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -27,6 +27,7 @@ * This migration class adds index for STEP_NAME on BATCH_STEP_EXECUTION. * * @author Glenn Renfro + * @author Chris Bono * * @since 2.7 */ diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V5__Add_Task_Execution_Params_Indexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V5__Add_Task_Execution_Params_Indexes.java new file mode 100644 index 0000000000..70d9696aa6 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V5__Add_Task_Execution_Params_Indexes.java @@ -0,0 +1,47 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.db.migration.mysql; + +import java.util.Arrays; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner; + +/** + * This migration class adds an index for TASK_EXECUTION_ID on TASK_EXECUTION_PARAMS. + * + * @author Claudio Tasso + * + * @since 2.10 + */ +public class V5__Add_Task_Execution_Params_Indexes extends BaseJavaMigration { + + public final static String ADD_INDEX_TO_STEP_EXECUTION_PARAMS = "create index TASK_EXECUTION_ID_IDX on TASK_EXECUTION_PARAMS (TASK_EXECUTION_ID)"; + + private final SqlCommandsRunner runner = new SqlCommandsRunner(); + + @Override + public void migrate(Context context) throws Exception { + runner.execute(context.getConnection(), Arrays.asList( + SqlCommand.from(ADD_INDEX_TO_STEP_EXECUTION_PARAMS))); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V6__Boot3_Boot_Version.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V6__Boot3_Boot_Version.java new file mode 100644 index 0000000000..7622e5aee4 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V6__Boot3_Boot_Version.java @@ -0,0 +1,28 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.mysql; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractBootVersionMigration; + +/** + * The boot_version field indicates the version of + * Spring Boot used by the application and, by implication, the + * schema version of the task and batch tables.
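The V5 migration above pushes its single DDL statement through SqlCommandsRunner. The runner itself lives in spring-cloud-dataflow-common-flyway and is not part of this diff, but under that assumption its job reduces to executing each command's SQL on the connection Flyway supplies, roughly:

    import java.sql.Connection;
    import java.sql.SQLException;
    import java.sql.Statement;
    import java.util.List;

    // Rough sketch only; the real SqlCommandsRunner is not shown in this diff.
    class SqlRunnerSketch {
        void execute(Connection connection, List<String> commands) throws SQLException {
            try (Statement statement = connection.createStatement()) {
                for (String sql : commands) {
                    // e.g. "create index TASK_EXECUTION_ID_IDX on TASK_EXECUTION_PARAMS (TASK_EXECUTION_ID)"
                    statement.execute(sql);
                }
            }
        }
    }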
+ * @author Corneil du Plessis + * @since 2.11 + */ +public class V6__Boot3_Boot_Version extends AbstractBootVersionMigration { +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V7__Boot3_Add_Task3_Batch5_Schema.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V7__Boot3_Add_Task3_Batch5_Schema.java new file mode 100644 index 0000000000..03a2af1b4e --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V7__Boot3_Add_Task3_Batch5_Schema.java @@ -0,0 +1,262 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.mysql; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.server.db.migration.AbstractBoot3InitialSetupMigration; + +/** + * Adds the spring-cloud-task V3 + * and spring-batch V5 + * schemas to support Boot3 compatibility. + * <p>
    Schemas have added table prefix of {@code "BOOT3_"}. + * + * @author Chris Bono + * @author Corneil du Plessis + */ +public class V7__Boot3_Add_Task3_Batch5_Schema extends AbstractBoot3InitialSetupMigration { + + private final static String CREATE_TASK_EXECUTION_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " START_TIME DATETIME(6) DEFAULT NULL,\n" + + " END_TIME DATETIME(6) DEFAULT NULL,\n" + + " TASK_NAME VARCHAR(100),\n" + + " EXIT_CODE INTEGER,\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " ERROR_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP,\n" + + " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + + " PARENT_EXECUTION_ID BIGINT\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_PARAMS (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_PARAM VARCHAR(2500),\n" + + " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_TASK_BATCH = + "CREATE TABLE BOOT3_TASK_TASK_BATCH (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " constraint BOOT3_TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_LOCK_TABLE = + "CREATE TABLE BOOT3_TASK_LOCK (\n" + + " LOCK_KEY CHAR(36) NOT NULL,\n" + + " REGION VARCHAR(100) NOT NULL,\n" + + " CLIENT_ID CHAR(36),\n" + + " CREATED_DATE DATETIME(6) NOT NULL,\n" + + " constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_SEQ_SEQUENCE = + "CREATE TABLE BOOT3_TASK_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ") ENGINE=InnoDB"; + + private final static String INIT_TASK_SEQ = + "INSERT INTO BOOT3_TASK_SEQ (ID, UNIQUE_KEY)\n" + + "select *\n" + + "from (select 0 as ID, '0' as UNIQUE_KEY) as tmp"; + + private final static String CREATE_TASK_EXECUTION_METADATA_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_METADATA (\n" + + " ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_MANIFEST TEXT,\n" + + " primary key (ID),\n" + + " CONSTRAINT BOOT3_TASK_METADATA_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ")"; + + private final static String CREATE_TASK_EXECUTION_METADATA_SEQ_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_METADATA_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ")"; + + private final static String INIT_TASK_EXECUTION_METADATA_SEQ = + "INSERT INTO BOOT3_TASK_EXECUTION_METADATA_SEQ (ID, UNIQUE_KEY)\n" + + "select *\n" + + "from (select 0 as ID, '0' as UNIQUE_KEY) as tmp\n" + + "where not exists(select * from BOOT3_TASK_EXECUTION_METADATA_SEQ)"; + + private final static String CREATE_BATCH_JOB_INSTANCE_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_INSTANCE\n" + + "(\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_KEY VARCHAR(32) NOT NULL,\n" + + " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_EXECUTION_TABLE 
= + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION\n" + + "(\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + + " CREATE_TIME DATETIME(6) NOT NULL,\n" + + " START_TIME DATETIME(6) DEFAULT NULL,\n" + + " END_TIME DATETIME(6) DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME(6),\n" + + " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + " references BOOT3_BATCH_JOB_INSTANCE (JOB_INSTANCE_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " PARAMETER_NAME VARCHAR(100) NOT NULL,\n" + + " PARAMETER_TYPE VARCHAR(100) NOT NULL,\n" + + " PARAMETER_VALUE VARCHAR(2500),\n" + + " IDENTIFYING CHAR(1) NOT NULL,\n" + + " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_STEP_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION (\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT NOT NULL,\n" + + " STEP_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " CREATE_TIME DATETIME(6) NOT NULL,\n" + + " START_TIME DATETIME(6) DEFAULT NULL,\n" + + " END_TIME DATETIME(6) DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " COMMIT_COUNT BIGINT,\n" + + " READ_COUNT BIGINT,\n" + + " FILTER_COUNT BIGINT,\n" + + " WRITE_COUNT BIGINT,\n" + + " READ_SKIP_COUNT BIGINT,\n" + + " WRITE_SKIP_COUNT BIGINT,\n" + + " PROCESS_SKIP_COUNT BIGINT,\n" + + " ROLLBACK_COUNT BIGINT,\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME(6),\n" + + " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT\n" + + "(\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + + " references BOOT3_BATCH_STEP_EXECUTION (STEP_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT\n" + + "(\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_STEP_EXECUTION_SEQ = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ") ENGINE=InnoDB"; + + private final static String INIT_BATCH_STEP_EXECUTION_SEQ = + "INSERT INTO BOOT3_BATCH_STEP_EXECUTION_SEQ (ID, UNIQUE_KEY)\n" + + "select *\n" + + "from (select 0 as ID, '0' as UNIQUE_KEY) as tmp\n" + + "where not exists(select * from BOOT3_BATCH_STEP_EXECUTION_SEQ)"; + + private final static String 
CREATE_BATCH_JOB_EXECUTION_SEQ = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ") ENGINE=InnoDB"; + + private final static String INIT_BATCH_JOB_EXECUTION_SEQ = + "INSERT INTO BOOT3_BATCH_JOB_EXECUTION_SEQ (ID, UNIQUE_KEY)\n" + + "select *\n" + + "from (select 0 as ID, '0' as UNIQUE_KEY) as tmp\n" + + "where not exists(select * from BOOT3_BATCH_JOB_EXECUTION_SEQ)"; + + private final static String CREATE_BATCH_JOB_SEQ = + "CREATE TABLE BOOT3_BATCH_JOB_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ") ENGINE=InnoDB"; + + private final static String INIT_BATCH_JOB_SEQ = + "INSERT INTO BOOT3_BATCH_JOB_SEQ (ID, UNIQUE_KEY)\n" + + "select *\n" + + "from (select 0 as ID, '0' as UNIQUE_KEY) as tmp\n" + + "where not exists(select * from BOOT3_BATCH_JOB_SEQ)"; + + @Override + public List createTask3Tables() { + return Arrays.asList( + SqlCommand.from(CREATE_TASK_EXECUTION_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_TASK_TASK_BATCH), + SqlCommand.from(CREATE_TASK_SEQ_SEQUENCE), + SqlCommand.from(CREATE_TASK_LOCK_TABLE), + SqlCommand.from(INIT_TASK_SEQ), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_SEQ_TABLE), + SqlCommand.from(INIT_TASK_EXECUTION_METADATA_SEQ)); + } + + @Override + public List createBatch5Tables() { + return Arrays.asList( + SqlCommand.from(CREATE_BATCH_JOB_INSTANCE_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_SEQ), + SqlCommand.from(INIT_BATCH_STEP_EXECUTION_SEQ), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_SEQ), + SqlCommand.from(INIT_BATCH_JOB_EXECUTION_SEQ), + SqlCommand.from(CREATE_BATCH_JOB_SEQ), + SqlCommand.from(INIT_BATCH_JOB_SEQ) + ); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V8__RenameLowerCaseTables.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V8__RenameLowerCaseTables.java new file mode 100644 index 0000000000..4d003291dd --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V8__RenameLowerCaseTables.java @@ -0,0 +1,36 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
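The BOOT3_*_SEQ tables created above emulate database sequences on MySQL, which is why each guarded INSERT seeds exactly one row. The incrementer that consumes them is not part of this diff, but the classic MySQL idiom it presumably follows claims an id by bumping that single row:

    import java.sql.Connection;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;

    // Assumed id-claim step for the sequence tables above; LAST_INSERT_ID()
    // is scoped to the current connection, so concurrent claimants each see
    // their own value.
    class SequenceTableSketch {
        long nextTaskExecutionId(Connection connection) throws SQLException {
            try (Statement statement = connection.createStatement()) {
                statement.executeUpdate("UPDATE BOOT3_TASK_SEQ SET ID = LAST_INSERT_ID(ID + 1)");
                try (ResultSet rs = statement.executeQuery("SELECT LAST_INSERT_ID()")) {
                    rs.next();
                    return rs.getLong(1);
                }
            }
        }
    }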
+ */ +package org.springframework.cloud.dataflow.server.db.migration.mysql; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.server.db.migration.AbstractCaseSensitiveMigration; +/** + * Since MySQL/MariaDB can operate in a case-sensitive mode for table and column names, the TASK_ tables referenced with a prefix need to be uppercase. + * @author Corneil du Plessis + */ +public class V8__RenameLowerCaseTables extends AbstractCaseSensitiveMigration { + public List<SqlCommand> getCommands() { + return Arrays.asList( + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_LC), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_SEQ_LC_TBL), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_SEQ_TBL) + ); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V9__AddAggregateViews.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V9__AddAggregateViews.java new file mode 100644 index 0000000000..c410878f66 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V9__AddAggregateViews.java @@ -0,0 +1,7 @@ +package org.springframework.cloud.dataflow.server.db.migration.mysql; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractAggregateViewMigration; + +public class V9__AddAggregateViews extends AbstractAggregateViewMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V10__DropJobConfigurationLocation.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V10__DropJobConfigurationLocation.java new file mode 100644 index 0000000000..6e85f9084d --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V10__DropJobConfigurationLocation.java @@ -0,0 +1,31 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.oracle; + +import java.util.Collections; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; +import org.springframework.cloud.dataflow.server.db.migration.DropColumnSqlCommands; + +/** + * Removes extra JOB_CONFIGURATION_LOCATION columns.
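The rename commands that V8 inherits from AbstractCaseSensitiveMigration are defined outside this diff. Assuming they follow the pattern the class name suggests, each one is a direct rename from the lower-case spelling to the upper-case one, which only matters on servers where the two spellings are distinct tables (lower_case_table_names=0):

    // Hypothetical shape of one inherited rename; the real constants live in
    // AbstractCaseSensitiveMigration and are not shown in this diff.
    class CaseSensitiveRenameSketch {
        static final String EXAMPLE_RENAME =
                "ALTER TABLE task_execution_metadata RENAME TO TASK_EXECUTION_METADATA";
    }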
+ * @author Corneil du Plessis + */ +public class V10__DropJobConfigurationLocation extends AbstractMigration { + public V10__DropJobConfigurationLocation() { + super(Collections.singletonList(new DropColumnSqlCommands("BOOT3_BATCH_JOB_EXECUTION.JOB_CONFIGURATION_LOCATION"))); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V11__CreateBatchIndexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V11__CreateBatchIndexes.java new file mode 100644 index 0000000000..e615099a17 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V11__CreateBatchIndexes.java @@ -0,0 +1,22 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.oracle; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateBatchIndexesMigration; + +public class V11__CreateBatchIndexes extends AbstractCreateBatchIndexesMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V12__CreateTaskParentIndex.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V12__CreateTaskParentIndex.java new file mode 100644 index 0000000000..acc9aa0186 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V12__CreateTaskParentIndex.java @@ -0,0 +1,23 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.cloud.dataflow.server.db.migration.oracle; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateTaskParentIndexMigration; + +public class V12__CreateTaskParentIndex extends AbstractCreateTaskParentIndexMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V13__Remove_Task2_Batch4_Support.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V13__Remove_Task2_Batch4_Support.java new file mode 100644 index 0000000000..7a34426f49 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V13__Remove_Task2_Batch4_Support.java @@ -0,0 +1,230 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.oracle; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.server.db.migration.AbstractRemoveBatch4Task2Tables; + +/** + * Prefixes the Task V2 and Batch V4 tables with a V2_ prefix and removes the BOOT3_ prefix from the Task V3 and Batch V5 tables. + * + * @author Glenn Renfro + */ +public class V13__Remove_Task2_Batch4_Support extends AbstractRemoveBatch4Task2Tables { + + /* + * Scripts to remove the views used for combined Task V2/Batch V4 and Task V3/Batch V5 queries. + */ + private final static String DROP_VIEW_AGGREGATE_TASK_EXECUTION = + "DROP VIEW AGGREGATE_TASK_EXECUTION"; + + private final static String DROP_VIEW_AGGREGATE_TASK_EXECUTION_PARAMS = + "DROP VIEW AGGREGATE_TASK_EXECUTION_PARAMS"; + + private final static String DROP_VIEW_AGGREGATE_JOB_EXECUTION = + "DROP VIEW AGGREGATE_JOB_EXECUTION"; + + private final static String DROP_VIEW_AGGREGATE_JOB_INSTANCE = + "DROP VIEW AGGREGATE_JOB_INSTANCE"; + + private final static String DROP_VIEW_AGGREGATE_TASK_BATCH = + "DROP VIEW AGGREGATE_TASK_BATCH"; + + private final static String DROP_VIEW_AGGREGATE_STEP_EXECUTION = + "DROP VIEW AGGREGATE_STEP_EXECUTION"; + + /* + * Scripts to rename Task V2 tables, adding the V2_ prefix.
+ */ + private final static String RENAME_TASK_EXECUTION_V2_TABLE = + "ALTER TABLE TASK_EXECUTION RENAME TO V2_TASK_EXECUTION"; + + private final static String RENAME_TASK_EXECUTION_PARAMS_V2_TABLE = + "ALTER TABLE TASK_EXECUTION_PARAMS RENAME TO V2_TASK_EXECUTION_PARAMS"; + + private final static String RENAME_TASK_TASK_BATCH_V2_TABLE = + "ALTER TABLE TASK_TASK_BATCH RENAME TO V2_TASK_TASK_BATCH"; + + private final static String RENAME_TASK_LOCK_V2_TABLE = + "ALTER TABLE TASK_LOCK RENAME TO V2_TASK_LOCK"; + + private final static String RENAME_TASK_V2_SEQ = + "RENAME TASK_SEQ TO V2_TASK_SEQ"; + + private final static String RENAME_TASK_EXECUTION_METADATA_V2_TABLE = + "ALTER TABLE TASK_EXECUTION_METADATA RENAME TO V2_TASK_EXECUTION_METADATA"; + + private final static String RENAME_TASK_EXECUTION_METADATA_V2_SEQ = + "RENAME TASK_EXECUTION_METADATA_SEQ TO V2_TASK_EXECUTION_METADATA_SEQ"; + + /* + * Scripts to rename Batch V4 tables, adding the V2_ prefix. + */ + + private final static String RENAME_BATCH_JOB_INSTANCE_V4_TABLE = + "ALTER TABLE BATCH_JOB_INSTANCE RENAME TO V2_BATCH_JOB_INSTANCE"; + + private final static String RENAME_BATCH_JOB_EXECUTION_V4_TABLE = + "ALTER TABLE BATCH_JOB_EXECUTION RENAME TO V2_BATCH_JOB_EXECUTION"; + + private final static String RENAME_BATCH_JOB_EXECUTION_PARAMS_V4_TABLE = + "ALTER TABLE BATCH_JOB_EXECUTION_PARAMS RENAME TO V2_BATCH_JOB_EXECUTION_PARAMS"; + + private final static String RENAME_BATCH_STEP_EXECUTION_V4_TABLE = + "ALTER TABLE BATCH_STEP_EXECUTION RENAME TO V2_BATCH_STEP_EXECUTION"; + + private final static String RENAME_BATCH_STEP_EXECUTION_CONTEXT_V4_TABLE = + "ALTER TABLE BATCH_STEP_EXECUTION_CONTEXT RENAME TO V2_BATCH_STEP_EXECUTION_CONTEXT"; + + private final static String RENAME_BATCH_JOB_EXECUTION_CONTEXT_V4_TABLE = + "ALTER TABLE BATCH_JOB_EXECUTION_CONTEXT RENAME TO V2_BATCH_JOB_EXECUTION_CONTEXT"; + + private final static String RENAME_BATCH_STEP_EXECUTION_V4_SEQ = + "RENAME BATCH_STEP_EXECUTION_SEQ TO V2_BATCH_STEP_EXECUTION_SEQ"; + + private final static String RENAME_BATCH_JOB_EXECUTION_V4_SEQ = + "RENAME BATCH_JOB_EXECUTION_SEQ TO V2_BATCH_JOB_EXECUTION_SEQ"; + + private final static String RENAME_BATCH_JOB_V4_SEQ = + "RENAME BATCH_JOB_SEQ TO V2_BATCH_JOB_SEQ"; + + /* + * Scripts to rename Task V3 tables, removing the BOOT3_ prefix. + */ + private final static String RENAME_TASK_EXECUTION_V3_TABLE = + "ALTER TABLE BOOT3_TASK_EXECUTION RENAME TO TASK_EXECUTION"; + + private final static String RENAME_TASK_EXECUTION_PARAMS_V3_TABLE = + "ALTER TABLE BOOT3_TASK_EXECUTION_PARAMS RENAME TO TASK_EXECUTION_PARAMS"; + + private final static String RENAME_TASK_TASK_BATCH_V3_TABLE = + "ALTER TABLE BOOT3_TASK_TASK_BATCH RENAME TO TASK_TASK_BATCH"; + + private final static String RENAME_TASK_LOCK_V3_TABLE = + "ALTER TABLE BOOT3_TASK_LOCK RENAME TO TASK_LOCK"; + + private final static String RENAME_TASK_V3_SEQ = + "RENAME BOOT3_TASK_SEQ TO TASK_SEQ"; + + private final static String RENAME_TASK_EXECUTION_METADATA_V3_TABLE = + "ALTER TABLE BOOT3_TASK_EXECUTION_METADATA RENAME TO TASK_EXECUTION_METADATA"; + + private final static String RENAME_TASK_EXECUTION_METADATA_V3_SEQ = + "RENAME BOOT3_TASK_EXECUTION_METADATA_SEQ TO TASK_EXECUTION_METADATA_SEQ"; + + /* + * Scripts to rename Batch V5 tables, removing the BOOT3_ prefix.
+ */ + + private final static String RENAME_BATCH_JOB_INSTANCE_V5_TABLE = + "ALTER TABLE BOOT3_BATCH_JOB_INSTANCE RENAME TO BATCH_JOB_INSTANCE"; + + private final static String RENAME_BATCH_JOB_EXECUTION_V5_TABLE = + "ALTER TABLE BOOT3_BATCH_JOB_EXECUTION RENAME TO BATCH_JOB_EXECUTION"; + + private final static String RENAME_BATCH_JOB_EXECUTION_PARAMS_V5_TABLE = + "ALTER TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS RENAME TO BATCH_JOB_EXECUTION_PARAMS"; + + private final static String RENAME_BATCH_STEP_EXECUTION_V5_TABLE = + "ALTER TABLE BOOT3_BATCH_STEP_EXECUTION RENAME TO BATCH_STEP_EXECUTION"; + + private final static String RENAME_BATCH_STEP_EXECUTION_CONTEXT_V5_TABLE = + "ALTER TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT RENAME TO BATCH_STEP_EXECUTION_CONTEXT"; + + private final static String RENAME_BATCH_JOB_EXECUTION_CONTEXT_V5_TABLE = + "ALTER TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT RENAME TO BATCH_JOB_EXECUTION_CONTEXT"; + + private final static String RENAME_BATCH_STEP_EXECUTION_V5_SEQ = + "RENAME BOOT3_BATCH_STEP_EXECUTION_SEQ TO BATCH_STEP_EXECUTION_SEQ"; + + private final static String RENAME_BATCH_JOB_EXECUTION_V5_SEQ = + "RENAME BOOT3_BATCH_JOB_EXECUTION_SEQ TO BATCH_JOB_EXECUTION_SEQ"; + + private final static String RENAME_BATCH_JOB_V5_SEQ = + "RENAME BOOT3_BATCH_JOB_SEQ TO BATCH_JOB_SEQ"; + + @Override + public List dropBoot3Boot2Views() { + return Arrays.asList( + SqlCommand.from(DROP_VIEW_AGGREGATE_TASK_EXECUTION), + SqlCommand.from(DROP_VIEW_AGGREGATE_TASK_EXECUTION_PARAMS), + SqlCommand.from(DROP_VIEW_AGGREGATE_JOB_EXECUTION), + SqlCommand.from(DROP_VIEW_AGGREGATE_JOB_INSTANCE), + SqlCommand.from(DROP_VIEW_AGGREGATE_TASK_BATCH), + SqlCommand.from(DROP_VIEW_AGGREGATE_STEP_EXECUTION) + ); + } + + @Override + public List renameTask3Tables() { + return Arrays.asList( + SqlCommand.from(RENAME_TASK_EXECUTION_V3_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_PARAMS_V3_TABLE), + SqlCommand.from(RENAME_TASK_TASK_BATCH_V3_TABLE), + SqlCommand.from(RENAME_TASK_V3_SEQ), + SqlCommand.from(RENAME_TASK_LOCK_V3_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V3_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V3_SEQ) + ); + } + + @Override + public List renameBatch5Tables() { + return Arrays.asList( + SqlCommand.from(RENAME_BATCH_JOB_INSTANCE_V5_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V5_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_PARAMS_V5_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V5_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_CONTEXT_V5_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_CONTEXT_V5_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V5_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V5_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_V5_SEQ) + ); + } + + @Override + public List renameTask2Tables() { + return Arrays.asList( + SqlCommand.from(RENAME_TASK_EXECUTION_V2_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_PARAMS_V2_TABLE), + SqlCommand.from(RENAME_TASK_TASK_BATCH_V2_TABLE), + SqlCommand.from(RENAME_TASK_V2_SEQ), + SqlCommand.from(RENAME_TASK_LOCK_V2_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V2_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V2_SEQ) + ); + } + + @Override + public List renameBatch4Tables() { + return Arrays.asList( + SqlCommand.from(RENAME_BATCH_JOB_INSTANCE_V4_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V4_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_PARAMS_V4_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V4_TABLE), + 
SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_CONTEXT_V4_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_CONTEXT_V4_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V4_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V4_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_V4_SEQ) + ); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V1__Initial_Setup.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V1__Initial_Setup.java index 19eea4e91b..eab14faf8b 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V1__Initial_Setup.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V1__Initial_Setup.java @@ -111,7 +111,7 @@ public class V1__Initial_Setup extends AbstractInitialSetupMigration { ")"; private final static String CREATE_TASK_SEQ_SEQUENCE = - "CREATE SEQUENCE TASK_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NOCACHE NOCYCLE"; + "CREATE SEQUENCE TASK_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NOCACHE NOCYCLE"; public final static String CREATE_TASK_LOCK_TABLE = "CREATE TABLE TASK_LOCK (\n" + @@ -205,13 +205,13 @@ public class V1__Initial_Setup extends AbstractInitialSetupMigration { ")"; public final static String CREATE_BATCH_STEP_EXECUTION_SEQUENCE = - "CREATE SEQUENCE BATCH_STEP_EXECUTION_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NOCYCLE"; + "CREATE SEQUENCE BATCH_STEP_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NOCYCLE"; public final static String CREATE_BATCH_JOB_EXECUTION_SEQUENCE = - "CREATE SEQUENCE BATCH_JOB_EXECUTION_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NOCYCLE"; + "CREATE SEQUENCE BATCH_JOB_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NOCYCLE"; public final static String CREATE_BATCH_JOB_SEQUENCE = - "CREATE SEQUENCE BATCH_JOB_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NOCYCLE"; + "CREATE SEQUENCE BATCH_JOB_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NOCYCLE"; public V1__Initial_Setup() { super(null); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V2__Add_Descriptions_And_OriginalDefinition.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V2__Add_Descriptions_And_OriginalDefinition.java index 931a429d18..b7580d3950 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V2__Add_Descriptions_And_OriginalDefinition.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V2__Add_Descriptions_And_OriginalDefinition.java @@ -55,7 +55,7 @@ public class V2__Add_Descriptions_And_OriginalDefinition extends BaseJavaMigrati ")"; private final static String CREATE_TASK_METADATA_SEQUENCE = - "CREATE SEQUENCE task_execution_metadata_seq START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NOCACHE NOCYCLE"; + "CREATE SEQUENCE task_execution_metadata_seq START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NOCACHE NOCYCLE"; private final SqlCommandsRunner runner = new SqlCommandsRunner(); diff --git 
a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V5__Add_Task_Execution_Params_Indexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V5__Add_Task_Execution_Params_Indexes.java new file mode 100644 index 0000000000..f8aa5deadd --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V5__Add_Task_Execution_Params_Indexes.java @@ -0,0 +1,47 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.db.migration.oracle; + +import java.util.Arrays; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner; + +/** + * This migration class adds an index for TASK_EXECUTION_ID on TASK_EXECUTION_PARAMS. + * + * @author Claudio Tasso + * + * @since 2.10 + */ +public class V5__Add_Task_Execution_Params_Indexes extends BaseJavaMigration { + + public final static String ADD_INDEX_TO_STEP_EXECUTION_PARAMS = "create index TASK_EXECUTION_ID_IDX on TASK_EXECUTION_PARAMS (TASK_EXECUTION_ID)"; + + private final SqlCommandsRunner runner = new SqlCommandsRunner(); + + @Override + public void migrate(Context context) throws Exception { + runner.execute(context.getConnection(), Arrays.asList( + SqlCommand.from(ADD_INDEX_TO_STEP_EXECUTION_PARAMS))); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V6__Boot3_Boot_Version.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V6__Boot3_Boot_Version.java new file mode 100644 index 0000000000..c6f11a253c --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V6__Boot3_Boot_Version.java @@ -0,0 +1,28 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
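One functional fix rides along in the V1__Initial_Setup changes shown a little earlier: the Oracle sequences now use START WITH 1 MINVALUE 1 instead of START WITH 0 MINVALUE 0. The first NEXTVAL call on an Oracle sequence returns the start value itself, so the old definitions handed out 0 as the first task or job execution id:

    // Illustration: with "START WITH 0" the first fetch below returned 0;
    // after the change it returns 1.
    class FirstSequenceValueSketch {
        static final String FIRST_TASK_EXECUTION_ID = "SELECT TASK_SEQ.NEXTVAL FROM DUAL";
    }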
+ */ +package org.springframework.cloud.dataflow.server.db.migration.oracle; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractBootVersionMigration; + +/** + * The boot_version field indicates the version of + * Spring Boot used by the application and, by implication, the + * schema version of the task and batch tables. + * @author Corneil du Plessis + * @since 2.11 + */ +public class V6__Boot3_Boot_Version extends AbstractBootVersionMigration { +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V7__Boot3_Add_Task3_Batch5_Schema.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V7__Boot3_Add_Task3_Batch5_Schema.java new file mode 100644 index 0000000000..b148485574 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V7__Boot3_Add_Task3_Batch5_Schema.java @@ -0,0 +1,203 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.oracle; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.server.db.migration.AbstractBoot3InitialSetupMigration; + +/** + * Adds the spring-cloud-task V3 + * and spring-batch V5 + * schemas to support Boot3 compatibility. + * <p>
    Schemas have added table prefix of {@code "BOOT3_"}. + * + * @author Chris Bono + */ +public class V7__Boot3_Add_Task3_Batch5_Schema extends AbstractBoot3InitialSetupMigration { + + public final static String CREATE_TASK_EXECUTION_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION (\n" + + "TASK_EXECUTION_ID NUMBER NOT NULL PRIMARY KEY ,\n" + + "START_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + "END_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + "TASK_NAME VARCHAR2(100),\n" + + "EXIT_CODE INTEGER,\n" + + "EXIT_MESSAGE VARCHAR2(2500),\n" + + "ERROR_MESSAGE VARCHAR2(2500),\n" + + "LAST_UPDATED TIMESTAMP(9),\n" + + "EXTERNAL_EXECUTION_ID VARCHAR2(255),\n" + + "PARENT_EXECUTION_ID NUMBER\n" + + ")SEGMENT CREATION IMMEDIATE"; + + public final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_PARAMS (\n" + + "TASK_EXECUTION_ID NUMBER NOT NULL,\n" + + "TASK_PARAM VARCHAR2(2500),\n" + + " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ")SEGMENT CREATION IMMEDIATE"; + + public final static String CREATE_TASK_TASK_BATCH = + "CREATE TABLE BOOT3_TASK_TASK_BATCH (\n" + + "TASK_EXECUTION_ID NUMBER NOT NULL,\n" + + "JOB_EXECUTION_ID NUMBER NOT NULL,\n" + + " constraint BOOT3_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ") SEGMENT CREATION IMMEDIATE"; + + public final static String CREATE_TASK_LOCK_TABLE = + "CREATE TABLE BOOT3_TASK_LOCK (\n" + + "LOCK_KEY VARCHAR2(36) NOT NULL,\n" + + "REGION VARCHAR2(100) NOT NULL,\n" + + "CLIENT_ID VARCHAR2(36),\n" + + "CREATED_DATE TIMESTAMP(9) NOT NULL,\n" + + "constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ")SEGMENT CREATION IMMEDIATE"; + + private final static String CREATE_TASK_SEQ_SEQUENCE = + "CREATE SEQUENCE BOOT3_TASK_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE"; + + private final static String CREATE_TASK_EXECUTION_METADATA_TABLE = "CREATE TABLE BOOT3_TASK_EXECUTION_METADATA (\n" + + " ID NUMBER NOT NULL,\n" + + " TASK_EXECUTION_ID NUMBER NOT NULL,\n" + + " TASK_EXECUTION_MANIFEST CLOB,\n" + + " primary key (ID),\n" + + " CONSTRAINT BOOT3_TASK_METADATA_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ") SEGMENT CREATION IMMEDIATE"; + + private final static String CREATE_TASK_EXECUTION_METADATA_SEQ = "CREATE SEQUENCE BOOT3_TASK_EXECUTION_METADATA_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 ORDER NOCYCLE"; + + public final static String CREATE_BATCH_JOB_INSTANCE_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_INSTANCE (\n" + + "JOB_INSTANCE_ID NUMBER(19,0) NOT NULL PRIMARY KEY ,\n" + + "VERSION NUMBER(19,0) ,\n" + + "JOB_NAME VARCHAR2(100 char) NOT NULL,\n" + + "JOB_KEY VARCHAR2(32 char) NOT NULL,\n" + + " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ") SEGMENT CREATION IMMEDIATE"; + + public final static String CREATE_BATCH_JOB_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION (\n" + + "JOB_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY,\n" + + "VERSION NUMBER(19,0),\n" + + "JOB_INSTANCE_ID NUMBER(19,0) NOT NULL,\n" + + "CREATE_TIME TIMESTAMP(9) NOT NULL,\n" + + "START_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + "END_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + "STATUS VARCHAR2(10 char),\n" + + "EXIT_CODE VARCHAR2(2500 char),\n" + + "EXIT_MESSAGE VARCHAR2(2500 char),\n" + + "LAST_UPDATED TIMESTAMP(9),\n" + + "JOB_CONFIGURATION_LOCATION VARCHAR(2500 char) 
NULL,\n" + + " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + "references BOOT3_BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + + ") SEGMENT CREATION IMMEDIATE"; + + public final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS (\n" + + "JOB_EXECUTION_ID NUMBER(19,0) NOT NULL,\n" + + "TYPE_CD VARCHAR2(6 char) NOT NULL,\n" + + "KEY_NAME VARCHAR2(100 char) NOT NULL,\n" + + "STRING_VAL VARCHAR2(250 char),\n" + + "DATE_VAL TIMESTAMP(9) DEFAULT NULL,\n" + + "LONG_VAL NUMBER(19,0),\n" + + "DOUBLE_VAL NUMBER,\n" + + "IDENTIFYING CHAR(1) NOT NULL,\n" + + " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ") SEGMENT CREATION IMMEDIATE"; + + public final static String CREATE_BATCH_STEP_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION (\n" + + "STEP_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY,\n" + + "VERSION NUMBER(19,0) NOT NULL,\n" + + "STEP_NAME VARCHAR2(100 char) NOT NULL,\n" + + "JOB_EXECUTION_ID NUMBER(19,0) NOT NULL,\n" + + "CREATE_TIME TIMESTAMP(9) NOT NULL,\n" + + "START_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + "END_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + "STATUS VARCHAR2(10 char),\n" + + "COMMIT_COUNT NUMBER(19,0),\n" + + "READ_COUNT NUMBER(19,0),\n" + + "FILTER_COUNT NUMBER(19,0),\n" + + "WRITE_COUNT NUMBER(19,0),\n" + + "READ_SKIP_COUNT NUMBER(19,0),\n" + + "WRITE_SKIP_COUNT NUMBER(19,0),\n" + + "PROCESS_SKIP_COUNT NUMBER(19,0),\n" + + "ROLLBACK_COUNT NUMBER(19,0),\n" + + "EXIT_CODE VARCHAR2(2500 char),\n" + + "EXIT_MESSAGE VARCHAR2(2500 char),\n" + + "LAST_UPDATED TIMESTAMP(9),\n" + + " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ") SEGMENT CREATION IMMEDIATE"; + + public final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT (\n" + + "STEP_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY,\n" + + "SHORT_CONTEXT VARCHAR2(2500 char) NOT NULL,\n" + + "SERIALIZED_CONTEXT CLOB,\n" + + " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + + " references BOOT3_BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + + ") SEGMENT CREATION IMMEDIATE"; + + public final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT (\n" + + "JOB_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY,\n" + + "SHORT_CONTEXT VARCHAR2(2500 char) NOT NULL,\n" + + "SERIALIZED_CONTEXT CLOB,\n" + + " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ") SEGMENT CREATION IMMEDIATE"; + + public final static String CREATE_BATCH_STEP_EXECUTION_SEQUENCE = + "CREATE SEQUENCE BOOT3_BATCH_STEP_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE"; + + public final static String CREATE_BATCH_JOB_EXECUTION_SEQUENCE = + "CREATE SEQUENCE BOOT3_BATCH_JOB_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE"; + + public final static String CREATE_BATCH_JOB_SEQUENCE = + "CREATE SEQUENCE BOOT3_BATCH_JOB_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE"; + + @Override + public List createTask3Tables() { + return Arrays.asList( + SqlCommand.from(CREATE_TASK_EXECUTION_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_PARAMS_TABLE), + 
SqlCommand.from(CREATE_TASK_TASK_BATCH), + SqlCommand.from(CREATE_TASK_SEQ_SEQUENCE), + SqlCommand.from(CREATE_TASK_LOCK_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_SEQ)); + } + + @Override + public List createBatch5Tables() { + return Arrays.asList( + SqlCommand.from(CREATE_BATCH_JOB_INSTANCE_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_SEQUENCE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_SEQUENCE), + SqlCommand.from(CREATE_BATCH_JOB_SEQUENCE)); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V8__AddAggregateViews.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V8__AddAggregateViews.java new file mode 100644 index 0000000000..75f9af4815 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V8__AddAggregateViews.java @@ -0,0 +1,7 @@ +package org.springframework.cloud.dataflow.server.db.migration.oracle; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractAggregateViewMigration; + +public class V8__AddAggregateViews extends AbstractAggregateViewMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V9__Boot3_Batch5_Job_Execution_Params_Column_Fix.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V9__Boot3_Batch5_Job_Execution_Params_Column_Fix.java new file mode 100644 index 0000000000..441402c34b --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V9__Boot3_Batch5_Job_Execution_Params_Column_Fix.java @@ -0,0 +1,53 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.oracle; + +import java.util.Arrays; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner; + +/** + * Fixes the names of the {@code BOOT3_BATCH_JOB_EXECUTION_PARAMS} parameter columns. 
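The column fix in V9 below tracks the Spring Batch 5 job-parameter model: the typed TYPE_CD/STRING_VAL/DATE_VAL/LONG_VAL/DOUBLE_VAL columns of Batch 4 give way to a name/type/value triple in which PARAMETER_TYPE holds a fully qualified class name and PARAMETER_VALUE the string-converted value. A sketch of a Batch 5 parameter and the row it plausibly maps to (the exact persistence format is an assumption here):

    import java.util.Map;

    import org.springframework.batch.core.JobParameter;
    import org.springframework.batch.core.JobParameters;

    // A typed Batch 5 parameter; plausibly persisted as the row
    // ("run.id", "java.lang.Long", "42", "Y") in BOOT3_BATCH_JOB_EXECUTION_PARAMS.
    class Batch5ParameterSketch {
        static JobParameters example() {
            Map<String, JobParameter<?>> params =
                    Map.of("run.id", new JobParameter<>(42L, Long.class, true));
            return new JobParameters(params);
        }
    }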
+ * + * @author Chris Bono + */ +public class V9__Boot3_Batch5_Job_Execution_Params_Column_Fix extends BaseJavaMigration { + + public final static String DROP_COLUMNS_BATCH_JOB_EXECUTION_PARAMS_TABLE = + "ALTER TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS DROP (TYPE_CD, KEY_NAME, STRING_VAL, DATE_VAL, " + + "LONG_VAL, DOUBLE_VAL, IDENTIFYING)"; + + public final static String ADD_COLUMNS_BATCH_JOB_EXECUTION_PARAMS_TABLE = + "ALTER TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS ADD (\n" + + " PARAMETER_NAME VARCHAR(100 char) NOT NULL,\n" + + " PARAMETER_TYPE VARCHAR(100 char) NOT NULL,\n" + + " PARAMETER_VALUE VARCHAR(2500 char),\n" + + " IDENTIFYING CHAR(1) NOT NULL\n" + + ")"; + + private final SqlCommandsRunner runner = new SqlCommandsRunner(); + + @Override + public void migrate(Context context) throws Exception { + runner.execute(context.getConnection(), Arrays.asList( + SqlCommand.from(DROP_COLUMNS_BATCH_JOB_EXECUTION_PARAMS_TABLE), + SqlCommand.from(ADD_COLUMNS_BATCH_JOB_EXECUTION_PARAMS_TABLE))); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V10__ChangeTextTypes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V10__ChangeTextTypes.java new file mode 100644 index 0000000000..7d4cba6cef --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V10__ChangeTextTypes.java @@ -0,0 +1,38 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.server.db.migration.postgresql; + +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; +import org.springframework.cloud.dataflow.server.db.migration.PostgreSQLTextToOID; + +public class V10__ChangeTextTypes extends AbstractMigration { + + public V10__ChangeTextTypes() { + super(null); + } + + + @Override + public void migrate(Context context) throws Exception { + PostgreSQLTextToOID.convertColumnFromOID("app_registration", "id", "uri", context.getConfiguration().getDataSource()); + PostgreSQLTextToOID.convertColumnFromOID("app_registration", "id", "metadata_uri", context.getConfiguration().getDataSource()); + PostgreSQLTextToOID.convertColumnFromOID("stream_definitions", "definition_name", "definition", context.getConfiguration().getDataSource()); + PostgreSQLTextToOID.convertColumnFromOID("stream_definitions", "definition_name", "original_definition", context.getConfiguration().getDataSource()); + PostgreSQLTextToOID.convertColumnFromOID("task_definitions", "definition_name", "definition", context.getConfiguration().getDataSource()); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V11__DropJobConfigurationLocation.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V11__DropJobConfigurationLocation.java new file mode 100644 index 0000000000..6f654936e4 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V11__DropJobConfigurationLocation.java @@ -0,0 +1,31 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.postgresql; + +import java.util.Collections; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; +import org.springframework.cloud.dataflow.server.db.migration.DropColumnSqlCommands; + +/** + * Removes extra JOB_CONFIGURATION_LOCATION columns. 
+ * @author Corneil du Plessis + */ +public class V11__DropJobConfigurationLocation extends AbstractMigration { + public V11__DropJobConfigurationLocation() { + super(Collections.singletonList(new DropColumnSqlCommands("BOOT3_BATCH_JOB_EXECUTION.JOB_CONFIGURATION_LOCATION"))); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V12__CreateBatchIndexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V12__CreateBatchIndexes.java new file mode 100644 index 0000000000..7f29bba3ff --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V12__CreateBatchIndexes.java @@ -0,0 +1,22 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.postgresql; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateBatchIndexesMigration; + +public class V12__CreateBatchIndexes extends AbstractCreateBatchIndexesMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V13__CreateTaskParentIndex.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V13__CreateTaskParentIndex.java new file mode 100644 index 0000000000..3e4fca9e45 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V13__CreateTaskParentIndex.java @@ -0,0 +1,23 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
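The V11 migration above hands DropColumnSqlCommands a qualified TABLE.COLUMN name. Its actual implementation is project-specific and not part of this diff; conceptually it expands such a name into a vendor-appropriate ALTER statement, roughly as sketched here (class and method names are assumptions):

    // Hypothetical sketch of the expansion; the real DropColumnSqlCommands
    // also guards against the column not existing on a given database.
    class DropColumnSketch {

        // "TABLE.COLUMN" -> "ALTER TABLE <table> DROP COLUMN <column>"
        static String dropColumnSql(String qualifiedColumn) {
            int dot = qualifiedColumn.lastIndexOf('.');
            return "ALTER TABLE " + qualifiedColumn.substring(0, dot)
                + " DROP COLUMN " + qualifiedColumn.substring(dot + 1);
        }
    }

    // dropColumnSql("BOOT3_BATCH_JOB_EXECUTION.JOB_CONFIGURATION_LOCATION") yields
    // "ALTER TABLE BOOT3_BATCH_JOB_EXECUTION DROP COLUMN JOB_CONFIGURATION_LOCATION"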
+ */
+
+package org.springframework.cloud.dataflow.server.db.migration.postgresql;
+
+import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateTaskParentIndexMigration;
+
+public class V13__CreateTaskParentIndex extends AbstractCreateTaskParentIndexMigration {
+
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V14__Remove_Task2_Batch4_Support.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V14__Remove_Task2_Batch4_Support.java
new file mode 100644
index 0000000000..e91047799f
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V14__Remove_Task2_Batch4_Support.java
@@ -0,0 +1,230 @@
+/*
+ * Copyright 2024 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration.postgresql;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
+import org.springframework.cloud.dataflow.server.db.migration.AbstractRemoveBatch4Task2Tables;
+
+/**
+ * Prefixes the Task V2 and Batch V4 tables with a V2_ prefix and removes the BOOT3_ prefix from the Task V3 and Batch V5 tables.
+ *
+ * @author Glenn Renfro
+ */
+public class V14__Remove_Task2_Batch4_Support extends AbstractRemoveBatch4Task2Tables {
+
+	/*
+	 * Scripts to remove the views used for Task V2/Batch V4 and Task V3/Batch V5 queries.
+	 */
+	private final static String DROP_VIEW_AGGREGATE_TASK_EXECUTION =
+		"DROP VIEW AGGREGATE_TASK_EXECUTION";
+
+	private final static String DROP_VIEW_AGGREGATE_TASK_EXECUTION_PARAMS =
+		"DROP VIEW AGGREGATE_TASK_EXECUTION_PARAMS";
+
+	private final static String DROP_VIEW_AGGREGATE_JOB_EXECUTION =
+		"DROP VIEW AGGREGATE_JOB_EXECUTION";
+
+	private final static String DROP_VIEW_AGGREGATE_JOB_INSTANCE =
+		"DROP VIEW AGGREGATE_JOB_INSTANCE";
+
+	private final static String DROP_VIEW_AGGREGATE_TASK_BATCH =
+		"DROP VIEW AGGREGATE_TASK_BATCH";
+
+	private final static String DROP_VIEW_AGGREGATE_STEP_EXECUTION =
+		"DROP VIEW AGGREGATE_STEP_EXECUTION";
+
+	/*
+	 * Scripts to rename the Task V2 tables, adding a V2_ prefix.
+	 */
+	private final static String RENAME_TASK_EXECUTION_V2_TABLE =
+		"ALTER TABLE TASK_EXECUTION RENAME TO V2_TASK_EXECUTION";
+
+	private final static String RENAME_TASK_EXECUTION_PARAMS_V2_TABLE =
+		"ALTER TABLE TASK_EXECUTION_PARAMS RENAME TO V2_TASK_EXECUTION_PARAMS";
+
+	private final static String RENAME_TASK_TASK_BATCH_V2_TABLE =
+		"ALTER TABLE TASK_TASK_BATCH RENAME TO V2_TASK_TASK_BATCH";
+
+	private final static String RENAME_TASK_LOCK_V2_TABLE =
+		"ALTER TABLE TASK_LOCK RENAME TO V2_TASK_LOCK";
+
+	private final static String RENAME_TASK_V2_SEQ =
+		"ALTER TABLE TASK_SEQ RENAME TO V2_TASK_SEQ";
+
+	private final static String RENAME_TASK_EXECUTION_METADATA_V2_TABLE =
+		"ALTER TABLE TASK_EXECUTION_METADATA RENAME TO V2_TASK_EXECUTION_METADATA";
+
+	private final static String RENAME_TASK_EXECUTION_METADATA_V2_SEQ =
+		"ALTER TABLE TASK_EXECUTION_METADATA_SEQ RENAME TO V2_TASK_EXECUTION_METADATA_SEQ";
+
+	/*
+	 * Scripts to rename the Batch V4 tables, adding a V2_ prefix.
+	 */
+	private final static String RENAME_BATCH_JOB_INSTANCE_V4_TABLE =
+		"ALTER TABLE BATCH_JOB_INSTANCE RENAME TO V2_BATCH_JOB_INSTANCE";
+
+	private final static String RENAME_BATCH_JOB_EXECUTION_V4_TABLE =
+		"ALTER TABLE BATCH_JOB_EXECUTION RENAME TO V2_BATCH_JOB_EXECUTION";
+
+	private final static String RENAME_BATCH_JOB_EXECUTION_PARAMS_V4_TABLE =
+		"ALTER TABLE BATCH_JOB_EXECUTION_PARAMS RENAME TO V2_BATCH_JOB_EXECUTION_PARAMS";
+
+	private final static String RENAME_BATCH_STEP_EXECUTION_V4_TABLE =
+		"ALTER TABLE BATCH_STEP_EXECUTION RENAME TO V2_BATCH_STEP_EXECUTION";
+
+	private final static String RENAME_BATCH_STEP_EXECUTION_CONTEXT_V4_TABLE =
+		"ALTER TABLE BATCH_STEP_EXECUTION_CONTEXT RENAME TO V2_BATCH_STEP_EXECUTION_CONTEXT";
+
+	private final static String RENAME_BATCH_JOB_EXECUTION_CONTEXT_V4_TABLE =
+		"ALTER TABLE BATCH_JOB_EXECUTION_CONTEXT RENAME TO V2_BATCH_JOB_EXECUTION_CONTEXT";
+
+	private final static String RENAME_BATCH_STEP_EXECUTION_V4_SEQ =
+		"ALTER TABLE BATCH_STEP_EXECUTION_SEQ RENAME TO V2_BATCH_STEP_EXECUTION_SEQ";
+
+	private final static String RENAME_BATCH_JOB_EXECUTION_V4_SEQ =
+		"ALTER TABLE BATCH_JOB_EXECUTION_SEQ RENAME TO V2_BATCH_JOB_EXECUTION_SEQ";
+
+	private final static String RENAME_BATCH_JOB_V4_SEQ =
+		"ALTER TABLE BATCH_JOB_SEQ RENAME TO V2_BATCH_JOB_SEQ";
+
+	/*
+	 * Scripts to rename the Task V3 tables, removing the BOOT3_ prefix.
+	 */
+
+	private final static String RENAME_TASK_EXECUTION_V3_TABLE =
+		"ALTER TABLE BOOT3_TASK_EXECUTION RENAME TO TASK_EXECUTION";
+
+	private final static String RENAME_TASK_EXECUTION_PARAMS_V3_TABLE =
+		"ALTER TABLE BOOT3_TASK_EXECUTION_PARAMS RENAME TO TASK_EXECUTION_PARAMS";
+
+	private final static String RENAME_TASK_TASK_BATCH_V3_TABLE =
+		"ALTER TABLE BOOT3_TASK_TASK_BATCH RENAME TO TASK_TASK_BATCH";
+
+	private final static String RENAME_TASK_LOCK_V3_TABLE =
+		"ALTER TABLE BOOT3_TASK_LOCK RENAME TO TASK_LOCK";
+
+	private final static String RENAME_TASK_V3_SEQ =
+		"ALTER TABLE BOOT3_TASK_SEQ RENAME TO TASK_SEQ";
+
+	private final static String RENAME_TASK_EXECUTION_METADATA_V3_TABLE =
+		"ALTER TABLE BOOT3_TASK_EXECUTION_METADATA RENAME TO TASK_EXECUTION_METADATA";
+
+	private final static String RENAME_TASK_EXECUTION_METADATA_V3_SEQ =
+		"ALTER TABLE BOOT3_TASK_EXECUTION_METADATA_SEQ RENAME TO TASK_EXECUTION_METADATA_SEQ";
+
+	/*
+	 * Scripts to rename the Batch V5 tables, removing the BOOT3_ prefix.
+ */ + + private final static String RENAME_BATCH_JOB_INSTANCE_V5_TABLE = + "ALTER TABLE BOOT3_BATCH_JOB_INSTANCE RENAME TO BATCH_JOB_INSTANCE"; + + private final static String RENAME_BATCH_JOB_EXECUTION_V5_TABLE = + "ALTER TABLE BOOT3_BATCH_JOB_EXECUTION RENAME TO BATCH_JOB_EXECUTION"; + + private final static String RENAME_BATCH_JOB_EXECUTION_PARAMS_V5_TABLE = + "ALTER TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS RENAME TO BATCH_JOB_EXECUTION_PARAMS"; + + private final static String RENAME_BATCH_STEP_EXECUTION_V5_TABLE = + "ALTER TABLE BOOT3_BATCH_STEP_EXECUTION RENAME TO BATCH_STEP_EXECUTION"; + + private final static String RENAME_BATCH_STEP_EXECUTION_CONTEXT_V5_TABLE = + "ALTER TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT RENAME TO BATCH_STEP_EXECUTION_CONTEXT"; + + private final static String RENAME_BATCH_JOB_EXECUTION_CONTEXT_V5_TABLE = + "ALTER TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT RENAME TO BATCH_JOB_EXECUTION_CONTEXT"; + + private final static String RENAME_BATCH_STEP_EXECUTION_V5_SEQ = + "ALTER TABLE BOOT3_BATCH_STEP_EXECUTION_SEQ RENAME TO BATCH_STEP_EXECUTION_SEQ"; + + private final static String RENAME_BATCH_JOB_EXECUTION_V5_SEQ = + "ALTER TABLE BOOT3_BATCH_JOB_EXECUTION_SEQ RENAME TO BATCH_JOB_EXECUTION_SEQ"; + + private final static String RENAME_BATCH_JOB_V5_SEQ = + "ALTER TABLE BOOT3_BATCH_JOB_SEQ RENAME TO BATCH_JOB_SEQ"; + + @Override + public List dropBoot3Boot2Views() { + return Arrays.asList( + SqlCommand.from(DROP_VIEW_AGGREGATE_TASK_EXECUTION), + SqlCommand.from(DROP_VIEW_AGGREGATE_TASK_EXECUTION_PARAMS), + SqlCommand.from(DROP_VIEW_AGGREGATE_JOB_EXECUTION), + SqlCommand.from(DROP_VIEW_AGGREGATE_JOB_INSTANCE), + SqlCommand.from(DROP_VIEW_AGGREGATE_TASK_BATCH), + SqlCommand.from(DROP_VIEW_AGGREGATE_STEP_EXECUTION) + ); + } + + @Override + public List renameTask3Tables() { + return Arrays.asList( + SqlCommand.from(RENAME_TASK_EXECUTION_V3_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_PARAMS_V3_TABLE), + SqlCommand.from(RENAME_TASK_TASK_BATCH_V3_TABLE), + SqlCommand.from(RENAME_TASK_V3_SEQ), + SqlCommand.from(RENAME_TASK_LOCK_V3_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V3_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V3_SEQ) + ); + } + + @Override + public List renameBatch5Tables() { + return Arrays.asList( + SqlCommand.from(RENAME_BATCH_JOB_INSTANCE_V5_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V5_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_PARAMS_V5_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V5_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_CONTEXT_V5_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_CONTEXT_V5_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V5_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V5_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_V5_SEQ) + ); + } + + @Override + public List renameTask2Tables() { + return Arrays.asList( + SqlCommand.from(RENAME_TASK_EXECUTION_V2_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_PARAMS_V2_TABLE), + SqlCommand.from(RENAME_TASK_TASK_BATCH_V2_TABLE), + SqlCommand.from(RENAME_TASK_V2_SEQ), + SqlCommand.from(RENAME_TASK_LOCK_V2_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V2_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V2_SEQ) + ); + } + + @Override + public List renameBatch4Tables() { + return Arrays.asList( + SqlCommand.from(RENAME_BATCH_JOB_INSTANCE_V4_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V4_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_PARAMS_V4_TABLE), + 
SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V4_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_CONTEXT_V4_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_CONTEXT_V4_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V4_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V4_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_V4_SEQ) + ); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V5__Add_Task_Execution_Params_Indexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V5__Add_Task_Execution_Params_Indexes.java new file mode 100644 index 0000000000..4be04498bc --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V5__Add_Task_Execution_Params_Indexes.java @@ -0,0 +1,47 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.db.migration.postgresql; + +import java.util.Arrays; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner; + +/** + * This migration class adds index for TASK_EXECUTION_ID on TASK_EXECUTION_PARAMS. + * + * @author Claudio Tasso + * + * @since 2.10 + */ +public class V5__Add_Task_Execution_Params_Indexes extends BaseJavaMigration { + + public final static String ADD_INDEX_TO_STEP_EXECUTION_PARAMS = "create index TASK_EXECUTION_ID_IDX on TASK_EXECUTION_PARAMS (TASK_EXECUTION_ID)"; + + private final SqlCommandsRunner runner = new SqlCommandsRunner(); + + @Override + public void migrate(Context context) throws Exception { + runner.execute(context.getConnection(), Arrays.asList( + SqlCommand.from(ADD_INDEX_TO_STEP_EXECUTION_PARAMS))); + + + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V6__Boot3_Boot_Version.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V6__Boot3_Boot_Version.java new file mode 100644 index 0000000000..fdaee27127 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V6__Boot3_Boot_Version.java @@ -0,0 +1,28 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
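A PostgreSQL-specific detail in the V14 renames above: sequences are renamed with the same ALTER TABLE ... RENAME template as tables, which PostgreSQL happens to accept for sequences. The explicit per-sequence spelling is ALTER SEQUENCE; a plain-JDBC sketch for one of them (connection management is assumed):

    // Equivalent, more explicit rename of a single sequence (sketch only).
    import java.sql.Connection;
    import java.sql.SQLException;
    import java.sql.Statement;

    class SequenceRenameSketch {

        static void renameTaskSeq(Connection connection) throws SQLException {
            try (Statement stmt = connection.createStatement()) {
                stmt.execute("ALTER SEQUENCE TASK_SEQ RENAME TO V2_TASK_SEQ");
            }
        }
    }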
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration.postgresql;
+
+import org.springframework.cloud.dataflow.server.db.migration.AbstractBootVersionMigration;
+
+/**
+ * The boot_version field indicates the version of Spring Boot used by the
+ * application and, by implication, the schema version of the task and
+ * batch tables.
+ * @author Corneil du Plessis
+ * @since 2.11
+ */
+public class V6__Boot3_Boot_Version extends AbstractBootVersionMigration {
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V7__Boot3_Add_Task3_Batch5_Schema.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V7__Boot3_Add_Task3_Batch5_Schema.java
new file mode 100644
index 0000000000..1898bab5d3
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V7__Boot3_Add_Task3_Batch5_Schema.java
@@ -0,0 +1,205 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration.postgresql;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
+import org.springframework.cloud.dataflow.server.db.migration.AbstractBoot3InitialSetupMigration;
+
+/**
+ * Adds the spring-cloud-task V3
+ * and spring-batch V5
+ * schemas to support Boot3 compatibility.
+ *
<p>
    Schemas have added table prefix of {@code "BOOT3_"}. + * + * @author Chris Bono + */ +public class V7__Boot3_Add_Task3_Batch5_Schema extends AbstractBoot3InitialSetupMigration { + + public final static String CREATE_TASK_EXECUTION_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " START_TIME TIMESTAMP DEFAULT NULL,\n" + + " END_TIME TIMESTAMP DEFAULT NULL,\n" + + " TASK_NAME VARCHAR(100),\n" + + " EXIT_CODE INTEGER,\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " ERROR_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP,\n" + + " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + + " PARENT_EXECUTION_ID BIGINT\n" + + ")"; + + public final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_PARAMS (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_PARAM VARCHAR(2500),\n" + + " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_TASK_TASK_BATCH = + "CREATE TABLE BOOT3_TASK_TASK_BATCH (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " constraint BOOT3_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_TASK_LOCK_TABLE = + "CREATE TABLE BOOT3_TASK_LOCK (\n" + + " LOCK_KEY CHAR(36) NOT NULL,\n" + + " REGION VARCHAR(100) NOT NULL,\n" + + " CLIENT_ID CHAR(36),\n" + + " CREATED_DATE TIMESTAMP NOT NULL,\n" + + " constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ")"; + + private final static String CREATE_TASK_SEQ_SEQUENCE = + "CREATE SEQUENCE BOOT3_TASK_SEQ MAXVALUE 9223372036854775807 NO CYCLE"; + + private final static String CREATE_TASK_EXECUTION_METADATA_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_METADATA (\n" + + " ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_MANIFEST TEXT,\n" + + " primary key (ID),\n" + + " CONSTRAINT BOOT3_TASK_METADATA_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ");"; + + private final static String CREATE_TASK_EXECUTION_METADATA_SEQ = + "CREATE SEQUENCE BOOT3_TASK_EXECUTION_METADATA_SEQ MAXVALUE 9223372036854775807 NO CYCLE"; + + public final static String CREATE_BATCH_JOB_INSTANCE_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_INSTANCE (\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_KEY VARCHAR(32) NOT NULL,\n" + + " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ")"; + + public final static String CREATE_BATCH_JOB_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + + " CREATE_TIME TIMESTAMP NOT NULL,\n" + + " START_TIME TIMESTAMP DEFAULT NULL,\n" + + " END_TIME TIMESTAMP DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP,\n" + + " JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,\n" + + " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + " references BOOT3_BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + + ")"; + + public final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS (\n" + + " JOB_EXECUTION_ID 
BIGINT NOT NULL,\n" + + " PARAMETER_NAME VARCHAR(100) NOT NULL,\n" + + " PARAMETER_TYPE VARCHAR(100) NOT NULL,\n" + + " PARAMETER_VALUE VARCHAR(2500),\n" + + " IDENTIFYING CHAR(1) NOT NULL,\n" + + " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_STEP_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION (\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT NOT NULL,\n" + + " STEP_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " CREATE_TIME TIMESTAMP NOT NULL,\n" + + " START_TIME TIMESTAMP DEFAULT NULL,\n" + + " END_TIME TIMESTAMP DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " COMMIT_COUNT BIGINT,\n" + + " READ_COUNT BIGINT,\n" + + " FILTER_COUNT BIGINT,\n" + + " WRITE_COUNT BIGINT,\n" + + " READ_SKIP_COUNT BIGINT,\n" + + " WRITE_SKIP_COUNT BIGINT,\n" + + " PROCESS_SKIP_COUNT BIGINT,\n" + + " ROLLBACK_COUNT BIGINT,\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP,\n" + + " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT (\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + + " references BOOT3_BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_STEP_EXECUTION_SEQUENCE = + "CREATE SEQUENCE BOOT3_BATCH_STEP_EXECUTION_SEQ MAXVALUE 9223372036854775807 NO CYCLE"; + + public final static String CREATE_BATCH_JOB_EXECUTION_SEQUENCE = + "CREATE SEQUENCE BOOT3_BATCH_JOB_EXECUTION_SEQ MAXVALUE 9223372036854775807 NO CYCLE"; + + public final static String CREATE_BATCH_JOB_SEQUENCE = + "CREATE SEQUENCE BOOT3_BATCH_JOB_SEQ MAXVALUE 9223372036854775807 NO CYCLE"; + + @Override + public List createTask3Tables() { + return Arrays.asList( + SqlCommand.from(CREATE_TASK_EXECUTION_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_TASK_TASK_BATCH), + SqlCommand.from(CREATE_TASK_SEQ_SEQUENCE), + SqlCommand.from(CREATE_TASK_LOCK_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_SEQ) + ); + } + + @Override + public List createBatch5Tables() { + return Arrays.asList( + SqlCommand.from(CREATE_BATCH_JOB_INSTANCE_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_SEQUENCE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_SEQUENCE), + 
SqlCommand.from(CREATE_BATCH_JOB_SEQUENCE) + ); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V8__AddAggregateViews.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V8__AddAggregateViews.java new file mode 100644 index 0000000000..f85e45b606 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V8__AddAggregateViews.java @@ -0,0 +1,22 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.postgresql; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractAggregateViewMigration; + +public class V8__AddAggregateViews extends AbstractAggregateViewMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V9__ChangeTextTypes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V9__ChangeTextTypes.java new file mode 100644 index 0000000000..68c6e22d09 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V9__ChangeTextTypes.java @@ -0,0 +1,34 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
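The aggregate views added by V8 above (and dropped again by V14 earlier in this diff) are defined in AbstractAggregateViewMigration, which this diff does not include. Conceptually, each view UNIONs the Boot 2 and Boot 3 variants of a table; the trimmed column list and the SCHEMA_TARGET discriminator below are illustrative assumptions, not the real definition:

    // Illustrative only: the real view definitions live in
    // AbstractAggregateViewMigration and carry the full column lists.
    class AggregateViewSketch {

        static final String AGGREGATE_TASK_EXECUTION =
            "CREATE VIEW AGGREGATE_TASK_EXECUTION AS "
                + "SELECT TASK_EXECUTION_ID, TASK_NAME, 'boot2' AS SCHEMA_TARGET FROM TASK_EXECUTION "
                + "UNION ALL "
                + "SELECT TASK_EXECUTION_ID, TASK_NAME, 'boot3' AS SCHEMA_TARGET FROM BOOT3_TASK_EXECUTION";
    }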
+ */
+package org.springframework.cloud.dataflow.server.db.migration.postgresql;
+
+import org.flywaydb.core.api.migration.Context;
+
+import org.springframework.cloud.dataflow.common.flyway.AbstractMigration;
+
+
+public class V9__ChangeTextTypes extends AbstractMigration {
+
+	public V9__ChangeTextTypes() {
+		super(null);
+	}
+
+
+	@Override
+	public void migrate(Context context) throws Exception {
+		// perform no conversions
+	}
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/R__Hibernate_Sequence.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/R__Hibernate_Sequence.java
index 69625e1561..490808d77d 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/R__Hibernate_Sequence.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/R__Hibernate_Sequence.java
@@ -74,7 +74,7 @@ public void migrate(Context context) throws Exception {
 			logger.info("Looks like we have hibernate_sequence table, initiate fix");
 		}
 		catch (Exception e) {
-			logger.debug("Unable to query hibernate_sequence table, looks like we have a proper sequence", e);
+			logger.debug("Unable to query hibernate_sequence table, looks like we have a proper sequence: " + e);
 		}
 		// will result call to get commands from this class and then we choose which ones to run
 		super.migrate(context);
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V10__CreateBatchIndexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V10__CreateBatchIndexes.java
new file mode 100644
index 0000000000..2838935906
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V10__CreateBatchIndexes.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2024 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
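The R__Hibernate_Sequence hunk above relies on a probe: selecting from hibernate_sequence succeeds only when it is (incorrectly) a table, and throws when a proper sequence exists. Extracted as a standalone sketch of the same pattern:

    // Returns true when hibernate_sequence is (incorrectly) a table: the
    // SELECT works against a table but fails against a sequence.
    import java.sql.Connection;

    import org.springframework.jdbc.core.JdbcTemplate;
    import org.springframework.jdbc.datasource.SingleConnectionDataSource;

    class HibernateSequenceProbe {

        static boolean isTable(Connection connection) {
            JdbcTemplate jdbc = new JdbcTemplate(new SingleConnectionDataSource(connection, true));
            try {
                jdbc.execute("select 1 from hibernate_sequence");
                return true; // query worked: it is a table and needs fixing
            }
            catch (Exception e) {
                return false; // query failed: likely already a sequence
            }
        }
    }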
+ */
+package org.springframework.cloud.dataflow.server.db.migration.sqlserver;
+
+import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateBatchIndexesMigration;
+
+public class V10__CreateBatchIndexes extends AbstractCreateBatchIndexesMigration {
+
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V11__CreateTaskParentIndex.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V11__CreateTaskParentIndex.java
new file mode 100644
index 0000000000..0ba946be56
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V11__CreateTaskParentIndex.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2024 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.cloud.dataflow.server.db.migration.sqlserver;
+
+import org.springframework.cloud.dataflow.server.db.migration.AbstractCreateTaskParentIndexMigration;
+
+public class V11__CreateTaskParentIndex extends AbstractCreateTaskParentIndexMigration {
+
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V12__Remove_Task2_Batch4_Support.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V12__Remove_Task2_Batch4_Support.java
new file mode 100644
index 0000000000..5d6dda69f0
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V12__Remove_Task2_Batch4_Support.java
@@ -0,0 +1,230 @@
+/*
+ * Copyright 2024 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration.sqlserver;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
+import org.springframework.cloud.dataflow.server.db.migration.AbstractRemoveBatch4Task2Tables;
+
+/**
+ * Prefixes the Task V2 and Batch V4 tables with a V2_ prefix and removes the BOOT3_ prefix from the Task V3 and Batch V5 tables.
+ *
+ * @author Glenn Renfro
+ */
+public class V12__Remove_Task2_Batch4_Support extends AbstractRemoveBatch4Task2Tables {
+
+	/*
+	 * Scripts to remove the views used for Task V2/Batch V4 and Task V3/Batch V5 queries.
+	 */
+	private final static String DROP_VIEW_AGGREGATE_TASK_EXECUTION =
+		"DROP VIEW AGGREGATE_TASK_EXECUTION";
+
+	private final static String DROP_VIEW_AGGREGATE_TASK_EXECUTION_PARAMS =
+		"DROP VIEW AGGREGATE_TASK_EXECUTION_PARAMS";
+
+	private final static String DROP_VIEW_AGGREGATE_JOB_EXECUTION =
+		"DROP VIEW AGGREGATE_JOB_EXECUTION";
+
+	private final static String DROP_VIEW_AGGREGATE_JOB_INSTANCE =
+		"DROP VIEW AGGREGATE_JOB_INSTANCE";
+
+	private final static String DROP_VIEW_AGGREGATE_TASK_BATCH =
+		"DROP VIEW AGGREGATE_TASK_BATCH";
+
+	private final static String DROP_VIEW_AGGREGATE_STEP_EXECUTION =
+		"DROP VIEW AGGREGATE_STEP_EXECUTION";
+
+	/*
+	 * Scripts to rename the Task V2 tables, adding a V2_ prefix.
+	 */
+	private final static String RENAME_TASK_EXECUTION_V2_TABLE =
+		"exec sp_rename 'TASK_EXECUTION', 'V2_TASK_EXECUTION'";
+
+	private final static String RENAME_TASK_EXECUTION_PARAMS_V2_TABLE =
+		"exec sp_rename 'TASK_EXECUTION_PARAMS', 'V2_TASK_EXECUTION_PARAMS'";
+
+	private final static String RENAME_TASK_TASK_BATCH_V2_TABLE =
+		"exec sp_rename 'TASK_TASK_BATCH', 'V2_TASK_TASK_BATCH'";
+
+	private final static String RENAME_TASK_LOCK_V2_TABLE =
+		"exec sp_rename 'TASK_LOCK', 'V2_TASK_LOCK'";
+
+	private final static String RENAME_TASK_V2_SEQ =
+		"exec sp_rename 'TASK_SEQ', 'V2_TASK_SEQ'";
+
+	private final static String RENAME_TASK_EXECUTION_METADATA_V2_TABLE =
+		"exec sp_rename 'TASK_EXECUTION_METADATA', 'V2_TASK_EXECUTION_METADATA'";
+
+	private final static String RENAME_TASK_EXECUTION_METADATA_V2_SEQ =
+		"exec sp_rename 'TASK_EXECUTION_METADATA_SEQ', 'V2_TASK_EXECUTION_METADATA_SEQ'";
+
+	/*
+	 * Scripts to rename the Batch V4 tables, adding a V2_ prefix.
+	 */
+
+	private final static String RENAME_BATCH_JOB_INSTANCE_V4_TABLE =
+		"exec sp_rename 'BATCH_JOB_INSTANCE', 'V2_BATCH_JOB_INSTANCE'";
+
+	private final static String RENAME_BATCH_JOB_EXECUTION_V4_TABLE =
+		"exec sp_rename 'BATCH_JOB_EXECUTION', 'V2_BATCH_JOB_EXECUTION'";
+
+	private final static String RENAME_BATCH_JOB_EXECUTION_PARAMS_V4_TABLE =
+		"exec sp_rename 'BATCH_JOB_EXECUTION_PARAMS', 'V2_BATCH_JOB_EXECUTION_PARAMS'";
+
+	private final static String RENAME_BATCH_STEP_EXECUTION_V4_TABLE =
+		"exec sp_rename 'BATCH_STEP_EXECUTION', 'V2_BATCH_STEP_EXECUTION'";
+
+	private final static String RENAME_BATCH_STEP_EXECUTION_CONTEXT_V4_TABLE =
+		"exec sp_rename 'BATCH_STEP_EXECUTION_CONTEXT', 'V2_BATCH_STEP_EXECUTION_CONTEXT'";
+
+	private final static String RENAME_BATCH_JOB_EXECUTION_CONTEXT_V4_TABLE =
+		"exec sp_rename 'BATCH_JOB_EXECUTION_CONTEXT', 'V2_BATCH_JOB_EXECUTION_CONTEXT'";
+
+	private final static String RENAME_BATCH_STEP_EXECUTION_V4_SEQ =
+		"exec sp_rename 'BATCH_STEP_EXECUTION_SEQ', 'V2_BATCH_STEP_EXECUTION_SEQ'";
+
+	private final static String RENAME_BATCH_JOB_EXECUTION_V4_SEQ =
+		"exec sp_rename 'BATCH_JOB_EXECUTION_SEQ', 'V2_BATCH_JOB_EXECUTION_SEQ'";
+
+	private final static String RENAME_BATCH_JOB_V4_SEQ =
+		"exec sp_rename 'BATCH_JOB_SEQ', 'V2_BATCH_JOB_SEQ'";
+
+	/*
+	 * Scripts to rename the Task V3 tables, removing the BOOT3_ prefix.
+	 */
+	private final static String RENAME_TASK_EXECUTION_V3_TABLE =
+		"exec sp_rename 'BOOT3_TASK_EXECUTION', 'TASK_EXECUTION'";
+
+	private final static String RENAME_TASK_EXECUTION_PARAMS_V3_TABLE =
+		"exec sp_rename 'BOOT3_TASK_EXECUTION_PARAMS', 'TASK_EXECUTION_PARAMS'";
+
+	private final static String RENAME_TASK_TASK_BATCH_V3_TABLE =
+		"exec sp_rename 'BOOT3_TASK_TASK_BATCH', 'TASK_TASK_BATCH'";
+
+	private final static String RENAME_TASK_LOCK_V3_TABLE =
+		"exec sp_rename 'BOOT3_TASK_LOCK', 'TASK_LOCK'";
+
+	private final static String RENAME_TASK_V3_SEQ =
+		"exec sp_rename 'BOOT3_TASK_SEQ', 'TASK_SEQ'";
+
+	private final static String RENAME_TASK_EXECUTION_METADATA_V3_TABLE =
+		"exec sp_rename 'BOOT3_TASK_EXECUTION_METADATA', 'TASK_EXECUTION_METADATA'";
+
+	private final static String RENAME_TASK_EXECUTION_METADATA_V3_SEQ =
+		"exec sp_rename 'BOOT3_TASK_EXECUTION_METADATA_SEQ', 'TASK_EXECUTION_METADATA_SEQ'";
+
+	/*
+	 * Scripts to rename the Batch V5 tables, removing the BOOT3_ prefix.
+	 */
+
+	private final static String RENAME_BATCH_JOB_INSTANCE_V5_TABLE =
+		"exec sp_rename 'BOOT3_BATCH_JOB_INSTANCE', 'BATCH_JOB_INSTANCE'";
+
+	private final static String RENAME_BATCH_JOB_EXECUTION_V5_TABLE =
+		"exec sp_rename 'BOOT3_BATCH_JOB_EXECUTION', 'BATCH_JOB_EXECUTION'";
+
+	private final static String RENAME_BATCH_JOB_EXECUTION_PARAMS_V5_TABLE =
+		"exec sp_rename 'BOOT3_BATCH_JOB_EXECUTION_PARAMS', 'BATCH_JOB_EXECUTION_PARAMS'";
+
+	private final static String RENAME_BATCH_STEP_EXECUTION_V5_TABLE =
+		"exec sp_rename 'BOOT3_BATCH_STEP_EXECUTION', 'BATCH_STEP_EXECUTION'";
+
+	private final static String RENAME_BATCH_STEP_EXECUTION_CONTEXT_V5_TABLE =
+		"exec sp_rename 'BOOT3_BATCH_STEP_EXECUTION_CONTEXT', 'BATCH_STEP_EXECUTION_CONTEXT'";
+
+	private final static String RENAME_BATCH_JOB_EXECUTION_CONTEXT_V5_TABLE =
+		"exec sp_rename 'BOOT3_BATCH_JOB_EXECUTION_CONTEXT', 'BATCH_JOB_EXECUTION_CONTEXT'";
+
+	private final static String RENAME_BATCH_STEP_EXECUTION_V5_SEQ =
+		"exec sp_rename 'BOOT3_BATCH_STEP_EXECUTION_SEQ', 'BATCH_STEP_EXECUTION_SEQ'";
+
+	private final static String RENAME_BATCH_JOB_EXECUTION_V5_SEQ =
+		"exec sp_rename 'BOOT3_BATCH_JOB_EXECUTION_SEQ', 'BATCH_JOB_EXECUTION_SEQ'";
+
+	private final static String RENAME_BATCH_JOB_V5_SEQ =
+		"exec sp_rename 'BOOT3_BATCH_JOB_SEQ', 'BATCH_JOB_SEQ'";
+
+	@Override
+	public List dropBoot3Boot2Views() {
+		return Arrays.asList(
+			SqlCommand.from(DROP_VIEW_AGGREGATE_TASK_EXECUTION),
+			SqlCommand.from(DROP_VIEW_AGGREGATE_TASK_EXECUTION_PARAMS),
+			SqlCommand.from(DROP_VIEW_AGGREGATE_JOB_EXECUTION),
+			SqlCommand.from(DROP_VIEW_AGGREGATE_JOB_INSTANCE),
+			SqlCommand.from(DROP_VIEW_AGGREGATE_TASK_BATCH),
+			SqlCommand.from(DROP_VIEW_AGGREGATE_STEP_EXECUTION)
+		);
+	}
+
+	@Override
+	public List renameTask3Tables() {
+		return Arrays.asList(
+			SqlCommand.from(RENAME_TASK_EXECUTION_V3_TABLE),
+			SqlCommand.from(RENAME_TASK_EXECUTION_PARAMS_V3_TABLE),
+			SqlCommand.from(RENAME_TASK_TASK_BATCH_V3_TABLE),
+			SqlCommand.from(RENAME_TASK_V3_SEQ),
+			SqlCommand.from(RENAME_TASK_LOCK_V3_TABLE),
+			SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V3_TABLE),
+			SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V3_SEQ)
+		);
+	}
+
+	@Override
+	public List renameBatch5Tables() {
+		return Arrays.asList(
+			SqlCommand.from(RENAME_BATCH_JOB_INSTANCE_V5_TABLE),
+			SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V5_TABLE),
+			SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_PARAMS_V5_TABLE),
+			SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V5_TABLE),
+
SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_CONTEXT_V5_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_CONTEXT_V5_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V5_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V5_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_V5_SEQ) + ); + } + + @Override + public List renameTask2Tables() { + return Arrays.asList( + SqlCommand.from(RENAME_TASK_EXECUTION_V2_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_PARAMS_V2_TABLE), + SqlCommand.from(RENAME_TASK_TASK_BATCH_V2_TABLE), + SqlCommand.from(RENAME_TASK_V2_SEQ), + SqlCommand.from(RENAME_TASK_LOCK_V2_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V2_TABLE), + SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_V2_SEQ) + ); + } + + @Override + public List renameBatch4Tables() { + return Arrays.asList( + SqlCommand.from(RENAME_BATCH_JOB_INSTANCE_V4_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V4_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_PARAMS_V4_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V4_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_CONTEXT_V4_TABLE), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_CONTEXT_V4_TABLE), + SqlCommand.from(RENAME_BATCH_STEP_EXECUTION_V4_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_EXECUTION_V4_SEQ), + SqlCommand.from(RENAME_BATCH_JOB_V4_SEQ) + ); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V1__Initial_Setup.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V1__Initial_Setup.java index d09432ec86..7460efe8dd 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V1__Initial_Setup.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V1__Initial_Setup.java @@ -1,5 +1,5 @@ /* - * Copyright 2019 the original author or authors. + * Copyright 2019-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -110,9 +110,6 @@ public class V1__Initial_Setup extends AbstractInitialSetupMigration { " references TASK_EXECUTION(TASK_EXECUTION_ID)\n" + ")"; - private final static String CREATE_TASK_SEQ_SEQUENCE = - "CREATE TABLE TASK_SEQ (ID BIGINT IDENTITY)"; - public final static String CREATE_TASK_LOCK_TABLE = "CREATE TABLE TASK_LOCK (\n" + " LOCK_KEY CHAR(36) NOT NULL,\n" + @@ -258,7 +255,7 @@ public List createTaskTables() { SqlCommand.from(CREATE_TASK_EXECUTION_TABLE), SqlCommand.from(CREATE_TASK_EXECUTION_PARAMS_TABLE), SqlCommand.from(CREATE_TASK_TASK_BATCH), - SqlCommand.from(CREATE_TASK_SEQ_SEQUENCE), + // NOTE: TASK_SEQ handled in R__Task_Sequence SqlCommand.from(CREATE_TASK_LOCK_TABLE)); } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V4_1__Task_Sequence.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V4_1__Task_Sequence.java new file mode 100644 index 0000000000..21da07bc1c --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V4_1__Task_Sequence.java @@ -0,0 +1,82 @@ +/* + * Copyright 2021 the original author or authors. 
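Unlike the ALTER TABLE ... RENAME statements used on the other databases, SQL Server renames exactly one object per sp_rename call and does not rewrite references to it, which is why the V12 migration above issues a separate statement for every table and sequence. A single rename via plain JDBC, as a sketch (connection management is assumed):

    // One object per call; repeat for each table and sequence.
    import java.sql.Connection;
    import java.sql.SQLException;
    import java.sql.Statement;

    class SpRenameSketch {

        static void renameTaskExecution(Connection connection) throws SQLException {
            try (Statement stmt = connection.createStatement()) {
                stmt.execute("exec sp_rename 'TASK_EXECUTION', 'V2_TASK_EXECUTION'");
            }
        }
    }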
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration.sqlserver;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.flywaydb.core.api.migration.Context;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.cloud.dataflow.common.flyway.AbstractMigration;
+import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.jdbc.datasource.SingleConnectionDataSource;
+
+/**
+ * Migration ensuring that the {@code TASK_SEQ} sequence exists.
+ * Done in Java for {@code mssql}, suppressing the "object exists" error, since it
+ * does not support "create sequence if not exists". We also migrate from the old
+ * {@code TASK_SEQ} table to a sequence, carrying the correct next value over.
+ *
+ * @author Janne Valkealahti
+ *
+ */
+public class V4_1__Task_Sequence extends AbstractMigration {
+
+	private static final Logger logger = LoggerFactory.getLogger(V4_1__Task_Sequence.class);
+
+	private final static List commands = Arrays.asList(
+			SqlCommand.from("create sequence TASK_SEQ start with 1 increment by 1", 2714));
+
+	// T-SQL commands converting the TASK_SEQ table into a sequence;
+	// +1 because the sequence start value is the first one handed out
+	public final static List fixcommands = Arrays.asList(
+			SqlCommand.from("exec sp_rename 'TASK_SEQ', 'TASK_SEQ_OLD'; \n"
+					+ "declare @max int;\n"
+					+ "select @max = isnull(max(ID),0)+1 from TASK_SEQ_OLD;\n"
+					+ "exec('create sequence TASK_SEQ start with ' + @max + ' increment by 1;');\n"
+					+ "drop table TASK_SEQ_OLD;"));
+
+	private boolean fixSequence;
+
+	public V4_1__Task_Sequence() {
+		super(commands);
+	}
+
+	@Override
+	public void migrate(Context context) throws Exception {
+		logger.info("About to check if mssql TASK_SEQ needs fix from table to a sequence");
+		try {
+			JdbcTemplate jdbcTemplate = new JdbcTemplate(new SingleConnectionDataSource(context.getConnection(), true));
+			// if the old, wrongly created table exists, this query succeeds
+			jdbcTemplate.execute("select 1 from TASK_SEQ");
+			fixSequence = true;
+			logger.info("Looks like we have TASK_SEQ table, initiate fix");
+		}
+		catch (Exception e) {
+			logger.debug("Unable to query TASK_SEQ table, a TASK_SEQ sequence may already exist: " + e);
+		}
+		// results in a call to getCommands() on this class, where we choose which set to run
+		super.migrate(context);
+	}
+
+	@Override
+	public List getCommands() {
+		return fixSequence ?
fixcommands : super.getCommands(); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V5__Add_Task_Execution_Params_Indexes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V5__Add_Task_Execution_Params_Indexes.java new file mode 100644 index 0000000000..81b99b0153 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V5__Add_Task_Execution_Params_Indexes.java @@ -0,0 +1,47 @@ +/* + * Copyright 2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.db.migration.sqlserver; + +import java.util.Arrays; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; +import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner; + +/** + * This migration class adds index for TASK_EXECUTION_ID on TASK_EXECUTION_PARAMS. + * + * @author Claudio Tasso + * + * @since 2.10 + */ +public class V5__Add_Task_Execution_Params_Indexes extends BaseJavaMigration { + + public final static String ADD_INDEX_TO_STEP_EXECUTION_PARAMS = "create index TASK_EXECUTION_ID_IDX on TASK_EXECUTION_PARAMS (TASK_EXECUTION_ID)"; + + private final SqlCommandsRunner runner = new SqlCommandsRunner(); + + @Override + public void migrate(Context context) throws Exception { + runner.execute(context.getConnection(), Arrays.asList( + SqlCommand.from(ADD_INDEX_TO_STEP_EXECUTION_PARAMS))); + + + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V6__Boot3_Boot_Version.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V6__Boot3_Boot_Version.java new file mode 100644 index 0000000000..5c8a16b492 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V6__Boot3_Boot_Version.java @@ -0,0 +1,28 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
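The T-SQL batch in V4_1 above seeds the replacement sequence one past the highest ID handed out by the old TASK_SEQ identity table before dropping it. The same idea in plain JDBC, for illustration only (the real fix first renames the table as a safety net):

    // Compute the next value, then swap the table for a sequence.
    import java.sql.Connection;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;

    class TaskSeqConversionSketch {

        static void tableToSequence(Connection connection) throws SQLException {
            long next;
            try (Statement stmt = connection.createStatement();
                    ResultSet rs = stmt.executeQuery("select isnull(max(ID), 0) + 1 from TASK_SEQ")) {
                rs.next();
                next = rs.getLong(1);
            }
            try (Statement stmt = connection.createStatement()) {
                stmt.execute("drop table TASK_SEQ");
                stmt.execute("create sequence TASK_SEQ start with " + next + " increment by 1");
            }
        }
    }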
+ */
+package org.springframework.cloud.dataflow.server.db.migration.sqlserver;
+
+import org.springframework.cloud.dataflow.server.db.migration.AbstractBootVersionMigration;
+
+/**
+ * The boot_version field indicates the version of Spring Boot used by the
+ * application and, by implication, the schema version of the task and
+ * batch tables.
+ * @author Corneil du Plessis
+ * @since 2.11
+ */
+public class V6__Boot3_Boot_Version extends AbstractBootVersionMigration {
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V7__Boot3_Add_Task3_Batch5_Schema.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V7__Boot3_Add_Task3_Batch5_Schema.java
new file mode 100644
index 0000000000..3c1f1437c3
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V7__Boot3_Add_Task3_Batch5_Schema.java
@@ -0,0 +1,202 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration.sqlserver;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
+import org.springframework.cloud.dataflow.server.db.migration.AbstractBoot3InitialSetupMigration;
+
+/**
+ * Adds the spring-cloud-task V3
+ * and spring-batch V5
+ * schemas to support Boot3 compatibility.
+ *
<p>
    Schemas have added table prefix of {@code "BOOT3_"}. + * + * @author Chris Bono + */ +public class V7__Boot3_Add_Task3_Batch5_Schema extends AbstractBoot3InitialSetupMigration { + + public final static String CREATE_TASK_EXECUTION_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " START_TIME DATETIME2 DEFAULT NULL,\n" + + " END_TIME DATETIME2 DEFAULT NULL,\n" + + " TASK_NAME VARCHAR(100),\n" + + " EXIT_CODE INTEGER,\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " ERROR_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME2,\n" + + " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + + " PARENT_EXECUTION_ID BIGINT\n" + + ")"; + + public final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_PARAMS (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_PARAM VARCHAR(2500),\n" + + " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_TASK_TASK_BATCH = + "CREATE TABLE BOOT3_TASK_TASK_BATCH (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " constraint BOOT3_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_TASK_LOCK_TABLE = + "CREATE TABLE BOOT3_TASK_LOCK (\n" + + " LOCK_KEY CHAR(36) NOT NULL,\n" + + " REGION VARCHAR(100) NOT NULL,\n" + + " CLIENT_ID CHAR(36),\n" + + " CREATED_DATE DATETIME2 NOT NULL,\n" + + " constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ")"; + + public final static String CREATE_TASK_EXECUTION_METADATA_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_METADATA (\n" + + " ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_MANIFEST VARCHAR(MAX) NULL,\n" + + " primary key (ID),\n" + + " CONSTRAINT BOOT3_TASK_METADATA_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_TASK_EXECUTION_METADATA_SEQ = + "CREATE SEQUENCE BOOT3_TASK_EXECUTION_METADATA_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NO CACHE NO CYCLE"; + + private final static String CREATE_TASK_SEQ_SEQUENCE = + "CREATE SEQUENCE BOOT3_TASK_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NO CACHE NO CYCLE"; + + public final static String CREATE_BATCH_JOB_INSTANCE_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_INSTANCE (\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_KEY VARCHAR(32) NOT NULL,\n" + + " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ")"; + + public final static String CREATE_BATCH_JOB_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + + " CREATE_TIME DATETIME2 NOT NULL,\n" + + " START_TIME DATETIME2 DEFAULT NULL,\n" + + " END_TIME DATETIME2 DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME2,\n" + + " JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,\n" + + " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + " references BOOT3_BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + + ")"; + + public final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = + 
"CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " PARAMETER_NAME VARCHAR(100) NOT NULL,\n" + + " PARAMETER_TYPE VARCHAR(100) NOT NULL,\n" + + " PARAMETER_VALUE VARCHAR(2500),\n" + + " IDENTIFYING CHAR(1) NOT NULL,\n" + + " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_STEP_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION (\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT NOT NULL,\n" + + " STEP_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " CREATE_TIME DATETIME2 NOT NULL,\n" + + " START_TIME DATETIME2 NOT NULL,\n" + + " END_TIME DATETIME2 DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " COMMIT_COUNT BIGINT,\n" + + " READ_COUNT BIGINT,\n" + + " FILTER_COUNT BIGINT,\n" + + " WRITE_COUNT BIGINT,\n" + + " READ_SKIP_COUNT BIGINT,\n" + + " WRITE_SKIP_COUNT BIGINT,\n" + + " PROCESS_SKIP_COUNT BIGINT,\n" + + " ROLLBACK_COUNT BIGINT,\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME2,\n" + + " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT (\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + + " references BOOT3_BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_BATCH_STEP_EXECUTION_SEQUENCE = + "CREATE SEQUENCE BOOT3_BATCH_STEP_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NO CACHE NO CYCLE"; + + public final static String CREATE_BATCH_JOB_EXECUTION_SEQUENCE = + "CREATE SEQUENCE BOOT3_BATCH_JOB_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NO CACHE NO CYCLE"; + + public final static String CREATE_BATCH_JOB_SEQUENCE = + "CREATE SEQUENCE BOOT3_BATCH_JOB_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775807 NO CACHE NO CYCLE"; + + @Override + public List createTask3Tables() { + return Arrays.asList( + SqlCommand.from(CREATE_TASK_EXECUTION_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_TASK_TASK_BATCH), + SqlCommand.from(CREATE_TASK_SEQ_SEQUENCE), + SqlCommand.from(CREATE_TASK_LOCK_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_SEQ)); + } + + @Override + public List createBatch5Tables() { + return Arrays.asList( + SqlCommand.from(CREATE_BATCH_JOB_INSTANCE_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE), + 
SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE), + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_SEQUENCE), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_SEQUENCE), + SqlCommand.from(CREATE_BATCH_JOB_SEQUENCE)); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V8__AddAggregateViews.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V8__AddAggregateViews.java new file mode 100644 index 0000000000..d5f32a2f27 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V8__AddAggregateViews.java @@ -0,0 +1,22 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.sqlserver; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractAggregateViewMigration; + +public class V8__AddAggregateViews extends AbstractAggregateViewMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V9__DropJobConfigurationLocation.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V9__DropJobConfigurationLocation.java new file mode 100644 index 0000000000..871be2c6d6 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V9__DropJobConfigurationLocation.java @@ -0,0 +1,31 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.sqlserver; + +import java.util.Collections; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; +import org.springframework.cloud.dataflow.server.db.migration.DropColumnSqlCommands; + +/** + * Removes extra JOB_CONFIGURATION_LOCATION columns. 
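Note on the V7 migration above: the DDL strings are deliberately listed in dependency order, so that referenced tables (for example BOOT3_TASK_EXECUTION and BOOT3_BATCH_JOB_EXECUTION) exist before the tables that declare foreign keys against them. A minimal sketch of applying such an ordered list, assuming only a plain DataSource; the project's SqlCommand wrapper and Flyway plumbing are not shown in this diff:

import java.util.List;

import javax.sql.DataSource;

import org.springframework.jdbc.core.JdbcTemplate;

// Sketch only: executes ordered DDL the way the migration's SqlCommand lists
// imply. Each statement is plain DDL with no bind parameters.
public final class OrderedDdlRunner {

	public static void apply(DataSource dataSource, List<String> ddlInOrder) {
		JdbcTemplate jdbc = new JdbcTemplate(dataSource);
		for (String ddl : ddlInOrder) {
			jdbc.execute(ddl);
		}
	}
}
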
+ * @author Corneil du Plessis + */ +public class V9__DropJobConfigurationLocation extends AbstractMigration { + public V9__DropJobConfigurationLocation() { + super(Collections.singletonList(new DropColumnSqlCommands("BOOT3_BATCH_JOB_EXECUTION.JOB_CONFIGURATION_LOCATION"))); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/TaskExplorerFactoryBean.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/TaskExplorerFactoryBean.java deleted file mode 100644 index f4f0af2734..0000000000 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/TaskExplorerFactoryBean.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2016 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.cloud.dataflow.server.job; - -import javax.sql.DataSource; - -import org.springframework.beans.factory.FactoryBean; -import org.springframework.cloud.task.repository.TaskExplorer; -import org.springframework.cloud.task.repository.support.SimpleTaskExplorer; -import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean; -import org.springframework.util.Assert; - -/** - * Factory bean to create a Task Explorer. 
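Note on the V9 migration above: it delegates to DropColumnSqlCommands with a qualified "TABLE.COLUMN" argument. That helper is not shown in this diff; a hypothetical sketch of the guarded, re-runnable drop it implies for SQL Server (class and method names here are illustrative only):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

// Hypothetical: check INFORMATION_SCHEMA first so re-running the migration
// after a partial failure does not error on an already-dropped column.
public final class DropColumnIfExists {

	public static void drop(Connection connection, String table, String column) throws SQLException {
		String check = "SELECT 1 FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = ? AND COLUMN_NAME = ?";
		try (PreparedStatement ps = connection.prepareStatement(check)) {
			ps.setString(1, table);
			ps.setString(2, column);
			try (ResultSet rs = ps.executeQuery()) {
				if (!rs.next()) {
					return; // column already gone, nothing to do
				}
			}
		}
		try (Statement st = connection.createStatement()) {
			// table/column come from migration constants, not user input
			st.executeUpdate("ALTER TABLE " + table + " DROP COLUMN " + column);
		}
	}
}
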
- * - * @author Glenn Renfro - */ -public class TaskExplorerFactoryBean implements FactoryBean<TaskExplorer> { - - private DataSource dataSource; - - private TaskExplorer taskExplorer; - - public TaskExplorerFactoryBean(DataSource dataSource) { - Assert.notNull(dataSource, "dataSource must not be null"); - this.dataSource = dataSource; - } - - @Override - public TaskExplorer getObject() throws Exception { - if (taskExplorer == null) { - taskExplorer = new SimpleTaskExplorer(new TaskExecutionDaoFactoryBean(dataSource)); - } - return taskExplorer; - } - - @Override - public Class<?> getObjectType() { - return TaskExplorer.class; - } - - @Override - public boolean isSingleton() { - return true; - } - -} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/support/StepExecutionProgressInfo.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/support/StepExecutionProgressInfo.java index a2bfa6c217..619ed4ba4a 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/support/StepExecutionProgressInfo.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/support/StepExecutionProgressInfo.java @@ -16,8 +16,9 @@ package org.springframework.cloud.dataflow.server.job.support; +import java.time.Duration; +import java.time.LocalDateTime; import java.util.Arrays; -import java.util.Date; import org.springframework.batch.core.StepExecution; import org.springframework.cloud.dataflow.rest.job.CumulativeHistory; @@ -51,18 +52,18 @@ public class StepExecutionProgressInfo { public StepExecutionProgressInfo(StepExecution stepExecution, StepExecutionHistory stepExecutionHistory) { this.stepExecution = stepExecution; this.stepExecutionHistory = stepExecutionHistory; - Date startTime = stepExecution.getStartTime(); - Date endTime = stepExecution.getEndTime(); + LocalDateTime startTime = stepExecution.getStartTime(); + LocalDateTime endTime = stepExecution.getEndTime(); if (endTime == null) { - endTime = new Date(); + endTime = LocalDateTime.now(); } else { isFinished = true; } if (startTime == null) { - startTime = new Date(); + startTime = LocalDateTime.now(); } - duration = endTime.getTime() - startTime.getTime(); + duration = Duration.between(startTime, endTime).toMillis(); percentageComplete = calculatePercentageComplete(); } @@ -109,7 +110,7 @@ private double calculatePercentageComplete() { double result = 0.0; if (readHistory.getMean() == 0) { percentCompleteBasis = PercentCompleteBasis.DURATION; - result = getDurationBasedEstimate(duration); + result = getDurationBasedEstimate(); } else { percentCompleteBasis = PercentCompleteBasis.READCOUNT; @@ -118,7 +119,7 @@ private double calculatePercentageComplete() { return result; } - private double getDurationBasedEstimate(double duration) { + private double getDurationBasedEstimate() { CumulativeHistory durationHistory = stepExecutionHistory.getDuration(); if (durationHistory.getMean() == 0) { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/support/StepExecutionResourceBuilder.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/support/StepExecutionResourceBuilder.java index 889ca09c2c..86aa35ae3c 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/support/StepExecutionResourceBuilder.java +++ 
b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/support/StepExecutionResourceBuilder.java @@ -18,12 +18,19 @@ import org.springframework.batch.core.Step; import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.launch.NoSuchJobExecutionException; import org.springframework.batch.core.step.tasklet.TaskletStep; import org.springframework.cloud.dataflow.rest.job.support.StepType; import org.springframework.cloud.dataflow.rest.job.support.TaskletType; import org.springframework.cloud.dataflow.rest.resource.StepExecutionResource; +import org.springframework.cloud.dataflow.server.batch.NoSuchStepExecutionException; +import org.springframework.cloud.dataflow.server.controller.JobStepExecutionController; +import org.springframework.cloud.dataflow.server.controller.JobStepExecutionProgressController; import org.springframework.util.Assert; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + /** * Knows how to build a StepExecutionResource out of our domain model * {@link StepExecution}. @@ -34,7 +41,24 @@ public class StepExecutionResourceBuilder { static public StepExecutionResource toModel(StepExecution entity) { - return new StepExecutionResource(entity.getJobExecution().getId(), entity, generateStepType(entity)); + StepExecutionResource resource = new StepExecutionResource(entity.getJobExecution().getId(), entity, generateStepType(entity)); + try { + resource.add( + linkTo( + methodOn(JobStepExecutionController.class) + .getStepExecution(resource.getStepExecution().getJobExecutionId(), resource.getStepExecution().getId()) + ).withSelfRel() + ); + resource.add( + linkTo( + methodOn(JobStepExecutionProgressController.class) + .progress(resource.getStepExecution().getJobExecutionId(), resource.getStepExecution().getId()) + ).withRel("progress") + ); + } catch (NoSuchStepExecutionException | NoSuchJobExecutionException e) { + throw new RuntimeException(e); + } + return resource; } private static String generateStepType(StepExecution stepExecution) { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionDao.java index cdf2eb6417..b0e5f8bdd0 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionDao.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionDao.java @@ -74,6 +74,8 @@ public interface DataflowTaskExecutionDao { * Returns the size of all the task executions with the option to include only the completed executions. * @param onlyCompleted filter by completed task executions * @param taskName the task name, if null then retrieve all the tasks + * + * @return The count of task executions matching inputs. */ Integer getAllTaskExecutionsCount(boolean onlyCompleted, String taskName); @@ -81,6 +83,8 @@ public interface DataflowTaskExecutionDao { * Returns all the task execution IDs of the completed task executions. * @param onlyCompleted filter by completed task executions * @param taskName the task name, if null then retrieve all the tasks + * + * @return The set of all execution ids matching inputs. 
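The StepExecutionResourceBuilder change above uses the spring-hateoas WebMvcLinkBuilder pattern: links are derived from controller method signatures rather than hand-assembled URI strings, so they stay in sync with the actual request mappings. A self-contained sketch of that pattern with a hypothetical controller (not part of this diff):

import org.springframework.hateoas.EntityModel;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RestController;

import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo;
import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn;

@RestController
class HypotheticalStepController {

	@GetMapping("/jobs/executions/{jobId}/steps/{stepId}")
	public ResponseEntity<String> getStepExecution(@PathVariable long jobId, @PathVariable long stepId) {
		return ResponseEntity.ok("step " + stepId);
	}
}

class LinkExample {

	static EntityModel<String> decorate(long jobId, long stepId) {
		EntityModel<String> model = EntityModel.of("payload");
		// methodOn records the invoked mapping and expands {jobId}/{stepId}
		// from the arguments; no URI template literals are repeated here.
		model.add(linkTo(methodOn(HypotheticalStepController.class)
				.getStepExecution(jobId, stepId)).withSelfRel());
		return model;
	}
}

Wrapping the checked NoSuchStepExecutionException/NoSuchJobExecutionException in a RuntimeException, as the builder above does, keeps the static toModel signature unchanged at the cost of an unchecked failure path.
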
*/ Set<Long> getAllTaskExecutionIds(boolean onlyCompleted, String taskName); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionMetadataDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionMetadataDao.java index c94bcb79e1..23af2b762f 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionMetadataDao.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionMetadataDao.java @@ -15,6 +15,7 @@ */ package org.springframework.cloud.dataflow.server.repository; +import java.util.Map; import java.util.Set; import org.springframework.cloud.dataflow.core.TaskManifest; @@ -24,10 +25,12 @@ * Data access object used for manipulating task manifests * * @author Michael Minella + * @author Corneil du Plessis * @since 2.3 */ public interface DataflowTaskExecutionMetadataDao { + /** * Saves a {@code TaskManifest} related to the supplied {@code TaskExecution} * @@ -51,6 +54,13 @@ public interface DataflowTaskExecutionMetadataDao { */ TaskManifest findManifestById(Long id); + /** + * Returns a collection of manifests mapped by id for the supplied ids. + * @param ids list of task execution ids. + * @return map of manifests with id as key. + */ + Map<Long, TaskManifest> findManifestByIds(Set<Long> ids); + /** * Deletes the task manifest records associated with the collection of task execution ids provided. * diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DefaultTaskDefinitionReader.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DefaultTaskDefinitionReader.java new file mode 100644 index 0000000000..1b23b4166d --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DefaultTaskDefinitionReader.java @@ -0,0 +1,36 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.server.repository; + +import org.springframework.cloud.dataflow.core.TaskDefinition; +import org.springframework.cloud.dataflow.server.task.TaskDefinitionReader; + +/** + * Provide a simple interface for reading Task Definitions when required by Aggregate Task Explorer + * @author Corneil du Plessis + */ +public class DefaultTaskDefinitionReader implements TaskDefinitionReader { + private final TaskDefinitionRepository taskDefinitionRepository; + + public DefaultTaskDefinitionReader(TaskDefinitionRepository taskDefinitionRepository) { + this.taskDefinitionRepository = taskDefinitionRepository; + } + + @Override + public TaskDefinition findTaskDefinition(String taskName) { + return taskDefinitionRepository.findByTaskName(taskName); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DefaultTaskDeploymentReader.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DefaultTaskDeploymentReader.java new file mode 100644 index 0000000000..57d24e9605 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DefaultTaskDeploymentReader.java @@ -0,0 +1,46 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.server.repository; + +import org.springframework.cloud.dataflow.core.TaskDeployment; +import org.springframework.cloud.dataflow.server.task.TaskDeploymentReader; + +/** + * Provide a simple interface for reading Task deployments when required by Aggregate Task Explorer + * @author Corneil du Plessis + */ +public class DefaultTaskDeploymentReader implements TaskDeploymentReader { + private final TaskDeploymentRepository taskDeploymentRepository; + + public DefaultTaskDeploymentReader(TaskDeploymentRepository taskDeploymentRepository) { + this.taskDeploymentRepository = taskDeploymentRepository; + } + + @Override + public TaskDeployment getDeployment(String externalTaskId) { + return taskDeploymentRepository.findByTaskDeploymentId(externalTaskId); + } + + @Override + public TaskDeployment getDeployment(String externalTaskId, String platform) { + return taskDeploymentRepository.findByTaskDeploymentIdAndPlatformName(externalTaskId, platform); + } + + @Override + public TaskDeployment findByDefinitionName(String definitionName) { + return taskDeploymentRepository.findTopByTaskDefinitionNameOrderByCreatedOnAsc(definitionName); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowJobExecutionDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowJobExecutionDao.java index bd05d51e59..4aecb3b514 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowJobExecutionDao.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowJobExecutionDao.java @@ -38,6 +38,7 @@ * be migrated to Spring Batch itself eventually. 
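The two reader classes above are deliberately thin adapters over the Spring Data repositories, so the aggregate task explorer can depend on two narrow read-only interfaces instead of the full repository API. A usage sketch under that assumption (the caller here is hypothetical, and TaskDeployment#getPlatformName is inferred from its use elsewhere in the server):

import org.springframework.cloud.dataflow.core.TaskDeployment;
import org.springframework.cloud.dataflow.server.task.TaskDeploymentReader;

// Hypothetical caller: resolve the platform an execution ran on, falling
// back to a default when no deployment record exists.
class PlatformResolver {

	private final TaskDeploymentReader deployments;

	PlatformResolver(TaskDeploymentReader deployments) {
		this.deployments = deployments;
	}

	String platformFor(String externalTaskId, String fallbackPlatform) {
		TaskDeployment deployment = deployments.getDeployment(externalTaskId);
		return deployment != null ? deployment.getPlatformName() : fallbackPlatform;
	}
}
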
* * @author Gunnar Hillert + * @author Corneil du Plessis */ public class JdbcDataflowJobExecutionDao implements DataflowJobExecutionDao { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionDao.java index 0275db6fe2..35a374eaf8 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionDao.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionDao.java @@ -16,19 +16,21 @@ package org.springframework.cloud.dataflow.server.repository; -import java.sql.ResultSet; -import java.sql.SQLException; import java.sql.Types; import java.util.Collections; +import java.util.HashSet; import java.util.Set; import java.util.TreeSet; import javax.sql.DataSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.server.repository.support.SchemaUtilities; import org.springframework.cloud.task.configuration.TaskProperties; import org.springframework.cloud.task.repository.dao.JdbcTaskExecutionDao; import org.springframework.dao.DataAccessException; -import org.springframework.jdbc.core.ResultSetExtractor; import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; import org.springframework.util.Assert; @@ -42,36 +44,37 @@ * @author Gunnar Hillert * @author Glenn Renfro * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ public class JdbcDataflowTaskExecutionDao implements DataflowTaskExecutionDao { - + private final static Logger logger = LoggerFactory.getLogger(JdbcDataflowTaskExecutionDao.class); private final NamedParameterJdbcTemplate jdbcTemplate; private static final String DELETE_TASK_EXECUTIONS = "DELETE FROM %PREFIX%EXECUTION " - + "WHERE task_execution_id in (:taskExecutionIds)"; + + "WHERE TASK_EXECUTION_ID in (:taskExecutionIds)"; private static final String DELETE_TASK_EXECUTION_PARAMS = "DELETE FROM %PREFIX%EXECUTION_PARAMS " - + "WHERE task_execution_id in (:taskExecutionIds)"; + + "WHERE TASK_EXECUTION_ID in (:taskExecutionIds)"; private static final String DELETE_TASK_TASK_BATCH = "DELETE FROM %PREFIX%TASK_BATCH " - + "WHERE task_execution_id in (:taskExecutionIds)"; + + "WHERE TASK_EXECUTION_ID in (:taskExecutionIds)"; - private static final String SELECT_CHILD_TASK_EXECUTION_IDS = "SELECT task_execution_id FROM %PREFIX%EXECUTION " - + "WHERE parent_execution_id in (:parentTaskExecutionIds)"; + private static final String SELECT_CHILD_TASK_EXECUTION_IDS = "SELECT TASK_EXECUTION_ID FROM %PREFIX%EXECUTION " + + "WHERE PARENT_EXECUTION_ID in (:parentTaskExecutionIds)"; private static final String FIND_TASK_EXECUTION_IDS_BY_TASK_NAME = "SELECT TASK_EXECUTION_ID " + "from %PREFIX%EXECUTION where TASK_NAME = :taskName"; - private static final String GET_COMPLETED_TASK_EXECUTIONS_COUNT = "SELECT COUNT(TASK_EXECUTION_ID) " + private static final String GET_COMPLETED_TASK_EXECUTIONS_COUNT = "SELECT COUNT(TASK_EXECUTION_ID) AS count " + "from %PREFIX%EXECUTION where END_TIME IS NOT NULL"; - private static final String GET_ALL_TASK_EXECUTIONS_COUNT = "SELECT COUNT(TASK_EXECUTION_ID) " + private static final String GET_ALL_TASK_EXECUTIONS_COUNT = "SELECT 
COUNT(TASK_EXECUTION_ID) AS count " + "from %PREFIX%EXECUTION"; - private static final String GET_COMPLETED_TASK_EXECUTIONS_COUNT_BY_TASK_NAME = "SELECT COUNT(TASK_EXECUTION_ID) " + private static final String GET_COMPLETED_TASK_EXECUTIONS_COUNT_BY_TASK_NAME = "SELECT COUNT(TASK_EXECUTION_ID) AS count " + "from %PREFIX%EXECUTION where END_TIME IS NOT NULL AND TASK_NAME = :taskName"; - private static final String GET_ALL_TASK_EXECUTIONS_COUNT_BY_TASK_NAME = "SELECT COUNT(TASK_EXECUTION_ID) " + private static final String GET_ALL_TASK_EXECUTIONS_COUNT_BY_TASK_NAME = "SELECT COUNT(TASK_EXECUTION_ID) AS count " + "from %PREFIX%EXECUTION where TASK_NAME = :taskName"; private static final String FIND_ALL_COMPLETED_TASK_EXECUTION_IDS = "SELECT TASK_EXECUTION_ID " @@ -87,7 +90,7 @@ public class JdbcDataflowTaskExecutionDao implements DataflowTaskExecutionDao { + "from %PREFIX%EXECUTION where TASK_NAME = :taskName"; - private TaskProperties taskProperties; + private final TaskProperties taskProperties; /** * @param dataSource used by the dao to execute queries and updates the tables. @@ -104,7 +107,7 @@ public JdbcDataflowTaskExecutionDao(DataSource dataSource, TaskProperties taskPr public int deleteTaskExecutionsByTaskExecutionIds(Set<Long> taskExecutionIds) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource() .addValue("taskExecutionIds", taskExecutionIds); - final String query = getQuery(DELETE_TASK_EXECUTIONS); + final String query = SchemaUtilities.getQuery(DELETE_TASK_EXECUTIONS, this.taskProperties.getTablePrefix()); return this.jdbcTemplate.update(query, queryParameters); } @@ -112,7 +115,7 @@ public int deleteTaskExecutionsByTaskExecutionIds(Set<Long> taskExecutionIds) { public int deleteTaskExecutionParamsByTaskExecutionIds(Set<Long> taskExecutionIds) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource() .addValue("taskExecutionIds", taskExecutionIds); - final String query = getQuery(DELETE_TASK_EXECUTION_PARAMS); + final String query = SchemaUtilities.getQuery(DELETE_TASK_EXECUTION_PARAMS, this.taskProperties.getTablePrefix()); return this.jdbcTemplate.update(query, queryParameters); } @@ -120,27 +123,24 @@ public int deleteTaskExecutionParamsByTaskExecutionIds(Set<Long> taskExecutionId public int deleteTaskTaskBatchRelationshipsByTaskExecutionIds(Set<Long> taskExecutionIds) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource() .addValue("taskExecutionIds", taskExecutionIds); - final String query = getQuery(DELETE_TASK_TASK_BATCH); + final String query = SchemaUtilities.getQuery(DELETE_TASK_TASK_BATCH, this.taskProperties.getTablePrefix()); return this.jdbcTemplate.update(query, queryParameters); } - private String getQuery(String base) { - return StringUtils.replace(base, "%PREFIX%", this.taskProperties.getTablePrefix()); - } + @Override public Set<Long> findChildTaskExecutionIds(Set<Long> taskExecutionIds) { + logger.debug("findChildTaskExecutionIds:{}", taskExecutionIds); final MapSqlParameterSource queryParameters = new MapSqlParameterSource() .addValue("parentTaskExecutionIds", taskExecutionIds); Set<Long> childTaskExecutionIds; try { childTaskExecutionIds = this.jdbcTemplate.query( - getQuery(SELECT_CHILD_TASK_EXECUTION_IDS), queryParameters, - new ResultSetExtractor<Set<Long>>() { - @Override - public Set<Long> extractData(ResultSet resultSet) - throws SQLException, DataAccessException { + SchemaUtilities.getQuery(SELECT_CHILD_TASK_EXECUTION_IDS, this.taskProperties.getTablePrefix()), + queryParameters, + resultSet -> { Set<Long> jobExecutionIds = new TreeSet<>(); @@ -150,19 +150,21 
@@ public Set<Long> extractData(ResultSet resultSet) } return jobExecutionIds; - } }); + Assert.notNull(childTaskExecutionIds, "Expected childTaskExecutionIds"); } catch (DataAccessException e) { childTaskExecutionIds = Collections.emptySet(); } - if (!childTaskExecutionIds.isEmpty()) { - childTaskExecutionIds.addAll(this.findChildTaskExecutionIds(childTaskExecutionIds)); + Set<Long> newChildren = new HashSet<>(childTaskExecutionIds); + newChildren.removeAll(taskExecutionIds); + if(!newChildren.isEmpty()) { + childTaskExecutionIds.addAll(this.findChildTaskExecutionIds(newChildren)); + } } - + logger.debug("findChildTaskExecutionIds:childTaskExecutionIds={}", childTaskExecutionIds); return childTaskExecutionIds; - } @Override @@ -171,11 +173,10 @@ public Set<Long> getTaskExecutionIdsByTaskName(String taskName) { .addValue("taskName", taskName, Types.VARCHAR); try { - return this.jdbcTemplate.query(getQuery(FIND_TASK_EXECUTION_IDS_BY_TASK_NAME), - queryParameters, new ResultSetExtractor<Set<Long>>() { - @Override - public Set<Long> extractData(ResultSet resultSet) - throws SQLException, DataAccessException { + return this.jdbcTemplate.query( + SchemaUtilities.getQuery(FIND_TASK_EXECUTION_IDS_BY_TASK_NAME, this.taskProperties.getTablePrefix()), + queryParameters, + resultSet -> { Set<Long> taskExecutionIds = new TreeSet<>(); while (resultSet.next()) { @@ -183,7 +184,6 @@ public Set<Long> extractData(ResultSet resultSet) .add(resultSet.getLong("TASK_EXECUTION_ID")); } return taskExecutionIds; - } }); } catch (DataAccessException e) { @@ -193,60 +193,54 @@ public Set<Long> extractData(ResultSet resultSet) @Override public Integer getAllTaskExecutionsCount(boolean onlyCompleted, String taskName) { - String QUERY = null; - MapSqlParameterSource queryParameters = null; + String QUERY; + MapSqlParameterSource queryParameters = new MapSqlParameterSource(); if (StringUtils.hasText(taskName)) { - queryParameters = new MapSqlParameterSource() - .addValue("taskName", taskName, Types.VARCHAR); + queryParameters.addValue("taskName", taskName, Types.VARCHAR); QUERY = (onlyCompleted) ? GET_COMPLETED_TASK_EXECUTIONS_COUNT_BY_TASK_NAME : GET_ALL_TASK_EXECUTIONS_COUNT_BY_TASK_NAME; } else { QUERY = (onlyCompleted) ? GET_COMPLETED_TASK_EXECUTIONS_COUNT: GET_ALL_TASK_EXECUTIONS_COUNT; } try { - return this.jdbcTemplate.query(getQuery(QUERY), - queryParameters, new ResultSetExtractor<Integer>() { - @Override - public Integer extractData(ResultSet resultSet) - throws SQLException, DataAccessException { + return this.jdbcTemplate.query( + SchemaUtilities.getQuery(QUERY, this.taskProperties.getTablePrefix()), + queryParameters, + resultSet -> { if (resultSet.next()) { - return resultSet.getInt("COUNT(TASK_EXECUTION_ID)"); + return resultSet.getInt("count"); } - return Integer.valueOf(0); - } + return 0; }); } catch (DataAccessException e) { - return Integer.valueOf(0); + return 0; } } @Override public Set<Long> getAllTaskExecutionIds(boolean onlyCompleted, String taskName) { - String QUERY = null; - MapSqlParameterSource queryParameters = null; + String QUERY; + MapSqlParameterSource queryParameters = new MapSqlParameterSource(); if (StringUtils.hasText(taskName)) { - queryParameters = new MapSqlParameterSource() - .addValue("taskName", taskName, Types.VARCHAR); + queryParameters.addValue("taskName", taskName, Types.VARCHAR); QUERY = (onlyCompleted) ? FIND_ALL_COMPLETED_TASK_EXECUTION_IDS_BY_TASK_NAME : FIND_ALL_TASK_EXECUTION_IDS_BY_TASK_NAME; } else { QUERY = (onlyCompleted) ? 
FIND_ALL_COMPLETED_TASK_EXECUTION_IDS : FIND_ALL_TASK_EXECUTION_IDS; } try { - return this.jdbcTemplate.query(getQuery(QUERY), queryParameters, new ResultSetExtractor<Set<Long>>() { - @Override - public Set<Long> extractData(ResultSet resultSet) - throws SQLException, DataAccessException { + return this.jdbcTemplate.query( + SchemaUtilities.getQuery(QUERY, this.taskProperties.getTablePrefix()), + queryParameters, + resultSet -> { Set<Long> taskExecutionIds = new TreeSet<>(); while (resultSet.next()) { - taskExecutionIds - .add(resultSet.getLong("TASK_EXECUTION_ID")); + taskExecutionIds.add(resultSet.getLong("TASK_EXECUTION_ID")); } return taskExecutionIds; - } }); } catch (DataAccessException e) { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionMetadataDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionMetadataDao.java index 878875c93d..2d70de8974 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionMetadataDao.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionMetadataDao.java @@ -16,6 +16,7 @@ package org.springframework.cloud.dataflow.server.repository; import java.io.IOException; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Set; @@ -26,6 +27,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; import com.fasterxml.jackson.databind.module.SimpleModule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.cloud.dataflow.core.TaskManifest; import org.springframework.cloud.dataflow.registry.support.AppResourceCommon; @@ -33,6 +36,7 @@ import org.springframework.cloud.dataflow.server.repository.support.AppDeploymentRequestMixin; import org.springframework.cloud.dataflow.server.repository.support.Order; import org.springframework.cloud.dataflow.server.repository.support.PagingQueryProvider; +import org.springframework.cloud.dataflow.server.repository.support.SchemaUtilities; import org.springframework.cloud.dataflow.server.repository.support.SqlPagingQueryProviderFactoryBean; import org.springframework.cloud.dataflow.server.service.impl.ResourceDeserializer; import org.springframework.cloud.dataflow.server.service.impl.ResourceMixin; @@ -52,23 +56,24 @@ * JDBC implementation for the {@code DataflowTaskExecutionMetadataDao} * * @author Michael Minella - * @since 2.3 + * @author Corneil du Plessis * @see DataflowTaskExecutionMetadataDao + * @since 2.3 */ public class JdbcDataflowTaskExecutionMetadataDao implements DataflowTaskExecutionMetadataDao { + private final static Logger logger = LoggerFactory.getLogger(JdbcDataflowTaskExecutionMetadataDao.class); - private static final String INSERT_SQL = "INSERT INTO task_execution_metadata (id, task_execution_id, " + - "task_execution_manifest) VALUES (:id, :taskExecutionId, :taskExecutionManifest)"; + private static final String INSERT_SQL = "INSERT INTO %PREFIX%EXECUTION_METADATA (ID, TASK_EXECUTION_ID, " + + "TASK_EXECUTION_MANIFEST) VALUES (:id, :taskExecutionId, :taskExecutionManifest)"; - private static final String FIND_MANIFEST_BY_TASK_EXECUTION_ID = "select m.task_execution_manifest as task_execution_manifest " + - "from task_execution_metadata m inner join " + - 
"TASK_EXECUTION e on m.task_execution_id = e.TASK_EXECUTION_ID " + - "where e.TASK_EXECUTION_ID = :taskExecutionId"; + private static final String FIND_MANIFEST_BY_TASK_EXECUTION_ID = "SELECT M.TASK_EXECUTION_MANIFEST AS TASK_EXECUTION_MANIFEST " + + "FROM %PREFIX%EXECUTION_METADATA M INNER JOIN " + + "%PREFIX%EXECUTION E ON M.TASK_EXECUTION_ID = E.TASK_EXECUTION_ID " + + "WHERE E.TASK_EXECUTION_ID = :taskExecutionId"; + private static final String FIND_MANIFEST_BY_TASK_EXECUTION_IDS = "SELECT M.TASK_EXECUTION_MANIFEST AS TASK_EXECUTION_MANIFEST, M.TASK_EXECUTION_ID AS TASK_EXECUTION_ID " + + "FROM %PREFIX%EXECUTION_METADATA M WHERE M.TASK_EXECUTION_ID in (:taskExecutionIds)"; - private static final String DELETE_MANIFEST_BY_TASK_EXECUTION_IDS = - "DELETE FROM task_execution_metadata " + - "WHERE task_execution_id " + - "IN (:taskExecutionIds)"; + private static final String DELETE_MANIFEST_BY_TASK_EXECUTION_IDS = "DELETE FROM %PREFIX%EXECUTION_METADATA WHERE TASK_EXECUTION_ID IN (:taskExecutionIds)"; private final NamedParameterJdbcTemplate jdbcTemplate; @@ -78,9 +83,14 @@ public class JdbcDataflowTaskExecutionMetadataDao implements DataflowTaskExecuti private final DataSource dataSource; - public JdbcDataflowTaskExecutionMetadataDao(DataSource dataSource, - DataFieldMaxValueIncrementer incrementer) { + private final String tablePrefix; + public JdbcDataflowTaskExecutionMetadataDao( + DataSource dataSource, + DataFieldMaxValueIncrementer incrementer, + String prefix + ) { + this.tablePrefix = prefix; this.incrementer = incrementer; this.jdbcTemplate = new NamedParameterJdbcTemplate(dataSource); @@ -88,7 +98,7 @@ public JdbcDataflowTaskExecutionMetadataDao(DataSource dataSource, this.objectMapper = new ObjectMapper(); SimpleModule module = new SimpleModule(); module.addDeserializer(Resource.class, - new ResourceDeserializer(new AppResourceCommon(new MavenProperties(), new DefaultResourceLoader()))); + new ResourceDeserializer(new AppResourceCommon(new MavenProperties(), new DefaultResourceLoader()))); this.objectMapper.registerModule(module); this.objectMapper.addMixIn(Resource.class, ResourceMixin.class); this.objectMapper.addMixIn(AppDefinition.class, AppDefinitionMixin.class); @@ -104,13 +114,14 @@ public void save(TaskExecution taskExecution, TaskManifest manifest) { final String manifestJson = this.objectMapper.writeValueAsString(manifest); final MapSqlParameterSource queryParameters = new MapSqlParameterSource() - .addValue("id", incrementer.nextLongValue()) - .addValue("taskExecutionId", taskExecution.getExecutionId()) - .addValue("taskExecutionManifest", manifestJson); - - this.jdbcTemplate.update(INSERT_SQL, queryParameters); - } - catch (JsonProcessingException e) { + .addValue("id", incrementer.nextLongValue()) + .addValue("taskExecutionId", taskExecution.getExecutionId()) + .addValue("taskExecutionManifest", manifestJson); + + String sql = SchemaUtilities.getQuery(INSERT_SQL, tablePrefix); + logger.debug("save:sql={}, parameters={}", sql, queryParameters.getValues()); + this.jdbcTemplate.update(sql, queryParameters); + } catch (JsonProcessingException e) { throw new IllegalArgumentException("Unable to serialize manifest", e); } } @@ -118,14 +129,16 @@ public void save(TaskExecution taskExecution, TaskManifest manifest) { @Override public TaskManifest getLatestManifest(String taskName) { Map sortKeys = new HashMap<>(1); - sortKeys.put("e.TASK_EXECUTION_ID", Order.DESCENDING); + sortKeys.put("E.TASK_EXECUTION_ID", Order.DESCENDING); SqlPagingQueryProviderFactoryBean 
sqlPagingQueryProviderFactoryBean = new SqlPagingQueryProviderFactoryBean(); sqlPagingQueryProviderFactoryBean.setDataSource(this.dataSource); - sqlPagingQueryProviderFactoryBean.setSelectClause("task_execution_manifest"); - sqlPagingQueryProviderFactoryBean.setFromClause("task_execution_metadata m inner join TASK_EXECUTION e on m.task_execution_id = e.TASK_EXECUTION_ID"); - sqlPagingQueryProviderFactoryBean.setWhereClause("e.TASK_NAME = :taskName"); + sqlPagingQueryProviderFactoryBean.setSelectClause("TASK_EXECUTION_MANIFEST"); + sqlPagingQueryProviderFactoryBean.setFromClause(SchemaUtilities.getQuery( + "%PREFIX%EXECUTION_METADATA M INNER JOIN %PREFIX%EXECUTION E ON M.TASK_EXECUTION_ID = E.TASK_EXECUTION_ID", + tablePrefix)); + sqlPagingQueryProviderFactoryBean.setWhereClause("E.TASK_NAME = :taskName"); sqlPagingQueryProviderFactoryBean.setSortKeys(sortKeys); try { @@ -134,22 +147,20 @@ public TaskManifest getLatestManifest(String taskName) { queryProvider.init(this.dataSource); final MapSqlParameterSource queryParameters = new MapSqlParameterSource() - .addValue("taskName", taskName); - - return this.jdbcTemplate.queryForObject(queryProvider.getPageQuery(PageRequest.of(0, 1)), - queryParameters, (resultSet, i) -> { - try { - return objectMapper.readValue(resultSet.getString("task_execution_manifest"), TaskManifest.class); - } - catch (IOException e) { - throw new IllegalArgumentException("Unable to deserialize manifest", e); - } - }); - } - catch (EmptyResultDataAccessException erdae) { + .addValue("taskName", taskName); + + String sql = queryProvider.getPageQuery(PageRequest.of(0, 1)); + logger.debug("getLatestManifest:sql={},parameters={}", sql, queryParameters.getValues()); + return this.jdbcTemplate.queryForObject(sql, queryParameters, (resultSet, i) -> { + try { + return objectMapper.readValue(resultSet.getString("TASK_EXECUTION_MANIFEST"), TaskManifest.class); + } catch (IOException e) { + throw new IllegalArgumentException("Unable to deserialize manifest", e); + } + }); + } catch (EmptyResultDataAccessException erdae) { return null; - } - catch (Exception e) { + } catch (Exception e) { throw new IllegalStateException("Unable to generate query", e); } } @@ -157,29 +168,56 @@ public TaskManifest getLatestManifest(String taskName) { @Override public TaskManifest findManifestById(Long id) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource() - .addValue("taskExecutionId", id); + .addValue("taskExecutionId", id); try { - return this.jdbcTemplate.queryForObject(FIND_MANIFEST_BY_TASK_EXECUTION_ID, - queryParameters, - (resultSet, i) -> { - try { - return objectMapper.readValue(resultSet.getString("task_execution_manifest"), TaskManifest.class); - } - catch (IOException e) { - throw new IllegalArgumentException("Unable to deserialize manifest", e); - } - }); - } - catch (EmptyResultDataAccessException erdae) { + String sql = SchemaUtilities.getQuery(FIND_MANIFEST_BY_TASK_EXECUTION_ID, tablePrefix); + logger.debug("findManifestById:sql={}, parameters={}", sql, queryParameters); + return this.jdbcTemplate.queryForObject(sql, queryParameters, (resultSet, i) -> { + try { + return objectMapper.readValue(resultSet.getString("TASK_EXECUTION_MANIFEST"), TaskManifest.class); + } catch (IOException e) { + throw new IllegalArgumentException("Unable to deserialize manifest", e); + } + }); + } catch (EmptyResultDataAccessException erdae) { return null; } } + @Override + public Map findManifestByIds(Set ids) { + final MapSqlParameterSource queryParameters = new 
MapSqlParameterSource() + .addValue("taskExecutionIds", ids); + + try { + String sql = SchemaUtilities.getQuery(FIND_MANIFEST_BY_TASK_EXECUTION_IDS, tablePrefix); + logger.debug("findManifestByIds:sql={}, parameters={}", sql, queryParameters); + final Map result = new HashMap<>(); + this.jdbcTemplate.query(sql, queryParameters, rs -> { + try { + String executionManifest = rs.getString("TASK_EXECUTION_MANIFEST"); + if(executionManifest != null && !executionManifest.trim().isEmpty()) { + result.put(rs.getLong("TASK_EXECUTION_ID"), + objectMapper.readValue(executionManifest, TaskManifest.class)); + } + } + catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + }); + return result; + } catch (EmptyResultDataAccessException erdae) { + return Collections.emptyMap(); + } + } + @Override public int deleteManifestsByTaskExecutionIds(Set taskExecutionIds) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource() - .addValue("taskExecutionIds", taskExecutionIds); - return this.jdbcTemplate.update(DELETE_MANIFEST_BY_TASK_EXECUTION_IDS, queryParameters); + .addValue("taskExecutionIds", taskExecutionIds); + String sql = SchemaUtilities.getQuery(DELETE_MANIFEST_BY_TASK_EXECUTION_IDS, tablePrefix); + logger.debug("deleteManifestsByTaskExecutionIds:sql={}, parameters={}", sql, queryParameters); + return this.jdbcTemplate.update(sql, queryParameters); } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/NoSuchTaskExecutionException.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/NoSuchTaskExecutionException.java index f7ed7b657a..56578fbcf5 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/NoSuchTaskExecutionException.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/NoSuchTaskExecutionException.java @@ -39,6 +39,9 @@ public class NoSuchTaskExecutionException extends RuntimeException { public NoSuchTaskExecutionException(long id) { super("Could not find TaskExecution with id " + id); } + public NoSuchTaskExecutionException(String externalExecutionId, String platform) { + super("Could not find TaskExecution with id " + externalExecutionId + " for platform " + platform); + } /** * Create a new exception that handles multiple {@link TaskExecution} ids. 
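The new findManifestByIds above binds the whole id set into a single IN (:taskExecutionIds) clause. For very large sets, a caller may want to chunk the ids to stay under database bind-parameter limits; a sketch, with the chunk size an assumption rather than a measured limit:

import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

import org.springframework.cloud.dataflow.core.TaskManifest;
import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao;

class ManifestLookup {

	private static final int CHUNK = 500; // assumed safe bind-parameter count

	static Map<Long, TaskManifest> lookup(DataflowTaskExecutionMetadataDao dao, Set<Long> ids) {
		Map<Long, TaskManifest> result = new HashMap<>();
		Set<Long> chunk = new HashSet<>();
		for (Iterator<Long> it = ids.iterator(); it.hasNext(); ) {
			chunk.add(it.next());
			if (chunk.size() == CHUNK || !it.hasNext()) {
				result.putAll(dao.findManifestByIds(chunk));
				chunk.clear();
			}
		}
		return result;
	}
}
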
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskDefinitionRepository.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskDefinitionRepository.java index b4c348f1f7..b7db0b31cf 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskDefinitionRepository.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskDefinitionRepository.java @@ -32,7 +32,7 @@ public interface TaskDefinitionRepository extends KeyValueRepository<TaskDefinition, String> Page<TaskDefinition> findByTaskNameContains(String taskName, Pageable pageable); - Page<TaskDefinition> findByTaskNameContainsAndDslTextContains(String taskName, String dslText, Pageable pageable); + Page<TaskDefinition> findByDescriptionContains(String description, Pageable pageable); Page<TaskDefinition> findByDslTextContains(String dslText, Pageable pageable); @@ -42,4 +42,6 @@ public interface TaskDefinitionRepository extends KeyValueRepository<TaskDefinition, String> - if (temp.toLowerCase().startsWith(keyWordString) && temp.length() > keyWordString.length()) { + if (temp.toLowerCase(Locale.ROOT).startsWith(keyWordString) && temp.length() > keyWordString.length()) { return temp.substring(keyWordString.length()); } else { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/H2PagingQueryProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/H2PagingQueryProvider.java index 48b2268ceb..79b604482a 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/H2PagingQueryProvider.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/H2PagingQueryProvider.java @@ -1,5 +1,5 @@ /* - * Copyright 2016 the original author or authors. + * Copyright 2016-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -22,14 +22,15 @@ * H2 implementation of a {@link PagingQueryProvider} using database specific features. * * @author Glenn Renfro + * @author Chris Bono */ public class H2PagingQueryProvider extends AbstractSqlPagingQueryProvider { @Override public String getPageQuery(Pageable pageable) { - String topClause = new StringBuilder().append("LIMIT ").append(pageable.getOffset()).append(" ") - .append(pageable.getPageSize()).toString(); - return SqlPagingQueryUtils.generateTopJumpToQuery(this, topClause); + String limitClause = new StringBuilder().append("OFFSET ") + .append(pageable.getOffset()).append(" ROWS FETCH NEXT ") + .append(pageable.getPageSize()).append(" ROWS ONLY").toString(); + return SqlPagingQueryUtils.generateLimitJumpToQuery(this, limitClause); } - } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/MariaDBPagingQueryProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/MariaDBPagingQueryProvider.java new file mode 100644 index 0000000000..5844c0ef56 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/MariaDBPagingQueryProvider.java @@ -0,0 +1,35 @@ +/* + * Copyright 2016 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.repository.support; + +import org.springframework.data.domain.Pageable; + +/** + * MariaDB implementation of a {@link PagingQueryProvider} using database specific features. + * + * @author Glenn Renfro + * @author Corneil du Plessis + */ +public class MariaDBPagingQueryProvider extends AbstractSqlPagingQueryProvider { + @Override + public String getPageQuery(Pageable pageable) { + String topClause = "LIMIT " + pageable.getOffset() + ", " + + pageable.getPageSize(); + return SqlPagingQueryUtils.generateLimitJumpToQuery(this, topClause); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SchemaUtilities.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SchemaUtilities.java new file mode 100644 index 0000000000..e5516585bd --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SchemaUtilities.java @@ -0,0 +1,15 @@ +package org.springframework.cloud.dataflow.server.repository.support; + +import org.springframework.util.StringUtils; + +public class SchemaUtilities { + private SchemaUtilities() { + } + + public static String getQuery(String query, String prefix, String defaultPrefix) { + return StringUtils.replace(query, "%PREFIX%", prefix != null ? 
prefix : defaultPrefix); + } + public static String getQuery(String query, String prefix) { + return StringUtils.replace(query, "%PREFIX%", prefix); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SqlPagingQueryProviderFactoryBean.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SqlPagingQueryProviderFactoryBean.java index 776e8e504a..cf7002cb25 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SqlPagingQueryProviderFactoryBean.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SqlPagingQueryProviderFactoryBean.java @@ -16,12 +16,15 @@ package org.springframework.cloud.dataflow.server.repository.support; +import java.util.Collections; import java.util.HashMap; +import java.util.Locale; import java.util.Map; import javax.sql.DataSource; import org.springframework.beans.factory.FactoryBean; +import org.springframework.cloud.dataflow.core.database.support.DatabaseType; import org.springframework.jdbc.support.MetaDataAccessException; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -47,16 +50,19 @@ public class SqlPagingQueryProviderFactoryBean implements FactoryBean<PagingQueryProvider> { private Map<String, Order> sortKeys; - private Map<DatabaseType, PagingQueryProvider> providers = new HashMap<>(); - - { - providers.put(DatabaseType.HSQL, new HsqlPagingQueryProvider()); - providers.put(DatabaseType.H2, new H2PagingQueryProvider()); - providers.put(DatabaseType.MYSQL, new MySqlPagingQueryProvider()); - providers.put(DatabaseType.POSTGRES, new PostgresPagingQueryProvider()); - providers.put(DatabaseType.ORACLE, new OraclePagingQueryProvider()); - providers.put(DatabaseType.SQLSERVER, new SqlServerPagingQueryProvider()); - providers.put(DatabaseType.DB2, new Db2PagingQueryProvider()); + private final static Map<DatabaseType, PagingQueryProvider> providers; + + static { + Map<DatabaseType, PagingQueryProvider> providerMap = new HashMap<>(); + providerMap.put(DatabaseType.HSQL, new HsqlPagingQueryProvider()); + providerMap.put(DatabaseType.H2, new H2PagingQueryProvider()); + providerMap.put(DatabaseType.MYSQL, new MySqlPagingQueryProvider()); + providerMap.put(DatabaseType.MARIADB, new MariaDBPagingQueryProvider()); + providerMap.put(DatabaseType.POSTGRES, new PostgresPagingQueryProvider()); + providerMap.put(DatabaseType.ORACLE, new OraclePagingQueryProvider()); + providerMap.put(DatabaseType.SQLSERVER, new SqlServerPagingQueryProvider()); + providerMap.put(DatabaseType.DB2, new Db2PagingQueryProvider()); + providers = Collections.unmodifiableMap(providerMap); } /** @@ -116,7 +122,7 @@ public PagingQueryProvider getObject() throws Exception { DatabaseType type; try { - type = databaseType != null ? DatabaseType.valueOf(databaseType.toUpperCase()) + type = databaseType != null ? 
DatabaseType.valueOf(databaseType.toUpperCase(Locale.ROOT)) : DatabaseType.fromMetaData(dataSource); } catch (MetaDataAccessException e) { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/UpperCaseSpringPhysicalNamingStrategy.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/UpperCaseSpringPhysicalNamingStrategy.java index 89835fb4e9..9ec5028187 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/UpperCaseSpringPhysicalNamingStrategy.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/UpperCaseSpringPhysicalNamingStrategy.java @@ -15,16 +15,15 @@ */ package org.springframework.cloud.dataflow.server.repository.support; +import org.hibernate.boot.model.naming.CamelCaseToUnderscoresNamingStrategy; import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment; -import org.springframework.boot.orm.jpa.hibernate.SpringPhysicalNamingStrategy; - /** * Override {@code isCaseInsensitive} to always return false * * @author Mark Pollack */ -public class UpperCaseSpringPhysicalNamingStrategy extends SpringPhysicalNamingStrategy { +public class UpperCaseSpringPhysicalNamingStrategy extends CamelCaseToUnderscoresNamingStrategy { @Override protected boolean isCaseInsensitive(JdbcEnvironment jdbcEnvironment) { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/LauncherInitializationService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/LauncherInitializationService.java index 01a9cf24f5..bf4251c41f 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/LauncherInitializationService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/LauncherInitializationService.java @@ -16,6 +16,7 @@ package org.springframework.cloud.dataflow.server.service; import java.util.List; +import java.util.Locale; import java.util.stream.Collectors; import org.slf4j.Logger; @@ -60,17 +61,17 @@ public void initialize(ApplicationReadyEvent event) { launcher.getType()); launcher.setOptions(options); this.launcherRepository.save(launcher); - logger.info(String.format( - "Added '%s' platform account '%s' into Task Launcher repository.", + logger.info( + "Added '{}' platform account '{}' into Task Launcher repository.", platform.getName(), - launcher.getName())); + launcher.getName()); }); }); } private List createMetadataPropertyEntities( List metadataProperties, String type) { - String prefix = KEY_PREFIX + type.toLowerCase(); + String prefix = KEY_PREFIX + type.toLowerCase(Locale.ROOT); return metadataProperties.stream() .filter(p -> p.getId().startsWith(prefix)) .map(ConfigurationMetadataPropertyEntity::new) diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/SpringSecurityAuditorAware.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/SpringSecurityAuditorAware.java index 6ddc22fe80..644e2a6f7e 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/SpringSecurityAuditorAware.java +++ 
b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/SpringSecurityAuditorAware.java @@ -1,5 +1,5 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.springframework.cloud.dataflow.server.service; import java.util.Optional; @@ -26,6 +27,7 @@ /** * * @author Gunnar Hillert + * @author Corneil du Plessis * */ public class SpringSecurityAuditorAware implements AuditorAware { @@ -41,8 +43,8 @@ public Optional getCurrentAuditor() { final boolean authenticationEnabled = securityStateBean.isAuthenticationEnabled(); if (authenticationEnabled && SecurityContextHolder.getContext() != null) { final Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); - if (!(authentication instanceof AnonymousAuthenticationToken)) { - return Optional.of(authentication.getName()); + if (authentication != null && !(authentication instanceof AnonymousAuthenticationToken)) { + return Optional.ofNullable(authentication.getName()); } } return Optional.ofNullable(null); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/StreamService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/StreamService.java index d74167e2cc..5b1b999f0a 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/StreamService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/StreamService.java @@ -96,11 +96,13 @@ public interface StreamService { * @param description description of the stream definition * @param deploy if {@code true}, the stream is deployed upon creation (default is * {@code false}) + * @param deploymentProperties the optional deployment properties to use when the stream is deployed upon creation * @return the created stream definition already exists * @throws InvalidStreamDefinitionException if there are errors in parsing the stream DSL, * resolving the name, or type of applications in the stream */ - StreamDefinition createStream(String streamName, String dsl, String description, boolean deploy); + StreamDefinition createStream(String streamName, String dsl, String description, boolean deploy, + Map deploymentProperties); /** * Deploys the stream with the user provided deployment properties. diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskDeleteService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskDeleteService.java index 1a9f44fec6..3147e73060 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskDeleteService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskDeleteService.java @@ -44,6 +44,26 @@ public interface TaskDeleteService { */ void cleanupExecutions(Set actionsAsSet, Set ids); + /** + * Clean up the resources that resulted from running the task with the given name. 
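+	 * Typical actions are {@code CLEANUP}, which removes the resources the platform created for the execution, and {@code REMOVE_DATA}, which removes the execution records from the task repository.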
+	 *
+	 * @param actionsAsSet the actions to perform
+	 * @param taskName the task name
+	 * @param onlyCompleted whether to include only completed tasks
+	 */
+	void cleanupExecutions(Set<TaskExecutionControllerDeleteAction> actionsAsSet, String taskName, boolean onlyCompleted);
+
+	/**
+	 * Clean up the resources that resulted from running the task with the given name.
+	 *
+	 * @param actionsAsSet the actions to perform
+	 * @param taskName the task name
+	 * @param onlyCompleted whether to include only completed tasks (ignored when {@code includeTasksEndedMinDaysAgo} is specified)
+	 * @param includeTasksEndedMinDaysAgo only include tasks that have ended at least this many days ago
+	 * @since 2.11.0
+	 */
+	void cleanupExecutions(Set<TaskExecutionControllerDeleteAction> actionsAsSet, String taskName, boolean onlyCompleted, Integer includeTasksEndedMinDaysAgo);
+
 	/**
 	 * Delete one or more Task executions.
 	 *
@@ -51,6 +71,13 @@ public interface TaskDeleteService {
 	 */
 	void deleteTaskExecutions(Set<Long> ids);

+	/**
+	 * Delete task executions by name and execution state.
+	 * @param taskName the name of the task executions
+	 * @param onlyCompleted indicator to delete only completed tasks
+	 */
+	void deleteTaskExecutions(String taskName, boolean onlyCompleted);
+
 	/**
 	 * Destroy the task definition. If it is a Composed Task then the task definitions
 	 * required for a ComposedTaskRunner task are also destroyed.
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionCreationService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionCreationService.java
index 2fc649d463..e9fae6a26f 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionCreationService.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionCreationService.java
@@ -30,5 +30,6 @@ public interface TaskExecutionCreationService {
 	 * @param taskName the name to be associated with the {@link TaskExecution}
+	 * @param version the version of the task to be associated with the {@link TaskExecution}
 	 * @return {@link TaskExecution}
 	 */
-	TaskExecution createTaskExecution(String taskName);
+	TaskExecution createTaskExecution(String taskName, String version);
 }
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionInfoService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionInfoService.java
index bb598239a7..1392b869e2 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionInfoService.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionInfoService.java
@@ -18,6 +18,7 @@
 import java.util.List;
 import java.util.Map;
+import java.util.Set;

 import org.springframework.cloud.dataflow.core.AllPlatformsTaskExecutionInformation;
 import org.springframework.cloud.dataflow.server.service.impl.TaskExecutionInformation;
@@ -58,4 +59,18 @@ TaskExecutionInformation findTaskExecutionInformation(String taskName,
 	 * @since 2.3
 	 */
 	List<AppDeploymentRequest> createTaskDeploymentRequests(String taskName, String dslText);
+
+	/**
+	 * Return the names of the child applications of the given composed task definition.
+	 * @param taskName the name of the composed task definition
+	 * @return the child application names
+	 */
+	Set<String> composedTaskChildNames(String taskName);
+
+	/**
+	 * Return the application names used by the given task definition.
+	 * @param taskName the name of the task definition
+	 * @return the task application names
+	 */
+	Set<String> taskNames(String taskName);
 }
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionService.java
index ace3d81f37..73c89b57e7 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionService.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionService.java
@@ -19,6 +19,7 @@
 import java.util.Map;
 import java.util.Set;

+import org.springframework.cloud.dataflow.core.LaunchResponse;
 import org.springframework.cloud.dataflow.core.TaskManifest;

 /**
@@ -32,24 +33,25 @@
  * @author Gunnar Hillert
  * @author David Turanski
  * @author Daniel Serleg
+ * @author Corneil du Plessis
  */
 public interface TaskExecutionService {

 	/**
 	 * Execute a task with the provided task name and optional runtime properties.
 	 *
-	 * @param taskName Name of the task. Must not be null or empty.
+	 * @param taskName                 Name of the task. Must not be null or empty.
 	 * @param taskDeploymentProperties Optional deployment properties. Must not be null.
-	 * @param commandLineArgs Optional runtime commandline argument
+	 * @param commandLineArgs          Optional runtime commandline arguments
-	 * @return the taskExecutionId for the executed task.
+	 * @return the {@link LaunchResponse} for the executed task.
 	 */
-	long executeTask(String taskName, Map<String, String> taskDeploymentProperties, List<String> commandLineArgs);
+	LaunchResponse executeTask(String taskName, Map<String, String> taskDeploymentProperties, List<String> commandLineArgs);

 	/**
 	 * Retrieve logs for the task application.
 	 *
 	 * @param platformName the name of the platform
-	 * @param taskId the ID that uniquely identifies the task
+	 * @param taskId       the ID that uniquely identifies the task
 	 * @return the logs of the task application.
 	 */
 	String getLog(String platformName, String taskId);
@@ -64,31 +66,54 @@ public interface TaskExecutionService {
 	/**
 	 * Request the platform to stop the task executions for the ids provided.
 	 *
-	 * @param ids a set of ids for the task executions to be stopped.
-	 * @param platform The name of the platform where the tasks are executing.
+	 * @param ids      a set of ids for the task executions to be stopped.
+	 * @param platform The name of the platform where the tasks are executing.
 	 */
 	void stopTaskExecution(Set<Long> ids, String platform);

 	/**
 	 * Retrieve the TaskManifest for the execution id provided
-	 * @param id task exectution id
+	 *
+	 * @param id task execution id
 	 * @return {@code TaskManifest} or null if not found.
 	 */
 	TaskManifest findTaskManifestById(Long id);

+	/**
+	 * Retrieve the {@link TaskManifest}s for the provided task execution ids.
+	 * @param ids A set of task execution ids.
+	 * @return collection of manifests mapped by the relevant task execution id.
+	 */
+	Map<Long, TaskManifest> findTaskManifestByIds(Set<Long> ids);
+
 	/**
 	 * Returns all the task execution IDs with the option to include only the completed task executions.
+	 *
 	 * @param onlyCompleted filter by completed task executions
-	 * @param taskName the task name, if null then retrieve all the tasks
+	 * @param taskName      the task name, if null then retrieve all the tasks
+	 * @return the set of execution ids.
 	 * @since 2.8
 	 */
 	Set<Long> getAllTaskExecutionIds(boolean onlyCompleted, String taskName);

 	/**
 	 * Returns the count of all the task execution IDs with the option to include only the completed task executions.
- * @param onlyCompleted filter by completed task executions - * @param taskName the task name, if null then retrieve all the tasks + * + * @param onlyCompleted whether to include only completed task executions + * @param taskName the task name, if null then retrieve all the tasks + * @return the number of executions * @since 2.8 */ Integer getAllTaskExecutionsCount(boolean onlyCompleted, String taskName); + + /** + * Returns the count of all the task execution IDs with the option to include only the completed task executions. + * + * @param onlyCompleted whether to include only completed task executions (ignored when {@code includeTasksEndedMinDaysAgo} is specified) + * @param taskName the task name, if null then retrieve all the tasks + * @param includeTasksEndedMinDaysAgo only include tasks that have ended at least this many days ago + * @return the number of executions, 0 if no data, never null + * @since 2.11.0 + */ + Integer getAllTaskExecutionsCount(boolean onlyCompleted, String taskName, Integer includeTasksEndedMinDaysAgo); } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskJobService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskJobService.java index f79d8b05c1..84f6b48924 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskJobService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskJobService.java @@ -16,8 +16,10 @@ package org.springframework.cloud.dataflow.server.service; +import java.util.Collection; import java.util.Date; -import java.util.List; +import java.util.Map; +import java.util.Set; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.Job; @@ -31,7 +33,9 @@ import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; import org.springframework.cloud.dataflow.server.batch.JobExecutionWithStepCount; import org.springframework.cloud.dataflow.server.job.support.JobNotRestartableException; +import org.springframework.cloud.dataflow.server.service.impl.TaskConfigurationProperties; import org.springframework.cloud.task.repository.TaskExecution; +import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; /** @@ -40,6 +44,7 @@ * * @author Glenn Renfro. * @author Gunnar Hillert + * @author Corneil du Plessis */ public interface TaskJobService { @@ -50,29 +55,28 @@ public interface TaskJobService { * @param pageable enumerates the data to be returned. * @return List containing {@link TaskJobExecution}s. * @throws NoSuchJobExecutionException in the event that a job execution id specified is - * not present when looking up stepExecutions for the result. + * not present when looking up stepExecutions for the result. */ - List listJobExecutions(Pageable pageable) throws NoSuchJobExecutionException; + Page listJobExecutions(Pageable pageable) throws NoSuchJobExecutionException; /** * Retrieves Pageable list of {@link JobExecutionWithStepCount} from the JobRepository * with a specific jobName and matches the data with a task id. * * @param pageable enumerates the data to be returned. - * @param jobName the name of the job for which to findByTaskNameContains. + * @param jobName the name of the job for which to findByTaskNameContains. * @return List containing {@link JobExecutionWithStepCount}s. 
 	 * @throws NoSuchJobException if the job with the given name does not exist.
 	 */
-	List listJobExecutionsForJobWithStepCount(Pageable pageable, String jobName)
-			throws NoSuchJobException;
+	Page listJobExecutionsForJobWithStepCount(Pageable pageable, String jobName) throws NoSuchJobException;

 	/**
 	 * Retrieves a JobExecution from the JobRepository and matches it with a task id.
 	 *
-	 * @param id the id of the {@link JobExecution}
+	 * @param id  the id of the {@link JobExecution}
 	 * @return the {@link TaskJobExecution}s associated with the id.
 	 * @throws NoSuchJobExecutionException if the specified job execution for the id does not
-	 * exist.
+	 *                                     exist.
 	 */
 	TaskJobExecution getJobExecution(long id) throws NoSuchJobExecutionException;

@@ -81,59 +85,49 @@ List listJobExecutionsForJobWithStepCount(Pageable pageable, S
 	 * specific jobName and matches the data with the associated JobExecutions.
 	 *
 	 * @param pageable enumerates the data to be returned.
-	 * @param jobName the name of the job for which to findByTaskNameContains.
+	 * @param jobName  the name of the job for which to findByTaskNameContains.
 	 * @return List containing {@link JobInstanceExecutions}.
 	 * @throws NoSuchJobException if the job for the jobName specified does not exist.
 	 */
-	List listTaskJobInstancesForJobName(Pageable pageable, String jobName)
-			throws NoSuchJobException;
+	Page listTaskJobInstancesForJobName(Pageable pageable, String jobName) throws NoSuchJobException;

 	/**
 	 * Retrieves a {@link JobInstance} from the JobRepository and matches it with the
 	 * associated {@link JobExecution}s.
 	 *
-	 * @param id the id of the {@link JobInstance}
+	 * @param id  the id of the {@link JobInstance}
 	 * @return the {@link JobInstanceExecutions} associated with the id.
 	 * @throws NoSuchJobInstanceException if job instance id does not exist.
-	 * @throws NoSuchJobException if the job for the job instance does not exist.
+	 * @throws NoSuchJobException         if the job for the job instance does not exist.
 	 */
 	JobInstanceExecutions getJobInstance(long id) throws NoSuchJobInstanceException, NoSuchJobException;

 	/**
-	 * Retrieves the total number of job instances for a job name.
-	 *
-	 * @param jobName the name of the job instance.
-	 * @return the number of job instances associated with the jobName.
-	 * @throws NoSuchJobException if the job for jobName specified does not exist.
-	 */
-	int countJobInstances(String jobName) throws NoSuchJobException;
-
-	/**
-	 * Retrieves the total number of the job executions.
-	 *
-	 * @return the total number of job executions.
-	 */
-	int countJobExecutions();
-
-	/**
-	 * Retrieves the total number {@link JobExecution} that match a specific job name.
+	 * Restarts a {@link JobExecution} if the respective {@link JobExecution} is actually
+	 * deemed restartable. Otherwise a {@link JobNotRestartableException} is thrown.
+	 * The system will use {@link TaskConfigurationProperties#isUseJsonJobParameters()} to
+	 * determine the {@link org.springframework.batch.core.JobParameter} serializer.
 	 *
-	 * @param jobName the job name to findByTaskNameContains.
-	 * @param status the status of the job execution
-	 * @return the number of {@link JobExecution}s that match the job name.
-	 * @throws NoSuchJobException if the job for the jobName does not exist.
+	 * @param jobExecutionId The id of the JobExecution to restart.
+	 * @throws NoSuchJobExecutionException if the JobExecution for the provided id does not
+	 *                                     exist.
 	 */
-	int countJobExecutionsForJob(String jobName, BatchStatus status) throws NoSuchJobException;
+	void restartJobExecution(long jobExecutionId) throws NoSuchJobExecutionException;

 	/**
-	 * Restarts a {@link JobExecution} IF the respective {@link JobExecution} is actually
-	 * deemed restartable. Otherwise a {@link JobNotRestartableException} is being thrown.
+	 * Restarts a {@link JobExecution} if the respective {@link JobExecution} is actually
+	 * deemed restartable. Otherwise, a {@link JobNotRestartableException} is thrown.
 	 *
 	 * @param jobExecutionId The id of the JobExecution to restart.
+	 * @param useJsonJobParameters if set to true, dataflow will serialize job parameters to the command line using the
+	 *                             format provided by {@code JsonJobParametersConverter}.
+	 *                             If set to false, dataflow will use {@code DefaultJobParametersConverter}.
+	 *                             If null, dataflow will use {@link TaskConfigurationProperties#isUseJsonJobParameters()}
+	 *                             to determine the {@link org.springframework.batch.core.JobParameter} serializer.
 	 * @throws NoSuchJobExecutionException if the JobExecution for the provided id does not
-	 * exist.
+	 *                                     exist.
 	 */
-	void restartJobExecution(long jobExecutionId) throws NoSuchJobExecutionException;
+	void restartJobExecution(long jobExecutionId, Boolean useJsonJobParameters) throws NoSuchJobExecutionException;

 	/**
 	 * Requests a {@link JobExecution} to stop.
@@ -143,11 +137,11 @@ List listTaskJobInstancesForJobName(Pageable pageable, St
 	 * responsibility of the implementor of the {@link Job} to react to that request.
 	 * Furthermore, this method does not interfere with the associated {@link TaskExecution}.
 	 *
-	 * @param jobExecutionId The id of the {@link JobExecution} to stop
-	 * @throws NoSuchJobExecutionException thrown if no job execution exists for the
-	 * jobExecutionId.
+	 * @param jobExecutionId The id of the {@link JobExecution} to stop.
+	 * @throws NoSuchJobExecutionException     thrown if no job execution exists for the
+	 *                                         jobExecutionId.
 	 * @throws JobExecutionNotRunningException thrown if a stop is requested on a job that is
-	 * not running.
+	 *                                         not running.
 	 * @see org.springframework.cloud.dataflow.server.batch.JobService#stop(Long)
 	 */
 	void stopJobExecution(long jobExecutionId) throws NoSuchJobExecutionException, JobExecutionNotRunningException;
@@ -158,24 +152,27 @@ List listTaskJobInstancesForJobName(Pageable pageable, St
 	 *
 	 * @param pageable enumerates the data to be returned.
 	 * @return List containing {@link TaskJobExecution}s.
-	 *
 	 * @throws NoSuchJobExecutionException thrown if the job execution specified does not
-	 * exist.
+	 *                                     exist.
 	 */
-	List listJobExecutionsWithStepCount(Pageable pageable) throws NoSuchJobExecutionException;
+	Page listJobExecutionsWithStepCount(Pageable pageable) throws NoSuchJobExecutionException;

 	/**
 	 * Retrieves Pageable list of {@link JobExecution} from the JobRepository with a specific
 	 * jobName, status and matches the data with a task id.
 	 *
 	 * @param pageable enumerates the data to be returned.
-	 * @param jobName the name of the job for which to findByTaskNameContains.
-	 * @param status the BatchStatus of the job execution.
+	 * @param jobName  the name of the job for which to findByTaskNameContains.
+	 * @param status   the BatchStatus of the job execution.
 	 * @return List containing {@link TaskJobExecution}s.
-	 * @throws NoSuchJobException if the job with the given name does not exist.
+	 * @throws NoSuchJobException          if the job with the given name does not exist.
+	 * @throws NoSuchJobExecutionException if the job execution with the given name does not exist.
 	 */
-	List listJobExecutionsForJob(Pageable pageable, String jobName, BatchStatus status)
-			throws NoSuchJobException;
+	Page listJobExecutionsForJob(
+			Pageable pageable,
+			String jobName,
+			BatchStatus status
+	) throws NoSuchJobException, NoSuchJobExecutionException;

 	/**
 	 * Retrieves Pageable list of {@link JobExecutionWithStepCount} from the JobRepository
@@ -183,34 +180,44 @@ List listJobExecutionsForJob(Pageable pageable, String jobName
 	 *
 	 * @param pageable enumerates the data to be returned.
 	 * @param fromDate the date which start date must be greater than.
-	 * @param toDate the date which start date must be less than.
+	 * @param toDate   the date which start date must be less than.
 	 * @return List containing {@link JobExecutionWithStepCount}s.
 	 * @throws NoSuchJobException if the job with the given name does not exist.
 	 */
-	List listJobExecutionsForJobWithStepCount(Pageable pageable, Date fromDate, Date toDate)
-			throws NoSuchJobException;
+	Page listJobExecutionsForJobWithStepCount(Pageable pageable, Date fromDate, Date toDate) throws NoSuchJobException;

 	/**
 	 * Retrieves Pageable list of {@link JobExecutionWithStepCount} from the JobRepository
 	 * filtered by the job instance id.
 	 *
-	 * @param pageable enumerates the data to be returned.
+	 * @param pageable      enumerates the data to be returned.
 	 * @param jobInstanceId the job instance id associated with the execution.
 	 * @return List containing {@link JobExecutionWithStepCount}s.
 	 * @throws NoSuchJobException if the job with the given name does not exist.
 	 */
-	List listJobExecutionsForJobWithStepCountFilteredByJobInstanceId(Pageable pageable, int jobInstanceId)
-			throws NoSuchJobException;
+	Page listJobExecutionsForJobWithStepCountFilteredByJobInstanceId(
+			Pageable pageable,
+			int jobInstanceId) throws NoSuchJobException;

 	/**
 	 * Retrieves Pageable list of {@link JobExecutionWithStepCount} from the JobRepository
 	 * filtered by the task execution id.
 	 *
-	 * @param pageable enumerates the data to be returned.
+	 * @param pageable        enumerates the data to be returned.
 	 * @param taskExecutionId the task execution id associated with the execution.
 	 * @return List containing {@link JobExecutionWithStepCount}s.
 	 * @throws NoSuchJobException if the job with the given name does not exist.
 	 */
-	List listJobExecutionsForJobWithStepCountFilteredByTaskExecutionId(Pageable pageable, int taskExecutionId)
-			throws NoSuchJobException;
+	Page listJobExecutionsForJobWithStepCountFilteredByTaskExecutionId(
+			Pageable pageable,
+			int taskExecutionId
+	) throws NoSuchJobException;
+
+	/**
+	 * Returns the job execution ids associated with the given task execution ids.
+	 * @param taskExecutionIds the task execution ids
+	 * @return a map of task execution id to the set of associated job execution ids
+	 */
+	Map<Long, Set<Long>> getJobExecutionIdsByTaskExecutionIds(Collection<Long> taskExecutionIds);
+
 }
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/AppDeploymentRequestCreator.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/AppDeploymentRequestCreator.java
index 4fa6899ede..070ad1742f 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/AppDeploymentRequestCreator.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/AppDeploymentRequestCreator.java
@@ -48,6 +48,8 @@
 /**
  * Create the list of {@link AppDeploymentRequest}s from a {@link StreamDefinition} and
  * deployment properties map.
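+ * Requests are created in the deployment order determined by {@code StreamDefinitionServiceUtils#getDeploymentOrderIterator}.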
+ * * @author Eric Bottard * @author Mark Fisher * @author Patrick Peralta @@ -69,10 +70,12 @@ public class AppDeploymentRequestCreator { private final StreamDefinitionService streamDefinitionService; - public AppDeploymentRequestCreator(AppRegistryService appRegistry, - CommonApplicationProperties commonApplicationProperties, - ApplicationConfigurationMetadataResolver metadataResolver, - StreamDefinitionService streamDefinitionService) { + + public AppDeploymentRequestCreator( + AppRegistryService appRegistry, + CommonApplicationProperties commonApplicationProperties, + ApplicationConfigurationMetadataResolver metadataResolver, + StreamDefinitionService streamDefinitionService) { Assert.notNull(appRegistry, "AppRegistryService must not be null"); Assert.notNull(commonApplicationProperties, "CommonApplicationProperties must not be null"); Assert.notNull(metadataResolver, "MetadataResolver must not be null"); @@ -83,20 +86,23 @@ public AppDeploymentRequestCreator(AppRegistryService appRegistry, this.streamDefinitionService = streamDefinitionService; } - public List createUpdateRequests(StreamDefinition streamDefinition, - Map updateProperties) { + public List createUpdateRequests( + StreamDefinition streamDefinition, + Map updateProperties + ) { List appDeploymentRequests = new ArrayList<>(); if (updateProperties == null) { updateProperties = Collections.emptyMap(); } - Iterator iterator = StreamDefinitionServiceUtils.getDeploymentOrderIterator(this.streamDefinitionService.getAppDefinitions(streamDefinition)); + Iterator iterator = StreamDefinitionServiceUtils.getDeploymentOrderIterator(this.streamDefinitionService.getAppDefinitions( + streamDefinition)); while (iterator.hasNext()) { StreamAppDefinition currentApp = iterator.next(); ApplicationType type = currentApp.getApplicationType(); AppRegistration appRegistration = this.appRegistry.find(currentApp.getRegisteredAppName(), type); Assert.notNull(appRegistration, - String.format("no application '%s' of type '%s' exists in the registry", - currentApp.getName(), type)); + String.format("no application '%s' of type '%s' exists in the registry", + currentApp.getName(), type)); String version = extractAppVersionProperty(currentApp, updateProperties); List commandlineArguments = new ArrayList<>(); @@ -105,19 +111,20 @@ public List createUpdateRequests(StreamDefinition streamDe } Map appUpdateTimeProperties = extractAppProperties(currentApp, updateProperties); Map deployerDeploymentProperties = DeploymentPropertiesUtils - .extractAndQualifyDeployerProperties(updateProperties, currentApp.getName()); + .extractAndQualifyDeployerProperties(updateProperties, currentApp.getName()); Resource appResource = appRegistry.getAppResource(appRegistration); Resource metadataResource = appRegistry.getAppMetadataResource(appRegistration); Map expandedAppUpdateTimeProperties = (appUpdateTimeProperties.isEmpty()) ? 
new HashMap<>() : - this.visibleProperties.qualifyProperties(appUpdateTimeProperties, metadataResource); + this.visibleProperties.qualifyProperties(appUpdateTimeProperties, metadataResource); expandedAppUpdateTimeProperties.put(DataFlowPropertyKeys.STREAM_APP_TYPE, type.toString()); - AppDefinition appDefinition = new AppDefinition(currentApp.getName(), expandedAppUpdateTimeProperties); - AppDeploymentRequest request = new AppDeploymentRequest(appDefinition, appResource, - deployerDeploymentProperties, commandlineArguments); + AppDefinition appDefinition = new AppDefinition(currentApp.getName(), expandedAppUpdateTimeProperties); + AppDeploymentRequest request = new AppDeploymentRequest(appDefinition, appResource, + deployerDeploymentProperties, commandlineArguments); + logger.debug("createUpdateRequests:request:{}", request); appDeploymentRequests.add(request); } return appDeploymentRequests; @@ -125,6 +132,9 @@ public List createUpdateRequests(StreamDefinition streamDe private String extractAppVersionProperty(StreamAppDefinition appDefinition, Map updateProperties) { String versionPrefix = String.format("version.%s", appDefinition.getName()); + if (updateProperties.containsKey(versionPrefix)) { + return updateProperties.get(versionPrefix); + } for (Map.Entry entry : updateProperties.entrySet()) { if (entry.getKey().startsWith(versionPrefix)) { return entry.getValue(); @@ -136,45 +146,50 @@ private String extractAppVersionProperty(StreamAppDefinition appDefinition, Map< /** * Create a list of {@link AppDeploymentRequest}s from the provided * {@link StreamDefinition} and map of deployment properties. - * @param streamDefinition the stream definition + * + * @param streamDefinition the stream definition * @param streamDeploymentProperties the stream's deployment properties + * @param platformType the platform types to include * @return list of AppDeploymentRequests */ - public List createRequests(StreamDefinition streamDefinition, - Map streamDeploymentProperties, String platformType) { + public List createRequests( + StreamDefinition streamDefinition, + Map streamDeploymentProperties, String platformType + ) { List appDeploymentRequests = new ArrayList<>(); if (streamDeploymentProperties == null) { streamDeploymentProperties = Collections.emptyMap(); } - Iterator iterator = StreamDefinitionServiceUtils.getDeploymentOrderIterator(this.streamDefinitionService.getAppDefinitions(streamDefinition)); + Iterator iterator = StreamDefinitionServiceUtils.getDeploymentOrderIterator(this.streamDefinitionService.getAppDefinitions( + streamDefinition)); int nextAppCount = 0; boolean isDownStreamAppPartitioned = false; while (iterator.hasNext()) { StreamAppDefinition currentApp = iterator.next(); AppRegistration appRegistration = this.appRegistry.find(currentApp.getRegisteredAppName(), currentApp.getApplicationType()); Assert.notNull(appRegistration, String.format("no application '%s' of type '%s' exists in the registry", - currentApp.getName(), currentApp.getApplicationType())); + currentApp.getName(), currentApp.getApplicationType())); Map appDeployTimeProperties = extractAppProperties(currentApp, streamDeploymentProperties); Map deployerDeploymentProperties = DeploymentPropertiesUtils - .extractAndQualifyDeployerProperties(streamDeploymentProperties, currentApp.getName()); + .extractAndQualifyDeployerProperties(streamDeploymentProperties, currentApp.getName()); deployerDeploymentProperties.put(AppDeployer.GROUP_PROPERTY_KEY, currentApp.getStreamName()); + String version = 
extractAppVersionProperty(currentApp, streamDeploymentProperties); List commandlineArguments = new ArrayList<>(); if (version != null) { // TODO ensure new version as a resource exists and load that AppRegistration commandlineArguments.add(version); } - // Set instance count property if (deployerDeploymentProperties.containsKey(AppDeployer.COUNT_PROPERTY_KEY)) { appDeployTimeProperties.put(StreamPropertyKeys.INSTANCE_COUNT, - deployerDeploymentProperties.get(AppDeployer.COUNT_PROPERTY_KEY)); + deployerDeploymentProperties.get(AppDeployer.COUNT_PROPERTY_KEY)); } boolean upstreamAppSupportsPartition = upstreamAppHasPartitionInfo(streamDefinition, currentApp, - streamDeploymentProperties); + streamDeploymentProperties); if (currentApp.getApplicationType() != ApplicationType.app) { if (upstreamAppSupportsPartition) { @@ -194,8 +209,8 @@ public List createRequests(StreamDefinition streamDefiniti isDownStreamAppPartitioned = isPartitionedConsumer(appDeployTimeProperties, upstreamAppSupportsPartition); } - logger.info(String.format("Creating resource with [%s] for application [%s]", - appRegistration.getUri().toString(), currentApp.getName())); + logger.info("Creating resource with [{}] for application [{}]", + appRegistration.getUri().toString(), currentApp.getName()); Resource appResource = this.appRegistry.getAppResource(appRegistration); Resource metadataResource = this.appRegistry.getAppMetadataResource(appRegistration); @@ -214,13 +229,12 @@ public List createRequests(StreamDefinition streamDefiniti // Merge *definition time* app properties with *deployment time* properties // and expand them to their long form if applicable AppDefinition revisedDefinition = mergeAndExpandAppProperties(currentApp, metadataResource, - appDeployTimeProperties); + appDeployTimeProperties); AppDeploymentRequest request = new AppDeploymentRequest(revisedDefinition, appResource, - deployerDeploymentProperties, commandlineArguments); + deployerDeploymentProperties, commandlineArguments); - logger.debug("Created AppDeploymentRequest = " + request.toString() + " AppDefinition = " - + request.getDefinition().toString()); + logger.debug("Created AppDeploymentRequest = {}, AppDefinition = {}", request, request.getDefinition()); appDeploymentRequests.add(request); } return appDeploymentRequests; @@ -229,39 +243,38 @@ public List createRequests(StreamDefinition streamDefiniti private void contributeCommonApplicationProperties(String platformType, Map appDeployTimeProperties) { String platformTypePrefix = platformType + "."; this.commonApplicationProperties.getStreamResourceProperties() - .ifPresent(defaults -> defaults.entrySet().stream() - .filter(e -> e.getValue() != null) - .filter(e -> e.getKey().toString().startsWith(platformTypePrefix)) - .forEach(e -> appDeployTimeProperties.putIfAbsent( - e.getKey().toString().replaceFirst(platformTypePrefix, ""), e.getValue().toString()))); + .ifPresent(defaults -> defaults.entrySet().stream() + .filter(e -> e.getValue() != null) + .filter(e -> e.getKey().toString().startsWith(platformTypePrefix)) + .forEach(e -> appDeployTimeProperties.putIfAbsent( + e.getKey().toString().replaceFirst(platformTypePrefix, ""), e.getValue().toString()))); } /** * Extract and return a map of properties for a specific app within the deployment * properties of a stream. 
* - * @param appDefinition the {@link StreamAppDefinition} for which to return a map of - * properties + * @param appDefinition the {@link StreamAppDefinition} for which to return a map of + * properties * @param streamDeploymentProperties deployment properties for the stream that the app is - * defined in + * defined in * @return map of properties for an app */ - /* default */ Map extractAppProperties(StreamAppDefinition appDefinition, - Map streamDeploymentProperties) { - Map appDeploymentProperties = new HashMap<>(); - appDeploymentProperties.putAll(this.commonApplicationProperties.getStream()); - // add properties with wild card prefix - String wildCardProducerPropertyPrefix = "app.*.producer."; - String wildCardConsumerPropertyPrefix = "app.*.consumer."; + /* default */ Map extractAppProperties( + StreamAppDefinition appDefinition, + Map streamDeploymentProperties + ) { + Map appDeploymentProperties = new HashMap<>(this.commonApplicationProperties.getStream()); String wildCardPrefix = "app.*."; - parseAndPopulateProperties(streamDeploymentProperties, appDeploymentProperties, wildCardProducerPropertyPrefix, - wildCardConsumerPropertyPrefix, wildCardPrefix); + parseAndPopulateProperties(streamDeploymentProperties, appDeploymentProperties, wildCardPrefix); // add application specific properties - String producerPropertyPrefix = String.format("app.%s.producer.", appDefinition.getName()); - String consumerPropertyPrefix = String.format("app.%s.consumer.", appDefinition.getName()); - String appPrefix = String.format("app.%s.", appDefinition.getName()); - parseAndPopulateProperties(streamDeploymentProperties, appDeploymentProperties, producerPropertyPrefix, - consumerPropertyPrefix, appPrefix); + List names = new ArrayList<>(); + names.add(String.format("app.%s.", appDefinition.getName())); + if (!appDefinition.getName().equals(appDefinition.getRegisteredAppName())) { + names.add(appDefinition.getRegisteredAppName()); + } + parseAndPopulateProperties(streamDeploymentProperties, appDeploymentProperties, names.toArray(new String[0])); + logger.debug("extractAppProperties:{}", appDeploymentProperties); return appDeploymentProperties; } @@ -269,45 +282,48 @@ private void contributeCommonApplicationProperties(String platformType, Map streamDeploymentProperties) { - Iterator iterator = StreamDefinitionServiceUtils.getDeploymentOrderIterator(this.streamDefinitionService.getAppDefinitions(streamDefinition)); + /* default */ boolean upstreamAppHasPartitionInfo( + StreamDefinition streamDefinition, StreamAppDefinition currentApp, + Map streamDeploymentProperties + ) { + Iterator iterator = StreamDefinitionServiceUtils.getDeploymentOrderIterator(this.streamDefinitionService.getAppDefinitions( + streamDefinition)); while (iterator.hasNext()) { StreamAppDefinition app = iterator.next(); if (app.equals(currentApp) && iterator.hasNext()) { StreamAppDefinition prevApp = iterator.next(); Map appDeploymentProperties = extractAppProperties(prevApp, streamDeploymentProperties); return appDeploymentProperties.containsKey(BindingPropertyKeys.OUTPUT_PARTITION_KEY_EXPRESSION) - || appDeploymentProperties - .containsKey(BindingPropertyKeys.OUTPUT_PARTITION_KEY_EXTRACTOR_CLASS); + || appDeploymentProperties + .containsKey(BindingPropertyKeys.OUTPUT_PARTITION_KEY_EXTRACTOR_CLASS); } } return false; } - /* default */ void parseAndPopulateProperties(Map streamDeploymentProperties, - Map appDeploymentProperties, String producerPropertyPrefix, - String consumerPropertyPrefix, - String appPrefix) { + /* default */ void 
parseAndPopulateProperties( + Map streamDeploymentProperties, + Map appDeploymentProperties, + String... prefixes + ) { for (Map.Entry entry : streamDeploymentProperties.entrySet()) { - if (entry.getKey().startsWith(appPrefix)) { - if (entry.getKey().startsWith(producerPropertyPrefix)) { - appDeploymentProperties.put(BindingPropertyKeys.OUTPUT_BINDING_KEY_PREFIX - + entry.getKey().substring(appPrefix.length()), entry.getValue()); - } - else if (entry.getKey().startsWith(consumerPropertyPrefix)) { - appDeploymentProperties.put( - BindingPropertyKeys.INPUT_BINDING_KEY_PREFIX + entry.getKey().substring(appPrefix.length()), - entry.getValue()); - } - else { - appDeploymentProperties.put(entry.getKey().substring(appPrefix.length()), entry.getValue()); + for (String prefix : prefixes) { + String key = entry.getKey(); + if (key.startsWith(prefix)) { + String value = entry.getValue(); + if (key.startsWith(prefix + "producer")) { + appDeploymentProperties.put(BindingPropertyKeys.OUTPUT_BINDING_KEY_PREFIX + key.substring(prefix.length()), value); + } else if (key.startsWith(prefix + "consumer")) { + appDeploymentProperties.put(BindingPropertyKeys.INPUT_BINDING_KEY_PREFIX + key.substring(prefix.length()), value); + } else { + appDeploymentProperties.put(key.substring(prefix.length()), value); + } } } } @@ -318,8 +334,11 @@ else if (entry.getKey().startsWith(consumerPropertyPrefix)) { * merged and short form parameters have been expanded to their long form (amongst the * included supported properties of the app) if applicable. */ - /* default */ AppDefinition mergeAndExpandAppProperties(StreamAppDefinition original, Resource metadataResource, - Map appDeployTimeProperties) { + /* default */ + AppDefinition mergeAndExpandAppProperties( + StreamAppDefinition original, Resource metadataResource, + Map appDeployTimeProperties + ) { Map merged = new HashMap<>(original.getProperties()); merged.putAll(appDeployTimeProperties); merged = this.visibleProperties.qualifyProperties(merged, metadataResource); @@ -330,11 +349,12 @@ else if (entry.getKey().startsWith(consumerPropertyPrefix)) { /** * Add app properties for producing partitioned data to the provided properties. 
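+	 * (Specifically, the downstream partition count is set and, when none is supplied, a default partition key expression is applied.)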
* - * @param properties properties to update + * @param properties properties to update * @param nextInstanceCount the number of instances for the next (downstream) app in the - * stream + * stream */ - /* default */ void updateProducerPartitionProperties(Map properties, int nextInstanceCount) { + /* default */ + void updateProducerPartitionProperties(Map properties, int nextInstanceCount) { properties.put(BindingPropertyKeys.OUTPUT_PARTITION_COUNT, String.valueOf(nextInstanceCount)); if (!properties.containsKey(BindingPropertyKeys.OUTPUT_PARTITION_KEY_EXPRESSION)) { properties.put(BindingPropertyKeys.OUTPUT_PARTITION_KEY_EXPRESSION, DEFAULT_PARTITION_KEY_EXPRESSION); @@ -346,7 +366,8 @@ else if (entry.getKey().startsWith(consumerPropertyPrefix)) { * * @param properties properties to update */ - /* default */ void updateConsumerPartitionProperties(Map properties) { + /* default */ + void updateConsumerPartitionProperties(Map properties) { properties.put(BindingPropertyKeys.INPUT_PARTITIONED, "true"); } @@ -357,7 +378,8 @@ else if (entry.getKey().startsWith(consumerPropertyPrefix)) { * @return instance count indicated in the provided properties; if the properties do not * contain a count, a value of {@code 1} is returned */ - /* default */ int getInstanceCount(Map properties) { + /* default */ + int getInstanceCount(Map properties) { return Integer.parseInt(properties.getOrDefault(AppDeployer.COUNT_PROPERTY_KEY, "1")); } @@ -366,16 +388,19 @@ else if (entry.getKey().startsWith(consumerPropertyPrefix)) { * either by the deployment properties for the app or whether the previous (upstream) app * is publishing partitioned data. * - * @param appDeploymentProperties deployment properties for the app + * @param appDeploymentProperties deployment properties for the app * @param upstreamAppSupportsPartition if true, previous (upstream) app in the stream - * publishes partitioned data + * publishes partitioned data * @return true if the app consumes partitioned data */ - /* default */ boolean isPartitionedConsumer(Map appDeploymentProperties, - boolean upstreamAppSupportsPartition) { + /* default */ + boolean isPartitionedConsumer( + Map appDeploymentProperties, + boolean upstreamAppSupportsPartition + ) { return upstreamAppSupportsPartition - || (appDeploymentProperties.containsKey(BindingPropertyKeys.INPUT_PARTITIONED) - && appDeploymentProperties.get(BindingPropertyKeys.INPUT_PARTITIONED).equalsIgnoreCase("true")); + || (appDeploymentProperties.containsKey(BindingPropertyKeys.INPUT_PARTITIONED) + && appDeploymentProperties.get(BindingPropertyKeys.INPUT_PARTITIONED).equalsIgnoreCase("true")); } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/ComposedTaskRunnerConfigurationProperties.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/ComposedTaskRunnerConfigurationProperties.java index 231594c21f..02536691cd 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/ComposedTaskRunnerConfigurationProperties.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/ComposedTaskRunnerConfigurationProperties.java @@ -18,6 +18,7 @@ import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.cloud.dataflow.core.DataFlowPropertyKeys; +import org.springframework.util.Assert; /** * Properties used to define the behavior of the 
composed task runner. @@ -44,7 +45,7 @@ public class ComposedTaskRunnerConfigurationProperties { * If true SCDF will set the dataflow-server-access-token for the composed * task runner to the user's token when launching composed tasks. */ - private Boolean useUserAccessToken; + private Boolean useUserAccessToken = false; public String getUri() { return uri; @@ -67,6 +68,7 @@ public Boolean isUseUserAccessToken() { } public void setUseUserAccessToken(Boolean useUserAccessToken) { + Assert.notNull(useUserAccessToken, "'useUserAccessToken' cannot be null"); this.useUserAccessToken = useUserAccessToken; } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultSchedulerService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultSchedulerService.java index b88d5da9a4..a8ab2e09b2 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultSchedulerService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultSchedulerService.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2020 the original author or authors. + * Copyright 2018-2024 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -19,11 +19,19 @@ import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; +import java.util.Set; import java.util.TreeMap; +import java.util.regex.Pattern; import java.util.stream.Collectors; +import java.util.stream.StreamSupport; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties; import org.springframework.cloud.dataflow.audit.service.AuditRecordService; @@ -47,9 +55,12 @@ import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; import org.springframework.cloud.dataflow.server.service.SchedulerService; import org.springframework.cloud.dataflow.server.service.SchedulerServiceProperties; +import org.springframework.cloud.dataflow.server.service.TaskExecutionInfoService; import org.springframework.cloud.deployer.spi.core.AppDefinition; import org.springframework.cloud.deployer.spi.scheduler.ScheduleInfo; import org.springframework.cloud.deployer.spi.scheduler.ScheduleRequest; +import org.springframework.cloud.task.listener.TaskException; +import org.springframework.core.env.PropertyResolver; import org.springframework.core.io.Resource; import org.springframework.core.io.ResourceLoader; import org.springframework.data.domain.Page; @@ -63,79 +74,83 @@ * * @author Glenn Renfro * @author Chris Schaefer + * @author Ilayaperumal Gopinathan */ public class DefaultSchedulerService implements SchedulerService { + private static final Logger logger = LoggerFactory.getLogger(DefaultSchedulerService.class); + private final static int MAX_SCHEDULE_NAME_LEN = 52; private CommonApplicationProperties commonApplicationProperties; + private List taskPlatforms; + private TaskDefinitionRepository taskDefinitionRepository; + private AppRegistryService registry; + private final TaskConfigurationProperties taskConfigurationProperties; + private final String dataflowServerUri; + private final VisibleProperties 
visibleProperties; + private final SchedulerServiceProperties schedulerServiceProperties; + private final AuditRecordService auditRecordService; + private final AuditServiceUtils auditServiceUtils; + private final DataSourceProperties dataSourceProperties; + private final ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties; - /** - * Constructor for DefaultSchedulerService - * @param commonApplicationProperties common properties for applications deployed via Spring Cloud Data Flow. - * @param taskPlatforms the {@link TaskPlatform}s for this service. - * @param taskDefinitionRepository the {@link TaskDefinitionRepository} for this service. - * @param registry the {@link AppRegistryService} for this service. - * @param resourceLoader the {@link ResourceLoader} for this service. - * @param taskConfigurationProperties the {@link TaskConfigurationProperties} for this service. - * @param dataSourceProperties the {@link DataSourceProperties} for this service. - * @param dataflowServerUri the Spring Cloud Data Flow uri for this service. - * @param metaDataResolver the {@link ApplicationConfigurationMetadataResolver} for this service. - * @param schedulerServiceProperties the {@link SchedulerServiceProperties} for this service. - * @param auditRecordService the {@link AuditRecordService} for this service. - */ - @Deprecated - public DefaultSchedulerService(CommonApplicationProperties commonApplicationProperties, - List taskPlatforms, TaskDefinitionRepository taskDefinitionRepository, - AppRegistryService registry, ResourceLoader resourceLoader, - TaskConfigurationProperties taskConfigurationProperties, - DataSourceProperties dataSourceProperties, String dataflowServerUri, - ApplicationConfigurationMetadataResolver metaDataResolver, - SchedulerServiceProperties schedulerServiceProperties, - AuditRecordService auditRecordService) { - - this(commonApplicationProperties, taskPlatforms, taskDefinitionRepository, registry, resourceLoader, - taskConfigurationProperties, dataSourceProperties, dataflowServerUri, metaDataResolver, - schedulerServiceProperties, auditRecordService, null); - } + private final TaskExecutionInfoService taskExecutionInfoService; + + private final PropertyResolver propertyResolver; + + private static final Pattern TASK_NAME_PATTERN = Pattern.compile("[a-zA-Z]([-a-zA-Z0-9]*[a-zA-Z0-9])?"); + + private static final String TASK_NAME_VALIDATION_MSG = "Task name must consist of alphanumeric characters " + + "or '-', start with an alphabetic character, and end with an alphanumeric character (e.g. 'my-name', " + + "or 'abc-123')"; /** * Constructor for DefaultSchedulerService - * @param commonApplicationProperties common properties for applications deployed via Spring Cloud Data Flow. - * @param taskPlatforms the {@link TaskPlatform}s for this service. - * @param taskDefinitionRepository the {@link TaskDefinitionRepository} for this service. - * @param registry the {@link AppRegistryService} for this service. - * @param resourceLoader the {@link ResourceLoader} for this service. - * @param taskConfigurationProperties the {@link TaskConfigurationProperties} for this service. - * @param dataSourceProperties the {@link DataSourceProperties} for this service. - * @param dataflowServerUri the Spring Cloud Data Flow uri for this service. - * @param metaDataResolver the {@link ApplicationConfigurationMetadataResolver} for this service. - * @param schedulerServiceProperties the {@link SchedulerServiceProperties} for this service. 
- * @param auditRecordService the {@link AuditRecordService} for this service. - * @param composedTaskRunnerConfigurationProperties the {@link ComposedTaskRunnerConfigurationProperties} for this - * service + * + * @param commonApplicationProperties common properties for applications deployed via Spring Cloud Data Flow. + * @param taskPlatforms the {@link TaskPlatform}s for this service. + * @param taskDefinitionRepository the {@link TaskDefinitionRepository} for this service. + * @param registry the {@link AppRegistryService} for this service. + * @param resourceLoader the {@link ResourceLoader} for this service. + * @param taskConfigurationProperties the {@link TaskConfigurationProperties} for this service. + * @param dataSourceProperties the {@link DataSourceProperties} for this service. + * @param dataflowServerUri the Spring Cloud Data Flow uri for this service. + * @param metaDataResolver the {@link ApplicationConfigurationMetadataResolver} for this service. + * @param schedulerServiceProperties the {@link SchedulerServiceProperties} for this service. + * @param auditRecordService the {@link AuditRecordService} for this service. + * @param taskExecutionInfoService the {@link TaskExecutionInfoService} for this service + * @param propertyResolver the {@link PropertyResolver} for this service + * @param composedTaskRunnerConfigurationProperties the {@link ComposedTaskRunnerConfigurationProperties} for this service */ - public DefaultSchedulerService(CommonApplicationProperties commonApplicationProperties, - List taskPlatforms, TaskDefinitionRepository taskDefinitionRepository, - AppRegistryService registry, ResourceLoader resourceLoader, + public DefaultSchedulerService( + CommonApplicationProperties commonApplicationProperties, + List taskPlatforms, + TaskDefinitionRepository taskDefinitionRepository, + AppRegistryService registry, + ResourceLoader resourceLoader, TaskConfigurationProperties taskConfigurationProperties, - DataSourceProperties dataSourceProperties, String dataflowServerUri, + DataSourceProperties dataSourceProperties, + String dataflowServerUri, ApplicationConfigurationMetadataResolver metaDataResolver, SchedulerServiceProperties schedulerServiceProperties, AuditRecordService auditRecordService, - ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties) { + TaskExecutionInfoService taskExecutionInfoService, + PropertyResolver propertyResolver, + ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties + ) { Assert.notNull(commonApplicationProperties, "commonApplicationProperties must not be null"); Assert.notNull(taskPlatforms, "taskPlatforms must not be null"); Assert.notNull(registry, "AppRegistryService must not be null"); @@ -146,6 +161,8 @@ public DefaultSchedulerService(CommonApplicationProperties commonApplicationProp Assert.notNull(schedulerServiceProperties, "schedulerServiceProperties must not be null"); Assert.notNull(auditRecordService, "AuditRecordService must not be null"); Assert.notNull(dataSourceProperties, "dataSourceProperties must not be null"); + Assert.notNull(taskExecutionInfoService, "taskExecutionInfoService must not be null"); + Assert.notNull(propertyResolver, "propertyResolver must not be null"); this.commonApplicationProperties = commonApplicationProperties; this.taskPlatforms = taskPlatforms; this.taskDefinitionRepository = taskDefinitionRepository; @@ -157,46 +174,89 @@ public DefaultSchedulerService(CommonApplicationProperties commonApplicationProp this.auditRecordService = 
auditRecordService; this.auditServiceUtils = new AuditServiceUtils(); this.dataSourceProperties = dataSourceProperties; + this.taskExecutionInfoService = taskExecutionInfoService; + this.propertyResolver = propertyResolver; this.composedTaskRunnerConfigurationProperties = composedTaskRunnerConfigurationProperties; } @Override - public void schedule(String scheduleName, String taskDefinitionName, Map taskDeploymentProperties, - List commandLineArgs) { + public void schedule( + String scheduleName, String taskDefinitionName, Map taskDeploymentProperties, + List commandLineArgs + ) { schedule(scheduleName, taskDefinitionName, taskDeploymentProperties, commandLineArgs, null); } + @SuppressWarnings("DuplicatedCode") @Override - public void schedule(String scheduleName, String taskDefinitionName, Map taskDeploymentProperties, - List commandLineArgs, String platformName) { + public void schedule( + String scheduleName, String taskDefinitionName, Map taskDeploymentProperties, + List commandLineArgs, String platformName + ) { + String platformType = StreamSupport.stream(getLaunchers().spliterator(), true) + .filter(deployer -> deployer.getName().equalsIgnoreCase(platformName)) + .map(Launcher::getType) + .findFirst() + .orElse("unknown"); + if (platformType.equals(TaskPlatformFactory.KUBERNETES_PLATFORM_TYPE) && !TASK_NAME_PATTERN.matcher(taskDefinitionName).matches()) { + throw new TaskException(String.format("Task name %s is invalid. %s", taskDefinitionName, TASK_NAME_VALIDATION_MSG)); + } Assert.hasText(taskDefinitionName, "The provided taskName must not be null or empty."); Assert.notNull(taskDeploymentProperties, "The provided taskDeploymentProperties must not be null."); - TaskDefinition taskDefinition = this.taskDefinitionRepository.findById(taskDefinitionName) - .orElseThrow(() -> new NoSuchTaskDefinitionException(taskDefinitionName)); + TaskDefinition taskDefinition = this.taskDefinitionRepository.findById(taskDefinitionName).orElse(null); + if (taskDefinition == null) { + throw new NoSuchTaskDefinitionException(taskDefinitionName); + } + + String taskAppName = taskDefinition.getRegisteredAppName(); + String taskLabel = taskDefinition.getAppDefinition().getName(); + String version = taskDeploymentProperties.get("version." + taskLabel); TaskParser taskParser = new TaskParser(taskDefinition.getName(), taskDefinition.getDslText(), true, true); TaskNode taskNode = taskParser.parse(); AppRegistration appRegistration; // if composed task definition replace definition with one composed task // runner and executable graph. 
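+		// For example, a composed definition "t1 && t2" is replaced by a single
+		// composed-task-runner launch whose graph argument is the executable DSL.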
if (taskNode.isComposed()) { - taskDefinition = new TaskDefinition(taskDefinition.getName(), - TaskServiceUtils.createComposedTaskDefinition( - taskNode.toExecutableDSL())); - taskDeploymentProperties = TaskServiceUtils.establishComposedTaskProperties(taskDeploymentProperties, taskNode); - TaskServiceUtils.addImagePullSecretProperty(taskDeploymentProperties, - this.composedTaskRunnerConfigurationProperties); + taskDefinition = new TaskDefinition(taskDefinition.getName(), TaskServiceUtils.createComposedTaskDefinition(taskNode.toExecutableDSL())); + Map establishedComposedTaskProperties = TaskServiceUtils.establishComposedTaskProperties(taskDeploymentProperties, taskNode); + taskDeploymentProperties.putAll(establishedComposedTaskProperties); + TaskServiceUtils.addImagePullSecretProperty(taskDeploymentProperties, this.composedTaskRunnerConfigurationProperties); try { - appRegistration = new AppRegistration(ComposedTaskRunnerConfigurationProperties.COMPOSED_TASK_RUNNER_NAME, - ApplicationType.task, new URI(TaskServiceUtils.getComposedTaskLauncherUri(this.taskConfigurationProperties, - this.composedTaskRunnerConfigurationProperties))); - } - catch (URISyntaxException e) { + appRegistration = new AppRegistration( + ComposedTaskRunnerConfigurationProperties.COMPOSED_TASK_RUNNER_NAME, + ApplicationType.task, + new URI(this.composedTaskRunnerConfigurationProperties.getUri())); + } catch (URISyntaxException e) { throw new IllegalStateException("Invalid Compose Task Runner Resource", e); } - } - else { + Set appNames = taskExecutionInfoService.composedTaskChildNames(taskDefinition.getName()); + + logger.info("composedTask:dsl={}:appNames:{}", taskDefinition.getDslText(), appNames); + addPrefixProperties("app.composed-task-runner.", taskDeploymentProperties); + addPrefixProperties("app." + scheduleName + ".", taskDeploymentProperties); + for (String appName : appNames) { + List names = new ArrayList<>(Arrays.asList(StringUtils.delimitedListToStringArray(appName, ","))); + String registeredName = names.get(0); + String appId = registeredName; + if (names.size() > 1) { + appId = names.get(1); + } + String appVersion = taskDeploymentProperties.get("version." + taskAppName + "-" + appId + "." + appId); + if(!StringUtils.hasText(appVersion)) { + appVersion = taskDeploymentProperties.get("version." + taskAppName + "-" + appId); + } + if(!StringUtils.hasText(appVersion)) { + appVersion = taskDeploymentProperties.get("version." + appId); + } + logger.debug("ctr:{}:registeredName={}, version={}", names, registeredName, appVersion); + } + logger.debug("ctr:added:{}:{}", scheduleName, taskDeploymentProperties); + commandLineArgs = TaskServiceUtils.convertCommandLineArgsToCTRFormat(commandLineArgs); + } else { appRegistration = this.registry.find(taskDefinition.getRegisteredAppName(), ApplicationType.task); + addPrefixCommandLineArgs("app." + taskDefinition.getRegisteredAppName() + ".", commandLineArgs); + addPrefixProperties("app." 
+ taskDefinition.getRegisteredAppName() + ".", taskDeploymentProperties); } Assert.notNull(appRegistration, "Unknown task app: " + taskDefinition.getRegisteredAppName()); Resource metadataResource = this.registry.getAppMetadataResource(appRegistration); @@ -205,8 +265,7 @@ ApplicationType.task, new URI(TaskServiceUtils.getComposedTaskLauncherUri(this.t TaskServiceUtils.addDatabaseCredentials(this.taskConfigurationProperties.isUseKubernetesSecretsForDbCredentials(), launcher.getType())); Map appDeploymentProperties = new HashMap<>(commonApplicationProperties.getTask()); - appDeploymentProperties.putAll( - TaskServiceUtils.extractAppProperties(taskDefinition.getRegisteredAppName(), taskDeploymentProperties)); + appDeploymentProperties.putAll(TaskServiceUtils.extractAppProperties(taskDefinition.getRegisteredAppName(), taskDeploymentProperties)); // Merge the common properties defined via the spring.cloud.dataflow.common-properties.task-resource file. // Doesn't override existing properties! @@ -214,7 +273,7 @@ ApplicationType.task, new URI(TaskServiceUtils.getComposedTaskLauncherUri(this.t TaskServiceUtils.contributeCommonProperties(this.commonApplicationProperties.getTaskResourceProperties(), appDeploymentProperties, "common"); TaskServiceUtils.contributeCommonProperties(this.commonApplicationProperties.getTaskResourceProperties(), - appDeploymentProperties, launcher.getType().toLowerCase()); + appDeploymentProperties, launcher.getType().toLowerCase(Locale.ROOT)); Map deployerDeploymentProperties = DeploymentPropertiesUtils .extractAndQualifyDeployerProperties(taskDeploymentProperties, taskDefinition.getRegisteredAppName()); @@ -224,12 +283,15 @@ ApplicationType.task, new URI(TaskServiceUtils.getComposedTaskLauncherUri(this.t AppDefinition revisedDefinition = TaskServiceUtils.mergeAndExpandAppProperties(taskDefinition, metadataResource, appDeploymentProperties, visibleProperties); DeploymentPropertiesUtils.validateDeploymentProperties(taskDeploymentProperties); - taskDeploymentProperties = extractAndQualifySchedulerProperties(taskDeploymentProperties); - + taskDeploymentProperties = filterPrefixedProperties(taskDeploymentProperties); + deployerDeploymentProperties.putAll(taskDeploymentProperties); scheduleName = validateScheduleNameForPlatform(launcher.getType(), scheduleName); + ScheduleRequest scheduleRequest = new ScheduleRequest(revisedDefinition, + deployerDeploymentProperties, + commandLineArgs, + scheduleName, + getTaskResource(taskDefinitionName, version)); - ScheduleRequest scheduleRequest = new ScheduleRequest(revisedDefinition, taskDeploymentProperties, - deployerDeploymentProperties, commandLineArgs, scheduleName, getTaskResource(taskDefinitionName)); launcher.getScheduler().schedule(scheduleRequest); this.auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, AuditActionType.CREATE, @@ -237,13 +299,42 @@ ApplicationType.task, new URI(TaskServiceUtils.getComposedTaskLauncherUri(this.t launcher.getName()); } + + private static void addProperty(String property, String value, Map properties) { + if (properties.containsKey(property)) { + logger.debug("exists:{}={}", property, properties.get(property)); + } else { + logger.debug("adding:{}={}", property, value); + properties.put(property, value); + } + } + + private static void addPrefixProperties(String prefix, Map deploymentProperties) { + addProperty(prefix + "spring.cloud.task.initialize-enabled", "false", deploymentProperties); + } + + private static void addPrefixCommandLineArgs(String prefix, 
+			List<String> commandLineArgs) {
+		addCommandLine(prefix + "spring.cloud.task.initialize-enabled", "false", commandLineArgs);
+	}
+
+	private static void addCommandLine(String property, String value, List<String> commandLineArgs) {
+		String argPrefix = "--" + property + "=";
+		if (commandLineArgs.stream().noneMatch(item -> item.startsWith(argPrefix))) {
+			String arg = argPrefix + value;
+			commandLineArgs.add(arg);
+			logger.debug("adding:{}", arg);
+		} else {
+			logger.debug("exists:{}", argPrefix);
+		}
+	}
+
	private String validateScheduleNameForPlatform(String type, String scheduleName) {
		if (type.equals(TaskPlatformFactory.KUBERNETES_PLATFORM_TYPE)) {
			if (scheduleName.length() > MAX_SCHEDULE_NAME_LEN) {
				throw new IllegalArgumentException(String.format("the name specified " +
						"exceeds the maximum schedule name length of %s.", MAX_SCHEDULE_NAME_LEN));
			}
-			scheduleName = scheduleName.toLowerCase();
+			scheduleName = scheduleName.toLowerCase(Locale.ROOT);
		}
		return scheduleName;
	}
@@ -281,8 +372,7 @@ private Launcher getTaskLauncher(String platformName) {
		}
		if (platformName != null && launcherToUse == null) {
			throw new IllegalArgumentException(String.format("The platform %s does not support a scheduler service.", platformName));
-		}
-		else if (platformName == null && launcherToUse == null) {
+		} else if (platformName == null && launcherToUse == null) {
			throw new IllegalStateException("Could not find a default scheduler.");
		}
		return launcherToUse;
@@ -291,9 +381,7 @@ else if (platformName == null && launcherToUse == null) {
	private List<Launcher> getLaunchers() {
		List<Launcher> launchers = new ArrayList<>();
		for (TaskPlatform taskPlatform : this.taskPlatforms) {
-			for (Launcher launcher : taskPlatform.getLaunchers()) {
-				launchers.add(launcher);
-			}
+			launchers.addAll(taskPlatform.getLaunchers());
		}
		return launchers;
	}
@@ -394,7 +482,7 @@ public ScheduleInfo getSchedule(String scheduleName, String platformName) {
				.filter(scheduleInfo -> scheduleInfo.getScheduleName().equals(scheduleName))
				.collect(Collectors.toList());
		Assert.isTrue(!(result.size() > 1), "more than one schedule was returned for scheduleName, should only be one");
-		return result.size() > 0 ? result.get(0) : null;
+		return !result.isEmpty() ? result.get(0) : null;
	}

	@Override
@@ -402,8 +490,10 @@ public ScheduleInfo getSchedule(String scheduleName) {
		return getSchedule(scheduleName, null);
	}

-	private List<ScheduleInfo> limitScheduleInfoResultSize(List<ScheduleInfo> resultSet,
-			int schedulerLimitResultSize) {
+	private List<ScheduleInfo> limitScheduleInfoResultSize(
+			List<ScheduleInfo> resultSet,
+			int schedulerLimitResultSize
+	) {
		if (resultSet.size() > schedulerLimitResultSize) {
			resultSet = resultSet.subList(0, schedulerLimitResultSize);
		}
@@ -411,42 +501,39 @@ private List<ScheduleInfo> limitScheduleInfoResultSize(List<ScheduleInfo> result
	}

	/**
-	 * Retain only properties that are meant for the scheduler of a given task(those
-	 * that start with {@code scheduler.}and qualify all
-	 * property values with the {@code spring.cloud.scheduler.} prefix.
+	 * Provides a filtered Map that excludes entries prefixed with "app." or "deployer.".
* - * @param input the scheduler properties - * @return scheduler properties for the task + * @param input the properties + * @return deployer properties for the schedule */ - private static Map extractAndQualifySchedulerProperties(Map input) { - final String prefix = "scheduler."; - final int prefixLength = prefix.length(); - + private static Map filterPrefixedProperties(Map input) { return new TreeMap<>(input).entrySet().stream() - .filter(kv -> kv.getKey().startsWith(prefix)) - .collect(Collectors.toMap(kv -> "spring.cloud.scheduler." + kv.getKey().substring(prefixLength), Map.Entry::getValue, - (fromWildcard, fromApp) -> fromApp)); + .filter(kv -> (!kv.getKey().startsWith("deployer.") && !kv.getKey().startsWith("app."))) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); } - protected Resource getTaskResource(String taskDefinitionName) { + protected Resource getTaskResource(String taskDefinitionName, String version) { TaskDefinition taskDefinition = this.taskDefinitionRepository.findById(taskDefinitionName) .orElseThrow(() -> new NoSuchTaskDefinitionException(taskDefinitionName)); AppRegistration appRegistration = null; if (TaskServiceUtils.isComposedTaskDefinition(taskDefinition.getDslText())) { URI composedTaskUri = null; - String composedTaskLauncherUri = TaskServiceUtils.getComposedTaskLauncherUri(this.taskConfigurationProperties, - this.composedTaskRunnerConfigurationProperties); + String composedTaskLauncherUri = this.composedTaskRunnerConfigurationProperties.getUri(); try { composedTaskUri = new URI(composedTaskLauncherUri); - } - catch (URISyntaxException e) { + } catch (URISyntaxException e) { throw new IllegalArgumentException("Invalid Composed Task Url: " + composedTaskLauncherUri); } appRegistration = new AppRegistration(ComposedTaskRunnerConfigurationProperties.COMPOSED_TASK_RUNNER_NAME, ApplicationType.task, composedTaskUri); - } - else { - appRegistration = this.registry.find(taskDefinition.getRegisteredAppName(), + } else { + if(version != null) { + appRegistration = this.registry.find(taskDefinition.getRegisteredAppName(), + ApplicationType.task, version); + } + else { + appRegistration = this.registry.find(taskDefinition.getRegisteredAppName(), ApplicationType.task); + } } Assert.notNull(appRegistration, "Unknown task app: " + taskDefinition.getRegisteredAppName()); return this.registry.getAppResource(appRegistration); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultStreamService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultStreamService.java index 4db3c4b9e2..b18b85cc93 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultStreamService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultStreamService.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2020 the original author or authors. + * Copyright 2017-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.springframework.cloud.dataflow.server.service.impl; import java.util.ArrayList; @@ -29,7 +30,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.yaml.snakeyaml.DumperOptions; +import org.yaml.snakeyaml.LoaderOptions; import org.yaml.snakeyaml.Yaml; +import org.yaml.snakeyaml.constructor.SafeConstructor; +import org.yaml.snakeyaml.representer.Representer; import org.springframework.cloud.dataflow.audit.service.AuditRecordService; import org.springframework.cloud.dataflow.audit.service.AuditServiceUtils; @@ -84,6 +88,7 @@ * @author Christian Tzolov * @author Gunnar Hillert * @author Chris Schaefer + * @author Chris Bono */ @Transactional public class DefaultStreamService implements StreamService { @@ -168,6 +173,12 @@ private Release doDeployStream(StreamDefinition streamDefinition, Map appDeploymentRequests = this.appDeploymentRequestCreator .createRequests(streamDefinition, deploymentPropertiesToUse, platformType); @@ -232,20 +243,19 @@ private void updateStreamDefinitionFromReleaseManifest(String streamName, String streamDefinition.getOriginalDslText(), streamDefinition.getDescription()); logger.debug("Updated StreamDefinition: " + updatedStreamDefinition); - // TODO consider adding an explicit UPDATE method to the streamDefRepository - // Note: Not transactional and can lead to loosing the stream definition + // NOTE: Not transactional and can lead to losing the stream definition this.streamDefinitionRepository.delete(updatedStreamDefinition); this.streamDefinitionRepository.save(updatedStreamDefinition); this.auditRecordService.populateAndSaveAuditRecord( AuditOperationType.STREAM, AuditActionType.UPDATE, streamName, - updatedStreamDefinition.getDslText(), null); + this.streamDefinitionService.redactDsl(updatedStreamDefinition), null); } @Override public void scaleApplicationInstances(String streamName, String appName, int count, Map properties) { // Skipper expects app names / labels not deployment ids - logger.info(String.format("Scale %s:%s to %s with properties: %s", streamName, appName, count, properties)); + logger.info("Scale {}:{} to {} with properties: {}", streamName, appName, count, properties); this.skipperStreamDeployer.scale(streamName, appName, count, properties); } @@ -334,7 +344,7 @@ String convertPropertiesToSkipperYaml(StreamDefinition streamDefinition, if (hasProps) { appMap.put(SpringCloudDeployerApplicationManifest.SPEC_STRING, specMap); } - if (appMap.size() != 0) { + if (!appMap.isEmpty()) { skipperConfigValuesMap.put(appName, appMap); } } @@ -343,7 +353,7 @@ String convertPropertiesToSkipperYaml(StreamDefinition streamDefinition, dumperOptions.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK); dumperOptions.setPrettyFlow(true); dumperOptions.setLineBreak(DumperOptions.LineBreak.getPlatformLineBreak()); - Yaml yaml = new Yaml(dumperOptions); + Yaml yaml = new Yaml(new SafeConstructor(new LoaderOptions()), new Representer(dumperOptions), dumperOptions); return yaml.dump(skipperConfigValuesMap); } else { @@ -383,19 +393,9 @@ public StreamDeployment info(String streamName) { return this.skipperStreamDeployer.getStreamInfo(streamName); } - /** - * Create a new stream. 
- * - * @param streamName stream name - * @param dsl DSL definition for stream - * @param description description of the stream definition - * @param deploy if {@code true}, the stream is deployed upon creation (default is - * {@code false}) - * @return the created stream definition already exists - * @throws InvalidStreamDefinitionException if there are errors in parsing the stream DSL, - * resolving the name, or type of applications in the stream - */ - public StreamDefinition createStream(String streamName, String dsl, String description, boolean deploy) { + @Override + public StreamDefinition createStream(String streamName, String dsl, String description, boolean deploy, + Map deploymentProperties) { StreamDefinition streamDefinition = createStreamDefinition(streamName, dsl, description); List errorMessages = new ArrayList<>(); @@ -409,10 +409,6 @@ public StreamDefinition createStream(String streamName, String dsl, String descr } } - if (!STREAM_NAME_PATTERN.matcher(streamName).matches()) { - errorMessages.add(STREAM_NAME_VALIDATION_MSG); - } - if (!errorMessages.isEmpty()) { throw new InvalidStreamDefinitionException( StringUtils.collectionToDelimitedString(errorMessages, "\n")); @@ -426,7 +422,7 @@ public StreamDefinition createStream(String streamName, String dsl, String descr final StreamDefinition savedStreamDefinition = this.streamDefinitionRepository.save(streamDefinition); if (deploy) { - this.deployStream(streamName, new HashMap<>()); + this.deployStream(streamName, deploymentProperties); } auditRecordService.populateAndSaveAuditRecord( @@ -561,7 +557,7 @@ private Set findRelatedDefinitions(StreamDefinition currentStr */ public Page findDefinitionByNameContains(Pageable pageable, String search) { Page streamDefinitions; - if (search != null) { + if (StringUtils.hasLength(search)) { streamDefinitions = streamDefinitionRepository.findByNameContains(search, pageable); } else { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskDeleteService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskDeleteService.java index 1f46022ad5..bb029824d6 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskDeleteService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskDeleteService.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2019 the original author or authors. + * Copyright 2016-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
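Worth pausing on the `DefaultStreamService` hunk above: `new Yaml(dumperOptions)` is replaced with a `Yaml` built from a `SafeConstructor` and an explicit `Representer`, which hardens the instance against unsafe deserialization of arbitrary Java types. A self-contained sketch of the same construction, dumping a small map the way `convertPropertiesToSkipperYaml` does; the map contents here are invented sample data:

```java
import java.util.LinkedHashMap;
import java.util.Map;

import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.LoaderOptions;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.constructor.SafeConstructor;
import org.yaml.snakeyaml.representer.Representer;

public class SafeYamlDump {
	public static void main(String[] args) {
		DumperOptions dumperOptions = new DumperOptions();
		dumperOptions.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
		dumperOptions.setPrettyFlow(true);
		dumperOptions.setLineBreak(DumperOptions.LineBreak.getPlatformLineBreak());

		// SafeConstructor refuses to instantiate arbitrary Java types on load,
		// and the Representer is built from the same DumperOptions, mirroring the hunk above.
		Yaml yaml = new Yaml(new SafeConstructor(new LoaderOptions()),
				new Representer(dumperOptions), dumperOptions);

		Map<String, Object> skipperConfigValuesMap = new LinkedHashMap<>(); // invented sample data
		skipperConfigValuesMap.put("time",
				Map.of("spec", Map.of("applicationProperties", Map.of("server.port", 8081))));
		System.out.println(yaml.dump(skipperConfigValuesMap));
	}
}
```

`SafeConstructor` only matters for `load()`, not `dump()`, but constructing the instance this way keeps it safe if the same `Yaml` object is ever reused for parsing.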
@@ -16,13 +16,20 @@ package org.springframework.cloud.dataflow.server.service.impl; +import java.util.Collection; import java.util.HashSet; import java.util.LinkedHashMap; +import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; + +import javax.sql.DataSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -33,12 +40,12 @@ import org.springframework.cloud.dataflow.core.Launcher; import org.springframework.cloud.dataflow.core.TaskDefinition; import org.springframework.cloud.dataflow.core.TaskDeployment; +import org.springframework.cloud.dataflow.core.database.support.DatabaseType; import org.springframework.cloud.dataflow.core.dsl.TaskNode; import org.springframework.cloud.dataflow.core.dsl.TaskParser; import org.springframework.cloud.dataflow.rest.util.ArgumentSanitizer; import org.springframework.cloud.dataflow.server.controller.support.TaskExecutionControllerDeleteAction; import org.springframework.cloud.dataflow.server.job.LauncherRepository; -import org.springframework.cloud.dataflow.server.repository.CannotDeleteNonParentTaskExecutionException; import org.springframework.cloud.dataflow.server.repository.DataflowJobExecutionDao; import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDao; import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao; @@ -48,9 +55,10 @@ import org.springframework.cloud.dataflow.server.repository.TaskDeploymentRepository; import org.springframework.cloud.dataflow.server.service.SchedulerService; import org.springframework.cloud.dataflow.server.service.TaskDeleteService; +import org.springframework.cloud.dataflow.server.task.DataflowTaskExplorer; import org.springframework.cloud.deployer.spi.task.TaskLauncher; import org.springframework.cloud.task.repository.TaskExecution; -import org.springframework.cloud.task.repository.TaskExplorer; +import org.springframework.jdbc.support.MetaDataAccessException; import org.springframework.orm.ObjectOptimisticLockingFailureException; import org.springframework.transaction.annotation.Transactional; import org.springframework.util.Assert; @@ -71,15 +79,21 @@ * @author Michael Wirth * @author David Turanski * @author Daniel Serleg + * @author Corneil du Plessis + * @author Joe O'Brien */ public class DefaultTaskDeleteService implements TaskDeleteService { private static final Logger logger = LoggerFactory.getLogger(DefaultTaskDeleteService.class); + private static final int SQL_SERVER_CHUNK_SIZE = 2098; + + private static final int ORACLE_SERVER_CHUNK_SIZE = 998; + /** * Used to read TaskExecutions. 
*/ - private final TaskExplorer taskExplorer; + private final DataflowTaskExplorer taskExplorer; private final LauncherRepository launcherRepository; @@ -95,18 +109,27 @@ public class DefaultTaskDeleteService implements TaskDeleteService { protected final DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao; - private SchedulerService schedulerService; + private final SchedulerService schedulerService; private final ArgumentSanitizer argumentSanitizer = new ArgumentSanitizer(); - public DefaultTaskDeleteService(TaskExplorer taskExplorer, LauncherRepository launcherRepository, + private final int taskDeleteChunkSize; + + private final DataSource dataSource; + + public DefaultTaskDeleteService( + DataflowTaskExplorer taskExplorer, + LauncherRepository launcherRepository, TaskDefinitionRepository taskDefinitionRepository, TaskDeploymentRepository taskDeploymentRepository, AuditRecordService auditRecordService, DataflowTaskExecutionDao dataflowTaskExecutionDao, DataflowJobExecutionDao dataflowJobExecutionDao, DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao, - SchedulerService schedulerService) { + SchedulerService schedulerService, + TaskConfigurationProperties taskConfigurationProperties, + DataSource dataSource + ) { Assert.notNull(taskExplorer, "TaskExplorer must not be null"); Assert.notNull(launcherRepository, "LauncherRepository must not be null"); Assert.notNull(taskDefinitionRepository, "TaskDefinitionRepository must not be null"); @@ -115,6 +138,9 @@ public DefaultTaskDeleteService(TaskExplorer taskExplorer, LauncherRepository la Assert.notNull(dataflowTaskExecutionDao, "DataflowTaskExecutionDao must not be null"); Assert.notNull(dataflowJobExecutionDao, "DataflowJobExecutionDao must not be null"); Assert.notNull(dataflowTaskExecutionMetadataDao, "DataflowTaskExecutionMetadataDao must not be null"); + Assert.notNull(taskConfigurationProperties, "TaskConfigurationProperties must not be null"); + Assert.notNull(dataSource, "DataSource must not be null"); + this.taskExplorer = taskExplorer; this.launcherRepository = launcherRepository; this.taskDefinitionRepository = taskDefinitionRepository; @@ -124,93 +150,180 @@ public DefaultTaskDeleteService(TaskExplorer taskExplorer, LauncherRepository la this.dataflowJobExecutionDao = dataflowJobExecutionDao; this.dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDao; this.schedulerService = schedulerService; + this.taskDeleteChunkSize = taskConfigurationProperties.getExecutionDeleteChunkSize(); + this.dataSource = dataSource; } @Override + @Transactional public void cleanupExecution(long id) { + performCleanupExecution(id); + } + + private void performCleanupExecution(long id) { TaskExecution taskExecution = taskExplorer.getTaskExecution(id); Assert.notNull(taskExecution, "There was no task execution with id " + id); String launchId = taskExecution.getExternalExecutionId(); - Assert.hasLength(launchId, "The TaskExecution for id " + id + " did not have an externalExecutionId"); + if (!StringUtils.hasText(launchId)) { + logger.warn("Did not find External execution ID for taskName = [{}], taskId = [{}]. Nothing to clean up.", taskExecution.getTaskName(), id); + return; + } TaskDeployment taskDeployment = this.taskDeploymentRepository.findByTaskDeploymentId(launchId); if (taskDeployment == null) { - logger.warn(String.format("Did not find TaskDeployment for taskName = [%s], taskId = [%s]. 
Nothing to clean up.", - taskExecution.getTaskName(), id)); + logger.warn("Did not find TaskDeployment for taskName = [{}], taskId = [{}]. Nothing to clean up.", taskExecution.getTaskName(), id); return; } Launcher launcher = launcherRepository.findByName(taskDeployment.getPlatformName()); if (launcher != null) { TaskLauncher taskLauncher = launcher.getTaskLauncher(); taskLauncher.cleanup(launchId); + } else { + logger.info("Could clean up execution for task id " + id + ". Did not find a task platform named " + taskDeployment.getPlatformName()); } - else { - logger.info( - "Could clean up execution for task id " + id + ". Did not find a task platform named " + - taskDeployment.getPlatformName()); + } + + @Override + @Transactional + public void cleanupExecutions(Set actionsAsSet, String taskName, boolean completed) { + cleanupExecutions(actionsAsSet, taskName, completed, null); + } + + @Override + @Transactional + public void cleanupExecutions(Set actionsAsSet, String taskName, boolean completed, Integer days) { + List tasks; + if (days != null) { + tasks = this.taskExplorer.findTaskExecutionsBeforeEndTime(taskName, TaskServicesDateUtils.numDaysAgoFromLocalMidnightToday(days)); + } else { + tasks = this.taskExplorer.findTaskExecutions(taskName, completed); } + final Set parentExecutions = new HashSet<>(); + final Set childExecutions = new HashSet<>(); + boolean removeData = actionsAsSet.contains(TaskExecutionControllerDeleteAction.REMOVE_DATA); + boolean cleanUp = actionsAsSet.contains(TaskExecutionControllerDeleteAction.CLEANUP); + for (TaskExecution taskExecution : tasks) { + if (taskExecution.getParentExecutionId() == null) { + parentExecutions.add(taskExecution); + } else { + childExecutions.add(taskExecution); + } + } + if (cleanUp) { + for (TaskExecution taskExecution : tasks) { + this.performCleanupExecution(taskExecution.getExecutionId()); + } + } + + if (removeData) { + if (!childExecutions.isEmpty()) { + deleteTaskExecutions(childExecutions); + } + if (!parentExecutions.isEmpty()) { + SortedSet parentIds = parentExecutions + .stream() + .map(TaskExecution::getExecutionId) + .collect(Collectors.toCollection(TreeSet::new)); + List children = this.taskExplorer.findChildTaskExecutions(parentIds); + SortedSet childIds = children + .stream() + .map(TaskExecution::getExecutionId) + .collect(Collectors.toCollection(TreeSet::new)); + if(childIds.size() > 0) { + this.performDeleteTaskExecutions(childIds); + } + if(parentIds.size() > 0) { + this.performDeleteTaskExecutions(parentIds); + } + } + } + } + + private void deleteTaskExecutions(Collection taskExecutions) { + List executions = taskExecutions.stream() + .collect(Collectors.toList()); + SortedSet executionIds = executions + .stream() + .map(TaskExecution::getExecutionId) + .collect(Collectors.toCollection(TreeSet::new)); + this.performDeleteTaskExecutions(executionIds); } @Override + @Transactional public void cleanupExecutions(Set actionsAsSet, Set ids) { - final SortedSet nonExistingTaskExecutions = new TreeSet<>(); - final SortedSet nonParentTaskExecutions = new TreeSet<>(); - final SortedSet deletableTaskExecutions = new TreeSet<>(); + performCleanupExecutions(actionsAsSet, ids); + } + private void performCleanupExecutions(Set actionsAsSet, Set ids) { + final SortedSet nonExistingTaskExecutions = new TreeSet<>(); + final SortedSet parentExecutions = new TreeSet<>(); + final SortedSet childExecutions = new TreeSet<>(); + boolean removeData = actionsAsSet.contains(TaskExecutionControllerDeleteAction.REMOVE_DATA); + boolean cleanUp 
= actionsAsSet.contains(TaskExecutionControllerDeleteAction.CLEANUP); for (Long id : ids) { final TaskExecution taskExecution = this.taskExplorer.getTaskExecution(id); if (taskExecution == null) { nonExistingTaskExecutions.add(id); - } - else { - final Long parentExecutionId = taskExecution.getParentExecutionId(); - - if (parentExecutionId != null) { - nonParentTaskExecutions.add(parentExecutionId); - } - else { - deletableTaskExecutions.add(taskExecution.getExecutionId()); - } + } else if (taskExecution.getParentExecutionId() == null) { + parentExecutions.add(taskExecution.getExecutionId()); + } else { + childExecutions.add(taskExecution.getExecutionId()); } } - if (!nonExistingTaskExecutions.isEmpty()) { if (nonExistingTaskExecutions.size() == 1) { throw new NoSuchTaskExecutionException(nonExistingTaskExecutions.first()); - } - else { + } else { throw new NoSuchTaskExecutionException(nonExistingTaskExecutions); } } - if (actionsAsSet.contains(TaskExecutionControllerDeleteAction.CLEANUP)) { + if (cleanUp) { for (Long id : ids) { - this.cleanupExecution(id); + this.performCleanupExecution(id); } } - if (actionsAsSet.contains(TaskExecutionControllerDeleteAction.REMOVE_DATA)) { - if (!deletableTaskExecutions.isEmpty()) { - this.deleteTaskExecutions(deletableTaskExecutions); - } - // delete orphaned child execution ids - else if (deletableTaskExecutions.isEmpty() && !nonParentTaskExecutions.isEmpty()) { - this.deleteTaskExecutions(nonParentTaskExecutions); + + if (removeData) { + if (!childExecutions.isEmpty()) { + this.performDeleteTaskExecutions(childExecutions); } - else if (!nonParentTaskExecutions.isEmpty()) { - throw new CannotDeleteNonParentTaskExecutionException(nonParentTaskExecutions); + if (!parentExecutions.isEmpty()) { + List children = this.taskExplorer.findChildTaskExecutions(parentExecutions); + if (!children.isEmpty()) { + this.deleteTaskExecutions(children); + } + this.performDeleteTaskExecutions(parentExecutions); } } - } @Override @Transactional public void deleteTaskExecutions(Set taskExecutionIds) { + performDeleteTaskExecutions(taskExecutionIds); + } + + @Override + public void deleteTaskExecutions(String taskName, boolean onlyCompleted) { + List taskExecutions = this.taskExplorer.findTaskExecutions(taskName, onlyCompleted); + + Set executionIds = taskExecutions + .stream() + .map(TaskExecution::getExecutionId) + .collect(Collectors.toCollection(TreeSet::new)); + performDeleteTaskExecutions(executionIds); + } + + private void performDeleteTaskExecutions(Set taskExecutionIds) { + logger.info("performDeleteTaskExecutions:{}", taskExecutionIds); Assert.notEmpty(taskExecutionIds, "You must provide at least 1 task execution id."); final Set taskExecutionIdsWithChildren = new HashSet<>(taskExecutionIds); - final Set childTaskExecutionIds = dataflowTaskExecutionDao.findChildTaskExecutionIds(taskExecutionIds); - logger.info("Found {} child task execution ids: {}.", childTaskExecutionIds.size(), StringUtils.collectionToCommaDelimitedString(childTaskExecutionIds)); + logger.info("Found {} child task execution ids: {}.", + childTaskExecutionIds.size(), + StringUtils.collectionToCommaDelimitedString(childTaskExecutionIds)); taskExecutionIdsWithChildren.addAll(childTaskExecutionIds); final Map auditData = new LinkedHashMap<>(); @@ -232,97 +345,204 @@ public void deleteTaskExecutions(Set taskExecutionIds) { auditData.put("Deleted # of Job Executions", jobExecutionIds.size()); auditData.put("Deleted Job Execution IDs", StringUtils.collectionToDelimitedString(jobExecutionIds, ", ")); - 
if (!jobExecutionIds.isEmpty()) { - final Set stepExecutionIds = dataflowJobExecutionDao.findStepExecutionIds(jobExecutionIds); - - final int numberOfDeletedBatchStepExecutionContextRows; - if (!stepExecutionIds.isEmpty()) { - numberOfDeletedBatchStepExecutionContextRows = dataflowJobExecutionDao.deleteBatchStepExecutionContextByStepExecutionIds(stepExecutionIds); - } - else { - numberOfDeletedBatchStepExecutionContextRows = 0; - } + int chunkSize = getTaskExecutionDeleteChunkSize(this.dataSource); - final int numberOfDeletedBatchStepExecutionRows = dataflowJobExecutionDao.deleteBatchStepExecutionsByJobExecutionIds(jobExecutionIds); - final int numberOfDeletedBatchJobExecutionContextRows = dataflowJobExecutionDao.deleteBatchJobExecutionContextByJobExecutionIds(jobExecutionIds); - final int numberOfDeletedBatchJobExecutionParamRows = dataflowJobExecutionDao.deleteBatchJobExecutionParamsByJobExecutionIds(jobExecutionIds); - final int numberOfDeletedBatchJobExecutionRows = dataflowJobExecutionDao.deleteBatchJobExecutionByJobExecutionIds(jobExecutionIds); - final int numberOfDeletedUnusedBatchJobInstanceRows = dataflowJobExecutionDao.deleteUnusedBatchJobInstances(); - - logger.info("Deleted the following Batch Job Execution related data for {} Job Executions.\n" + - "Batch Step Execution Context Rows: {}\n" + - "Batch Step Executions Rows: {}\n" + - "Batch Job Execution Context Rows: {}\n" + - "Batch Job Execution Param Rows: {}\n" + - "Batch Job Execution Rows: {}\n" + - "Batch Job Instance Rows: {}.", - jobExecutionIds.size(), - numberOfDeletedBatchStepExecutionContextRows, - numberOfDeletedBatchStepExecutionRows, - numberOfDeletedBatchJobExecutionContextRows, - numberOfDeletedBatchJobExecutionParamRows, - numberOfDeletedBatchJobExecutionRows, - numberOfDeletedUnusedBatchJobInstanceRows - ); - - auditData.put("Batch Step Execution Context", numberOfDeletedBatchStepExecutionContextRows); - auditData.put("Batch Step Executions", numberOfDeletedBatchStepExecutionRows); - auditData.put("Batch Job Execution Context Rows", numberOfDeletedBatchJobExecutionContextRows); - auditData.put("Batch Job Execution Params", numberOfDeletedBatchJobExecutionParamRows); - auditData.put("Batch Job Executions", numberOfDeletedBatchJobExecutionRows); - auditData.put("Batch Job Instance Rows", numberOfDeletedUnusedBatchJobInstanceRows); + if (!jobExecutionIds.isEmpty()) { + deleteRelatedJobAndStepExecutions(jobExecutionIds, auditData, chunkSize); } // Delete Task Related Data auditData.put("Deleted # of Task Executions", taskExecutionIdsWithChildren.size()); auditData.put("Deleted Task Execution IDs", StringUtils.collectionToDelimitedString(taskExecutionIdsWithChildren, ", ")); - final int numberOfDeletedTaskExecutionParamRows = dataflowTaskExecutionDao.deleteTaskExecutionParamsByTaskExecutionIds(taskExecutionIdsWithChildren); - final int numberOfDeletedTaskTaskBatchRelationshipRows = dataflowTaskExecutionDao.deleteTaskTaskBatchRelationshipsByTaskExecutionIds(taskExecutionIdsWithChildren); - final int numberOfDeletedTaskManifestRows = this.dataflowTaskExecutionMetadataDao.deleteManifestsByTaskExecutionIds(taskExecutionIdsWithChildren); - final int numberOfDeletedTaskExecutionRows = dataflowTaskExecutionDao.deleteTaskExecutionsByTaskExecutionIds(taskExecutionIdsWithChildren); - - logger.info("Deleted the following Task Execution related data for {} Task Executions:\n" + - "Task Execution Param Rows: {}\n" + - "Task Batch Relationship Rows: {}\n" + - "Task Manifest Rows: {}\n" + - "Task Execution Rows: {}.", + + 
final AtomicInteger numberOfDeletedTaskExecutionParamRows = new AtomicInteger(0); + final AtomicInteger numberOfDeletedTaskTaskBatchRelationshipRows = new AtomicInteger(0); + final AtomicInteger numberOfDeletedTaskManifestRows = new AtomicInteger(0); + final AtomicInteger numberOfDeletedTaskExecutionRows = new AtomicInteger(0); + + if (chunkSize <= 0) { + numberOfDeletedTaskExecutionParamRows.addAndGet(dataflowTaskExecutionDao.deleteTaskExecutionParamsByTaskExecutionIds(taskExecutionIdsWithChildren)); + numberOfDeletedTaskTaskBatchRelationshipRows.addAndGet(dataflowTaskExecutionDao.deleteTaskTaskBatchRelationshipsByTaskExecutionIds( + taskExecutionIdsWithChildren)); + numberOfDeletedTaskManifestRows.addAndGet(dataflowTaskExecutionMetadataDao.deleteManifestsByTaskExecutionIds(taskExecutionIdsWithChildren)); + numberOfDeletedTaskExecutionRows.addAndGet(dataflowTaskExecutionDao.deleteTaskExecutionsByTaskExecutionIds(taskExecutionIdsWithChildren)); + } else { + split(taskExecutionIdsWithChildren, chunkSize).forEach(taskExecutionIdSubsetList -> { + Set taskExecutionIdSubset = new HashSet<>(taskExecutionIdSubsetList); + numberOfDeletedTaskExecutionParamRows.addAndGet(dataflowTaskExecutionDao.deleteTaskExecutionParamsByTaskExecutionIds(taskExecutionIdSubset)); + numberOfDeletedTaskTaskBatchRelationshipRows.addAndGet(dataflowTaskExecutionDao.deleteTaskTaskBatchRelationshipsByTaskExecutionIds( + taskExecutionIdSubset)); + numberOfDeletedTaskManifestRows.addAndGet(dataflowTaskExecutionMetadataDao.deleteManifestsByTaskExecutionIds(taskExecutionIdSubset)); + numberOfDeletedTaskExecutionRows.addAndGet(dataflowTaskExecutionDao.deleteTaskExecutionsByTaskExecutionIds(taskExecutionIdSubset)); + }); + } + + logger.info(""" + Deleted the following Task Execution related data for {} Task Executions: + Task Execution Param Rows: {} + Task Batch Relationship Rows: {} + Task Manifest Rows: {} + Task Execution Rows: {}.""", taskExecutionIdsWithChildren.size(), numberOfDeletedTaskExecutionParamRows, numberOfDeletedTaskTaskBatchRelationshipRows, numberOfDeletedTaskManifestRows, numberOfDeletedTaskExecutionRows - ); + ); // Populate Audit Record - auditRecordService.populateAndSaveAuditRecordUsingMapData( - AuditOperationType.TASK, AuditActionType.DELETE, - taskExecutionIdsWithChildren.size() + " Task Execution Delete(s)", auditData, null); + auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.TASK, + AuditActionType.DELETE, + taskExecutionIdsWithChildren.size() + " Task Execution Delete(s)", + auditData, + null); + } + + private void deleteRelatedJobAndStepExecutions(Set jobExecutionIds, Map auditData, int chunkSize) { + + final Set stepExecutionIds = findStepExecutionIds(jobExecutionIds, chunkSize); + + final AtomicInteger numberOfDeletedBatchStepExecutionContextRows = new AtomicInteger(0); + if (!stepExecutionIds.isEmpty()) { + deleteBatchStepExecutionContextByStepExecutionIds(stepExecutionIds, chunkSize, numberOfDeletedBatchStepExecutionContextRows); + } + deleteStepAndJobExecutionsByJobExecutionId(jobExecutionIds, chunkSize, auditData, numberOfDeletedBatchStepExecutionContextRows); + + } + + private Set findStepExecutionIds(Set jobExecutionIds, int chunkSize) { + final Set stepExecutionIds = ConcurrentHashMap.newKeySet(); + if (chunkSize <= 0) { + stepExecutionIds.addAll(dataflowJobExecutionDao.findStepExecutionIds(jobExecutionIds)); + } else { + split(jobExecutionIds, chunkSize).forEach(jobExecutionIdSubsetList -> { + Set jobExecutionIdSubset = new HashSet<>(jobExecutionIdSubsetList); 
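Pausing the hunk briefly: the pattern emerging above fans large ID sets out into fixed-size chunks before each DAO delete, because (per the javadoc later in this file) some databases cap the number of elements in an `IN` clause; the constants `ORACLE_SERVER_CHUNK_SIZE = 998` and `SQL_SERVER_CHUNK_SIZE = 2098` sit just under Oracle's 1000-expression `IN`-list limit and SQL Server's 2100-parameter limit. A self-contained sketch of the same `split` technique, with the DAO call replaced by a printed placeholder:

```java
import java.util.Collection;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import java.util.stream.LongStream;

public class ChunkedDeleteDemo {
	// Same technique as the diff's split(..): group elements by a running
	// counter divided by the chunk size, yielding fixed-size sublists.
	static Collection<List<Long>> split(Collection<Long> input, int max) {
		final AtomicInteger count = new AtomicInteger(0);
		return input.stream()
				.collect(Collectors.groupingBy(s -> count.getAndIncrement() / max))
				.values();
	}

	public static void main(String[] args) {
		List<Long> ids = LongStream.rangeClosed(1, 2500).boxed().collect(Collectors.toList());
		// 998 mirrors ORACLE_SERVER_CHUNK_SIZE: Oracle caps IN-lists at 1000 expressions.
		split(ids, 998).forEach(chunk ->
				System.out.println("DELETE ... WHERE ID IN (...) -- " + chunk.size() + " ids"));
	}
}
```

The hunk resumes below with the chunked variant of each delete.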
+ stepExecutionIds.addAll(dataflowJobExecutionDao.findStepExecutionIds(jobExecutionIdSubset)); + }); + } + + return stepExecutionIds; + } + + private void deleteBatchStepExecutionContextByStepExecutionIds( + Set stepExecutionIds, + int chunkSize, + AtomicInteger numberOfDeletedBatchStepExecutionContextRows) { + if (chunkSize <= 0) { + numberOfDeletedBatchStepExecutionContextRows.addAndGet(dataflowJobExecutionDao.deleteBatchStepExecutionContextByStepExecutionIds(stepExecutionIds)); + + } else { + split(stepExecutionIds, chunkSize).forEach(stepExecutionIdSubsetList -> { + Set stepExecutionIdSubset = new HashSet<>(stepExecutionIdSubsetList); + numberOfDeletedBatchStepExecutionContextRows.addAndGet(dataflowJobExecutionDao.deleteBatchStepExecutionContextByStepExecutionIds( + stepExecutionIdSubset)); + }); + } + } + + private void deleteStepAndJobExecutionsByJobExecutionId( + Set jobExecutionIds, + int chunkSize, + Map auditData, + AtomicInteger numberOfDeletedBatchStepExecutionContextRows) { + final AtomicInteger numberOfDeletedBatchStepExecutionRows = new AtomicInteger(0); + final AtomicInteger numberOfDeletedBatchJobExecutionContextRows = new AtomicInteger(0); + final AtomicInteger numberOfDeletedBatchJobExecutionParamRows = new AtomicInteger(0); + final AtomicInteger numberOfDeletedBatchJobExecutionRows = new AtomicInteger(0); + + if (chunkSize <= 0) { + numberOfDeletedBatchStepExecutionRows.addAndGet(dataflowJobExecutionDao.deleteBatchStepExecutionsByJobExecutionIds(jobExecutionIds)); + numberOfDeletedBatchJobExecutionContextRows.addAndGet(dataflowJobExecutionDao.deleteBatchJobExecutionContextByJobExecutionIds(jobExecutionIds)); + numberOfDeletedBatchJobExecutionParamRows.addAndGet(dataflowJobExecutionDao.deleteBatchJobExecutionParamsByJobExecutionIds(jobExecutionIds)); + numberOfDeletedBatchJobExecutionRows.addAndGet(dataflowJobExecutionDao.deleteBatchJobExecutionByJobExecutionIds(jobExecutionIds)); + } else { + split(jobExecutionIds, chunkSize).forEach(jobExecutionIdSubsetList -> { + Set jobExecutionIdSubset = new HashSet<>(jobExecutionIdSubsetList); + numberOfDeletedBatchStepExecutionRows.addAndGet(dataflowJobExecutionDao.deleteBatchStepExecutionsByJobExecutionIds(jobExecutionIdSubset)); + numberOfDeletedBatchJobExecutionContextRows.addAndGet(dataflowJobExecutionDao.deleteBatchJobExecutionContextByJobExecutionIds( + jobExecutionIdSubset)); + numberOfDeletedBatchJobExecutionParamRows.addAndGet(dataflowJobExecutionDao.deleteBatchJobExecutionParamsByJobExecutionIds(jobExecutionIdSubset)); + numberOfDeletedBatchJobExecutionRows.addAndGet(dataflowJobExecutionDao.deleteBatchJobExecutionByJobExecutionIds(jobExecutionIdSubset)); + }); + } + + final int numberOfDeletedUnusedBatchJobInstanceRows = dataflowJobExecutionDao.deleteUnusedBatchJobInstances(); + + logger.info("Deleted the following Batch Job Execution related data for {} Job Executions.\n" + "Batch Step Execution Context Rows: {}\n" + "Batch Step Executions Rows: {}\n" + "Batch Job Execution Context Rows: {}\n" + "Batch Job Execution Param Rows: {}\n" + "Batch Job Execution Rows: {}\n" + "Batch Job Instance Rows: {}.", + jobExecutionIds.size(), + numberOfDeletedBatchStepExecutionContextRows, + numberOfDeletedBatchStepExecutionRows, + numberOfDeletedBatchJobExecutionContextRows, + numberOfDeletedBatchJobExecutionParamRows, + numberOfDeletedBatchJobExecutionRows, + numberOfDeletedUnusedBatchJobInstanceRows); + + auditData.put("Batch Step Execution Context", numberOfDeletedBatchStepExecutionContextRows); + auditData.put("Batch Step 
Executions", numberOfDeletedBatchStepExecutionRows); + auditData.put("Batch Job Execution Context Rows", numberOfDeletedBatchJobExecutionContextRows); + auditData.put("Batch Job Execution Params", numberOfDeletedBatchJobExecutionParamRows); + auditData.put("Batch Job Executions", numberOfDeletedBatchJobExecutionRows); + auditData.put("Batch Job Instance Rows", numberOfDeletedUnusedBatchJobInstanceRows); + } + + /** + * Determines the maximum chunk size for a given database type. If {@code taskDeleteChunkSize} is + * greater than zero this overrides the chunk size for the specific database type. + * If the database type has no fixed number of maximum elements allowed in the {@code IN} clause + * then zero is returned. + * + * @param dataSource the datasource used by data flow. + * @return the chunk size to be used for deleting task executions. + */ + private int getTaskExecutionDeleteChunkSize(DataSource dataSource) { + int result = this.taskDeleteChunkSize; + if (this.taskDeleteChunkSize < 1) { + try { + DatabaseType databaseType = DatabaseType.fromMetaData(dataSource); + String name = databaseType.name(); + if (name.equals("SQLSERVER")) { + result = SQL_SERVER_CHUNK_SIZE; + } + if (name.startsWith("ORACLE")) { + result = ORACLE_SERVER_CHUNK_SIZE; + } + } catch (MetaDataAccessException mdae) { + logger.warn("Unable to retrieve metadata for database when deleting task executions", mdae); + } + } + return result; + } + + static Collection> split(Collection input, int max) { + final AtomicInteger count = new AtomicInteger(0); + return input.stream().collect(Collectors.groupingBy(s -> count.getAndIncrement() / max)).values(); } @Override public void deleteTaskDefinition(String name) { - TaskDefinition taskDefinition = this.taskDefinitionRepository.findById(name) - .orElseThrow(() -> new NoSuchTaskDefinitionException(name)); + TaskDefinition taskDefinition = this.taskDefinitionRepository.findById(name).orElseThrow(() -> new NoSuchTaskDefinitionException(name)); deleteTaskDefinition(taskDefinition); - auditRecordService.populateAndSaveAuditRecord( - AuditOperationType.TASK, AuditActionType.DELETE, - taskDefinition.getTaskName(), this.argumentSanitizer.sanitizeTaskDsl(taskDefinition), null); + auditRecordService.populateAndSaveAuditRecord(AuditOperationType.TASK, + AuditActionType.DELETE, + taskDefinition.getTaskName(), + this.argumentSanitizer.sanitizeTaskDsl(taskDefinition), + null); } @Override public void deleteTaskDefinition(String name, boolean cleanup) { if (cleanup) { - Set taskExecutionIds = this.dataflowTaskExecutionDao.getTaskExecutionIdsByTaskName(name); - final Set actionsAsSet = new HashSet<>(); - actionsAsSet.add(TaskExecutionControllerDeleteAction.CLEANUP); - actionsAsSet.add(TaskExecutionControllerDeleteAction.REMOVE_DATA); - if (!taskExecutionIds.isEmpty()) { - cleanupExecutions(actionsAsSet, taskExecutionIds); - } + Set taskExecutionIds = dataflowTaskExecutionDao.getTaskExecutionIdsByTaskName(name); + final Set actionsAsSet = new HashSet<>(); + actionsAsSet.add(TaskExecutionControllerDeleteAction.CLEANUP); + actionsAsSet.add(TaskExecutionControllerDeleteAction.REMOVE_DATA); + if (!taskExecutionIds.isEmpty()) { + performCleanupExecutions(actionsAsSet, taskExecutionIds); + } } this.deleteTaskDefinition(name); } @@ -334,9 +554,11 @@ public void deleteAll() { for (TaskDefinition taskDefinition : allTaskDefinition) { deleteTaskDefinition(taskDefinition); - auditRecordService.populateAndSaveAuditRecord( - AuditOperationType.TASK, AuditActionType.DELETE, - 
taskDefinition.getTaskName(), this.argumentSanitizer.sanitizeTaskDsl(taskDefinition), null); + auditRecordService.populateAndSaveAuditRecord(AuditOperationType.TASK, + AuditActionType.DELETE, + taskDefinition.getTaskName(), + this.argumentSanitizer.sanitizeTaskDsl(taskDefinition), + null); } } @@ -357,8 +579,7 @@ private void deleteTaskDefinition(TaskDefinition taskDefinition) { } try { destroyChildTask(childTaskPrefix + childName); - } - catch (ObjectOptimisticLockingFailureException e) { + } catch (ObjectOptimisticLockingFailureException e) { logger.warn("Attempted delete on a child task that is currently being deleted"); } }); @@ -366,14 +587,13 @@ private void deleteTaskDefinition(TaskDefinition taskDefinition) { // destroy normal task or composed parent task try { destroyPrimaryTask(taskDefinition.getTaskName()); - } catch (ObjectOptimisticLockingFailureException e) { - logger.warn(String.format("Attempted delete on task %s that is currently being deleted", taskDefinition.getTaskName())); + } catch (ObjectOptimisticLockingFailureException e) { + logger.warn("Attempted delete on task {} that is currently being deleted", taskDefinition.getTaskName()); } } private void destroyPrimaryTask(String name) { - TaskDefinition taskDefinition = taskDefinitionRepository.findById(name) - .orElseThrow(() -> new NoSuchTaskDefinitionException(name)); + TaskDefinition taskDefinition = taskDefinitionRepository.findById(name).orElseThrow(() -> new NoSuchTaskDefinitionException(name)); destroyTask(taskDefinition); } @@ -384,18 +604,31 @@ private void destroyChildTask(String name) { private void destroyTask(TaskDefinition taskDefinition) { taskDefinitionRepository.deleteById(taskDefinition.getName()); - TaskDeployment taskDeployment = - this.taskDeploymentRepository.findTopByTaskDefinitionNameOrderByCreatedOnAsc(taskDefinition.getTaskName()); + TaskDeployment taskDeployment = this.taskDeploymentRepository.findTopByTaskDefinitionNameOrderByCreatedOnAsc(taskDefinition.getTaskName()); if (taskDeployment != null) { Launcher launcher = launcherRepository.findByName(taskDeployment.getPlatformName()); if (launcher != null) { TaskLauncher taskLauncher = launcher.getTaskLauncher(); taskLauncher.destroy(taskDefinition.getName()); } + } else { + if (!findAndDeleteTaskResourcesAcrossPlatforms(taskDefinition)) { + logger.info("TaskLauncher.destroy not invoked for task " + taskDefinition.getTaskName() + ". Did not find a previously launched task to destroy."); + } } - else { - logger.info("TaskLauncher.destroy not invoked for task " + - taskDefinition.getTaskName() + ". 
Did not find a previously launched task to destroy."); + } + + private boolean findAndDeleteTaskResourcesAcrossPlatforms(TaskDefinition taskDefinition) { + boolean result = false; + for (Launcher launcher : launcherRepository.findAll()) { + try { + launcher.getTaskLauncher().destroy(taskDefinition.getName()); + logger.info("Deleted task app resources for {} in platform {}", taskDefinition.getName(), launcher.getName()); + result = true; + } catch (Exception ex) { + logger.info("Attempted delete of app resources for {} but none found on platform {}.", taskDefinition.getName(), launcher.getName()); + } } + return result; } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionInfoService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionInfoService.java index 809c0cacc8..a4992740ef 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionInfoService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionInfoService.java @@ -19,8 +19,13 @@ import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties; import org.springframework.cloud.dataflow.core.AllPlatformsTaskExecutionInformation; @@ -37,11 +42,12 @@ import org.springframework.cloud.dataflow.server.repository.NoSuchTaskDefinitionException; import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; import org.springframework.cloud.dataflow.server.service.TaskExecutionInfoService; +import org.springframework.cloud.dataflow.server.task.DataflowTaskExplorer; import org.springframework.cloud.deployer.spi.core.AppDefinition; import org.springframework.cloud.deployer.spi.core.AppDeploymentRequest; -import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.core.io.Resource; import org.springframework.util.Assert; +import org.springframework.util.StringUtils; /** * Default implementation of the {@link DefaultTaskExecutionInfoService} interface. @@ -59,8 +65,10 @@ * @author Michael Wirth * @author David Turanski * @author Daniel Serleg + * @author Corneil du Plessis */ public class DefaultTaskExecutionInfoService implements TaskExecutionInfoService { + private final static Logger logger = LoggerFactory.getLogger(DefaultTaskExecutionInfoService.class); private final DataSourceProperties dataSourceProperties; @@ -72,7 +80,7 @@ public class DefaultTaskExecutionInfoService implements TaskExecutionInfoService /** * Used to read TaskExecutions. */ - private final TaskExplorer taskExplorer; + private final DataflowTaskExplorer taskExplorer; private final TaskDefinitionRepository taskDefinitionRepository; @@ -87,48 +95,26 @@ public class DefaultTaskExecutionInfoService implements TaskExecutionInfoService /** * Initializes the {@link DefaultTaskExecutionInfoService}. * - * @param dataSourceProperties the data source properties. - * @param appRegistryService URI registry this service will use to look up app URIs. 
- * @param taskExplorer the explorer this service will use to lookup task executions - * @param taskDefinitionRepository the {@link TaskDefinitionRepository} this service will - * use for task CRUD operations. - * @param taskConfigurationProperties the properties used to define the behavior of tasks - * @param launcherRepository the launcher repository - * @param taskPlatforms the task platforms - */ - @Deprecated - public DefaultTaskExecutionInfoService(DataSourceProperties dataSourceProperties, - AppRegistryService appRegistryService, - TaskExplorer taskExplorer, - TaskDefinitionRepository taskDefinitionRepository, - TaskConfigurationProperties taskConfigurationProperties, - LauncherRepository launcherRepository, - List taskPlatforms) { - this(dataSourceProperties, appRegistryService, taskExplorer, taskDefinitionRepository, - taskConfigurationProperties, launcherRepository, taskPlatforms, null); - } - - /** - * Initializes the {@link DefaultTaskExecutionInfoService}. - * - * @param dataSourceProperties the data source properties. - * @param appRegistryService URI registry this service will use to look up app URIs. - * @param taskExplorer the explorer this service will use to lookup task executions - * @param taskDefinitionRepository the {@link TaskDefinitionRepository} this service will - * use for task CRUD operations. - * @param taskConfigurationProperties the properties used to define the behavior of tasks - * @param launcherRepository the launcher repository - * @param taskPlatforms the task platforms + * @param dataSourceProperties the data source properties. + * @param appRegistryService URI registry this service will use to look up app URIs. + * @param taskExplorer the explorer this service will use to lookup task executions + * @param taskDefinitionRepository the {@link TaskDefinitionRepository} this service will + * use for task CRUD operations. 
+ * @param taskConfigurationProperties the properties used to define the behavior of tasks + * @param launcherRepository the launcher repository + * @param taskPlatforms the task platforms * @param composedTaskRunnerConfigurationProperties the properties used to define the behavior of CTR */ - public DefaultTaskExecutionInfoService(DataSourceProperties dataSourceProperties, - AppRegistryService appRegistryService, - TaskExplorer taskExplorer, - TaskDefinitionRepository taskDefinitionRepository, - TaskConfigurationProperties taskConfigurationProperties, - LauncherRepository launcherRepository, - List taskPlatforms, - ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties) { + public DefaultTaskExecutionInfoService( + DataSourceProperties dataSourceProperties, + AppRegistryService appRegistryService, + DataflowTaskExplorer taskExplorer, + TaskDefinitionRepository taskDefinitionRepository, + TaskConfigurationProperties taskConfigurationProperties, + LauncherRepository launcherRepository, + List taskPlatforms, + ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties + ) { Assert.notNull(dataSourceProperties, "DataSourceProperties must not be null"); Assert.notNull(appRegistryService, "AppRegistryService must not be null"); Assert.notNull(taskDefinitionRepository, "TaskDefinitionRepository must not be null"); @@ -148,8 +134,10 @@ public DefaultTaskExecutionInfoService(DataSourceProperties dataSourceProperties } @Override - public TaskExecutionInformation findTaskExecutionInformation(String taskName, - Map taskDeploymentProperties, boolean addDatabaseCredentials, Map previousTaskDeploymentProperties) { + public TaskExecutionInformation findTaskExecutionInformation( + String taskName, + Map taskDeploymentProperties, boolean addDatabaseCredentials, Map previousTaskDeploymentProperties + ) { Assert.hasText(taskName, "The provided taskName must not be null or empty."); Assert.notNull(taskDeploymentProperties, "The provided runtimeProperties must not be null."); @@ -157,11 +145,11 @@ public TaskExecutionInformation findTaskExecutionInformation(String taskName, taskExecutionInformation.setTaskDeploymentProperties(taskDeploymentProperties); TaskDefinition originalTaskDefinition = taskDefinitionRepository.findById(taskName) - .orElseThrow(() -> new NoSuchTaskDefinitionException(taskName)); + .orElseThrow(() -> new NoSuchTaskDefinitionException(taskName)); //TODO: This normally called by JPA automatically but `AutoCreateTaskDefinitionTests` fails without this. originalTaskDefinition.initialize(); TaskParser taskParser = new TaskParser(originalTaskDefinition.getName(), originalTaskDefinition.getDslText(), - true, true); + true, true); TaskNode taskNode = taskParser.parse(); // if composed task definition replace definition with one composed task // runner and executable graph. 
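Before the next hunk: both the scheduler and the execution-info service above hinge on parsing the task DSL and detecting composed definitions, which are then swapped for a single composed-task-runner app driving the executable graph. A minimal sketch using the same `TaskParser`/`TaskNode` calls seen in this diff; it needs spring-cloud-dataflow-core on the classpath, and `taskA`/`taskB` are hypothetical registered task apps:

```java
import org.springframework.cloud.dataflow.core.dsl.TaskNode;
import org.springframework.cloud.dataflow.core.dsl.TaskParser;

public class ComposedDslCheck {
	public static void main(String[] args) {
		// Hypothetical composed definition: taskB runs after taskA succeeds.
		String dsl = "taskA && taskB";
		TaskNode node = new TaskParser("demo", dsl, true, true).parse();
		if (node.isComposed()) {
			// The services above replace the definition with one that launches the
			// composed-task-runner against this executable form of the graph.
			System.out.println("executable DSL: " + node.toExecutableDSL());
		}
	}
}
```

The `&&` operator expresses sequential execution, which is why a composed definition cannot be launched as a single app and needs the runner.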
@@ -169,34 +157,30 @@ public TaskExecutionInformation findTaskExecutionInformation(String taskName, AppRegistration appRegistration; if (taskNode.isComposed()) { taskDefinitionToUse = new TaskDefinition(originalTaskDefinition.getName(), - TaskServiceUtils.createComposedTaskDefinition(taskNode.toExecutableDSL())); + TaskServiceUtils.createComposedTaskDefinition(taskNode.toExecutableDSL())); taskExecutionInformation.setTaskDeploymentProperties( - TaskServiceUtils.establishComposedTaskProperties(taskDeploymentProperties, - taskNode)); + TaskServiceUtils.establishComposedTaskProperties(taskDeploymentProperties, + taskNode)); taskDefinitionToUse = TaskServiceUtils.updateTaskProperties(taskDefinitionToUse, - dataSourceProperties, addDatabaseCredentials); + dataSourceProperties, addDatabaseCredentials); try { appRegistration = new AppRegistration(ComposedTaskRunnerConfigurationProperties.COMPOSED_TASK_RUNNER_NAME, - ApplicationType.task, - new URI(TaskServiceUtils.getComposedTaskLauncherUri(this.taskConfigurationProperties, - this.composedTaskRunnerConfigurationProperties))); - } - catch (URISyntaxException e) { + ApplicationType.task, + new URI(this.composedTaskRunnerConfigurationProperties.getUri())); + } catch (URISyntaxException e) { throw new IllegalStateException("Invalid Compose Task Runner Resource", e); } - } - else { + } else { taskDefinitionToUse = TaskServiceUtils.updateTaskProperties(originalTaskDefinition, - dataSourceProperties, addDatabaseCredentials); + dataSourceProperties, addDatabaseCredentials); String label = null; if (taskNode.getTaskApp() != null) { TaskAppNode taskAppNode = taskNode.getTaskApp(); if (taskAppNode.getLabel() != null) { label = taskAppNode.getLabel().stringValue(); - } - else { + } else { label = taskAppNode.getName(); } } @@ -208,11 +192,10 @@ public TaskExecutionInformation findTaskExecutionInformation(String taskName, // if we have version, use that or rely on default version set if (version == null) { appRegistration = appRegistryService.find(taskDefinitionToUse.getRegisteredAppName(), - ApplicationType.task); - } - else { + ApplicationType.task); + } else { appRegistration = appRegistryService.find(taskDefinitionToUse.getRegisteredAppName(), - ApplicationType.task, version); + ApplicationType.task, version); } } @@ -226,6 +209,70 @@ public TaskExecutionInformation findTaskExecutionInformation(String taskName, return taskExecutionInformation; } + @Override + public Set composedTaskChildNames(String taskName) { + TaskDefinition taskDefinition = taskDefinitionRepository.findByTaskName(taskName); + TaskParser taskParser = new TaskParser(taskDefinition.getTaskName(), taskDefinition.getDslText(), true, true); + Set result = new HashSet<>(); + TaskNode taskNode = taskParser.parse(); + if (taskNode.isComposed()) { + extractNames(taskNode, result); + } + return result; + } + + @Override + public Set taskNames(String taskName) { + TaskDefinition taskDefinition = taskDefinitionRepository.findByTaskName(taskName); + TaskParser taskParser = new TaskParser(taskDefinition.getTaskName(), taskDefinition.getDslText(), true, true); + Set result = new HashSet<>(); + TaskNode taskNode = taskParser.parse(); + extractNames(taskNode, result); + return result; + } + + private void extractNames(TaskNode taskNode, Set result) { + for (TaskApp subTask : taskNode.getTaskApps()) { + logger.debug("subTask:{}:{}:{}:{}", subTask.getName(), subTask.getTaskName(), subTask.getLabel(), subTask); + TaskDefinition subTaskDefinition = 
taskDefinitionRepository.findByTaskName(subTask.getName()); + if (subTaskDefinition != null) { + if(StringUtils.hasText(subTask.getLabel())) { + result.add(subTaskDefinition.getRegisteredAppName() + "," + subTask.getLabel()); + } else { + result.add(subTaskDefinition.getRegisteredAppName()); + } + TaskParser subTaskParser = new TaskParser(subTaskDefinition.getTaskName(), subTaskDefinition.getDslText(), true, true); + TaskNode subTaskNode = subTaskParser.parse(); + if (subTaskNode != null && subTaskNode.getTaskApp() != null) { + for (TaskApp subSubTask : subTaskNode.getTaskApps()) { + logger.debug("subSubTask:{}:{}:{}:{}", subSubTask.getName(), subSubTask.getTaskName(), subSubTask.getLabel(), subSubTask); + TaskDefinition subSubTaskDefinition = taskDefinitionRepository.findByTaskName(subSubTask.getName()); + if (subSubTaskDefinition != null) { + if (subSubTask.getLabel() != null && !subTask.getLabel().contains("$")) { + result.add(subSubTaskDefinition.getRegisteredAppName() + "," + subSubTask.getLabel()); + } else { + result.add(subSubTaskDefinition.getRegisteredAppName()); + } + } + } + } + } else { + if ((subTask.getLabel() == null || subTask.getLabel().equals(subTask.getName())) && !subTask.getName().contains("$")) { + result.add(subTask.getName()); + } else { + if (!subTask.getName().contains("$") && !subTask.getLabel().contains("$")) { + result.add(subTask.getName() + "," + subTask.getLabel()); + } else if (!subTask.getName().contains("$")) { + result.add(subTask.getName()); + } else if (!subTask.getTaskName().contains("$")) { + result.add(subTask.getTaskName()); + } + } + } + } + } + + @Override public List createTaskDeploymentRequests(String taskName, String dslText) { List appDeploymentRequests = new ArrayList<>(); TaskParser taskParser = new TaskParser(taskName, dslText, true, true); @@ -239,7 +286,7 @@ public List createTaskDeploymentRequests(String taskName, TaskNode subTaskNode = subTaskParser.parse(); String subTaskName = subTaskNode.getTaskApp().getName(); AppRegistration appRegistration = appRegistryService.find(subTaskName, - ApplicationType.task); + ApplicationType.task); Assert.notNull(appRegistration, "Unknown task app: " + subTask.getName()); Resource appResource = appRegistryService.getAppResource(appRegistration); @@ -248,12 +295,13 @@ public List createTaskDeploymentRequests(String taskName, AppDefinition appDefinition = new AppDefinition(subTask.getName(), subTaskNode.getTaskApp().getArgumentsAsMap()); AppDeploymentRequest appDeploymentRequest = new AppDeploymentRequest(appDefinition, - appResource, null, null); + appResource, null, null); appDeploymentRequests.add(appDeploymentRequest); } } return appDeploymentRequests; } + @Override public AllPlatformsTaskExecutionInformation findAllPlatformTaskExecutionInformation() { return new AllPlatformsTaskExecutionInformation(this.taskPlatforms); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionRepositoryService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionRepositoryService.java index 8b07be2e57..43445d884a 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionRepositoryService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionRepositoryService.java @@ -30,16 +30,19 @@ @Transactional public 
class DefaultTaskExecutionRepositoryService implements TaskExecutionCreationService { - private TaskRepository taskRepository; + private final TaskRepository taskRepository; - public DefaultTaskExecutionRepositoryService(TaskRepository taskRepository) { + + public DefaultTaskExecutionRepositoryService( + TaskRepository taskRepository) { Assert.notNull(taskRepository, "taskRepository must not be null"); this.taskRepository = taskRepository; + } @Override @Transactional(propagation = Propagation.REQUIRES_NEW) - public TaskExecution createTaskExecution(String taskName) { + public TaskExecution createTaskExecution(String taskName, String version) { return taskRepository.createTaskExecution(taskName); } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionService.java index cab054f0a0..32c6bfd274 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionService.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2021 the original author or authors. + * Copyright 2015-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,7 +18,9 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; +import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -27,6 +29,7 @@ import java.util.SortedSet; import java.util.TreeSet; import java.util.concurrent.ConcurrentHashMap; +import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.StreamSupport; @@ -37,6 +40,7 @@ import org.springframework.cloud.dataflow.audit.service.AuditRecordService; import org.springframework.cloud.dataflow.core.AuditActionType; import org.springframework.cloud.dataflow.core.AuditOperationType; +import org.springframework.cloud.dataflow.core.LaunchResponse; import org.springframework.cloud.dataflow.core.Launcher; import org.springframework.cloud.dataflow.core.TaskDefinition; import org.springframework.cloud.dataflow.core.TaskDeployment; @@ -52,6 +56,7 @@ import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao; import org.springframework.cloud.dataflow.server.repository.NoSuchTaskDefinitionException; import org.springframework.cloud.dataflow.server.repository.NoSuchTaskExecutionException; +import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; import org.springframework.cloud.dataflow.server.repository.TaskDeploymentRepository; import org.springframework.cloud.dataflow.server.repository.TaskExecutionMissingExternalIdException; import org.springframework.cloud.dataflow.server.service.TaskExecutionCreationService; @@ -60,12 +65,16 @@ import org.springframework.cloud.dataflow.server.service.TaskSaveService; import org.springframework.cloud.dataflow.server.service.impl.diff.TaskAnalysisReport; import org.springframework.cloud.dataflow.server.service.impl.diff.TaskAnalyzer; +import org.springframework.cloud.dataflow.server.task.DataflowTaskExecutionQueryDao; +import 
org.springframework.cloud.dataflow.server.task.DataflowTaskExplorer; import org.springframework.cloud.deployer.spi.core.AppDeploymentRequest; import org.springframework.cloud.deployer.spi.task.LaunchState; import org.springframework.cloud.deployer.spi.task.TaskLauncher; +import org.springframework.cloud.task.listener.TaskException; +import org.springframework.cloud.task.listener.TaskExecutionException; import org.springframework.cloud.task.repository.TaskExecution; -import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.cloud.task.repository.TaskRepository; +import org.springframework.core.env.PropertyResolver; import org.springframework.core.io.Resource; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; @@ -88,6 +97,7 @@ * @author Michael Wirth * @author David Turanski * @author Daniel Serleg + * @author Corneil du Plessis */ @Transactional public class DefaultTaskExecutionService implements TaskExecutionService { @@ -124,13 +134,15 @@ public class DefaultTaskExecutionService implements TaskExecutionService { private final TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator; - private final TaskExplorer taskExplorer; + private final DataflowTaskExplorer taskExplorer; private final DataflowTaskExecutionDao dataflowTaskExecutionDao; private final DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao; - private OAuth2TokenUtilsService oauth2TokenUtilsService; + private final OAuth2TokenUtilsService oauth2TokenUtilsService; + + private final TaskDefinitionRepository taskDefinitionRepository; private final Map> tasksBeingUpgraded = new ConcurrentHashMap<>(); @@ -140,79 +152,61 @@ public class DefaultTaskExecutionService implements TaskExecutionService { private boolean autoCreateTaskDefinitions; - private TaskConfigurationProperties taskConfigurationProperties; + private final TaskConfigurationProperties taskConfigurationProperties; - private ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties; + private final ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties; - /** - * Initializes the {@link DefaultTaskExecutionService}. - * - * @param launcherRepository the repository of task launcher used to launch task apps. 
- * @param auditRecordService the audit record service - * @param taskRepository the repository to use for accessing and updating task executions - * @param taskExecutionInfoService the task execution info service - * @param taskDeploymentRepository the repository to track task deployment - * @param taskExecutionInfoService the service used to setup a task execution - * @param taskExecutionRepositoryService the service used to create the task execution - * @param taskAppDeploymentRequestCreator the task app deployment request creator - * @param taskExplorer the task explorer - * @param dataflowTaskExecutionDao the dataflow task execution dao - * @param dataflowTaskExecutionMetadataDao repository used to manipulate task manifests - * @param oauth2TokenUtilsService the oauth2 token server - * @param taskSaveService the task save service - */ - @Deprecated - public DefaultTaskExecutionService(LauncherRepository launcherRepository, - AuditRecordService auditRecordService, - TaskRepository taskRepository, - TaskExecutionInfoService taskExecutionInfoService, - TaskDeploymentRepository taskDeploymentRepository, - TaskExecutionCreationService taskExecutionRepositoryService, - TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator, - TaskExplorer taskExplorer, - DataflowTaskExecutionDao dataflowTaskExecutionDao, - DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao, - OAuth2TokenUtilsService oauth2TokenUtilsService, - TaskSaveService taskSaveService, - TaskConfigurationProperties taskConfigurationProperties) { - this(launcherRepository, auditRecordService, taskRepository, taskExecutionInfoService, taskDeploymentRepository, - taskExecutionRepositoryService, taskAppDeploymentRequestCreator, taskExplorer, dataflowTaskExecutionDao, - dataflowTaskExecutionMetadataDao, oauth2TokenUtilsService, taskSaveService, taskConfigurationProperties, - null); - } + private final DataflowTaskExecutionQueryDao dataflowTaskExecutionQueryDao; + + private final PropertyResolver propertyResolver; + + private static final Pattern TASK_NAME_PATTERN = Pattern.compile("[a-zA-Z]([-a-zA-Z0-9]*[a-zA-Z0-9])?"); + + private static final String TASK_NAME_VALIDATION_MSG = "Task name must consist of alphanumeric characters " + + "or '-', start with an alphabetic character, and end with an alphanumeric character (e.g. 'my-name', " + + "or 'abc-123')"; /** * Initializes the {@link DefaultTaskExecutionService}. * - * @param launcherRepository the repository of task launcher used to launch task apps. - * @param auditRecordService the audit record service - * @param taskRepository the repository to use for accessing and updating task executions - * @param taskExecutionInfoService the task execution info service - * @param taskDeploymentRepository the repository to track task deployment - * @param taskExecutionInfoService the service used to setup a task execution - * @param taskExecutionRepositoryService the service used to create the task execution - * @param taskAppDeploymentRequestCreator the task app deployment request creator - * @param taskExplorer the task explorer - * @param dataflowTaskExecutionDao the dataflow task execution dao - * @param dataflowTaskExecutionMetadataDao repository used to manipulate task manifests - * @param oauth2TokenUtilsService the oauth2 token server - * @param taskSaveService the task save service + * @param propertyResolver the spring application context + * @param launcherRepository the repository of task launcher used to launch task apps. 
+ * @param auditRecordService the audit record service + * @param taskRepository the repository to use for accessing and updating task executions + * @param taskExecutionInfoService the task execution info service + * @param taskDeploymentRepository the repository to track task deployment + * @param taskDefinitionRepository the repository to query the task definition + * @param taskExecutionRepositoryService the service used to create the task execution + * @param taskAppDeploymentRequestCreator the task app deployment request creator + * @param taskExplorer the task explorer + * @param dataflowTaskExecutionDao the dataflow task execution dao + * @param dataflowTaskExecutionMetadataDao repository used to manipulate task manifests + * @param dataflowTaskExecutionQueryDao repository to query aggregate task execution data. + * @param oauth2TokenUtilsService the oauth2 token server + * @param taskSaveService the task save service + * @param taskConfigurationProperties task configuration properties * @param composedTaskRunnerConfigurationProperties properties used to configure the composed task runner */ - public DefaultTaskExecutionService(LauncherRepository launcherRepository, - AuditRecordService auditRecordService, - TaskRepository taskRepository, - TaskExecutionInfoService taskExecutionInfoService, - TaskDeploymentRepository taskDeploymentRepository, - TaskExecutionCreationService taskExecutionRepositoryService, - TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator, - TaskExplorer taskExplorer, - DataflowTaskExecutionDao dataflowTaskExecutionDao, - DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao, - OAuth2TokenUtilsService oauth2TokenUtilsService, - TaskSaveService taskSaveService, - TaskConfigurationProperties taskConfigurationProperties, - ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties) { + public DefaultTaskExecutionService( + PropertyResolver propertyResolver, + LauncherRepository launcherRepository, + AuditRecordService auditRecordService, + TaskRepository taskRepository, + TaskExecutionInfoService taskExecutionInfoService, + TaskDeploymentRepository taskDeploymentRepository, + TaskDefinitionRepository taskDefinitionRepository, + TaskExecutionCreationService taskExecutionRepositoryService, + TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator, + DataflowTaskExplorer taskExplorer, + DataflowTaskExecutionDao dataflowTaskExecutionDao, + DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao, + DataflowTaskExecutionQueryDao dataflowTaskExecutionQueryDao, + OAuth2TokenUtilsService oauth2TokenUtilsService, + TaskSaveService taskSaveService, + TaskConfigurationProperties taskConfigurationProperties, + ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties + ) { + Assert.notNull(propertyResolver, "propertyResolver must not be null"); Assert.notNull(launcherRepository, "launcherRepository must not be null"); Assert.notNull(auditRecordService, "auditRecordService must not be null"); Assert.notNull(taskExecutionInfoService, "taskExecutionInfoService must not be null"); @@ -226,13 +220,16 @@ public DefaultTaskExecutionService(LauncherRepository launcherRepository, Assert.notNull(dataflowTaskExecutionMetadataDao, "dataflowTaskExecutionMetadataDao must not be null"); Assert.notNull(taskSaveService, "taskSaveService must not be null"); Assert.notNull(taskConfigurationProperties, "taskConfigurationProperties must not be null"); + Assert.notNull(taskDefinitionRepository, 
"taskDefinitionRepository must not be null"); + this.propertyResolver = propertyResolver; this.oauth2TokenUtilsService = oauth2TokenUtilsService; this.launcherRepository = launcherRepository; this.auditRecordService = auditRecordService; this.taskRepository = taskRepository; this.taskExecutionInfoService = taskExecutionInfoService; this.taskDeploymentRepository = taskDeploymentRepository; + this.taskDefinitionRepository = taskDefinitionRepository; this.taskExecutionRepositoryService = taskExecutionRepositoryService; this.taskAppDeploymentRequestCreator = taskAppDeploymentRequestCreator; this.taskExplorer = taskExplorer; @@ -241,148 +238,191 @@ public DefaultTaskExecutionService(LauncherRepository launcherRepository, this.taskSaveService = taskSaveService; this.taskConfigurationProperties = taskConfigurationProperties; this.composedTaskRunnerConfigurationProperties = composedTaskRunnerConfigurationProperties; + this.dataflowTaskExecutionQueryDao = dataflowTaskExecutionQueryDao; + } /** * Launch a task. - * @param taskName Name of the task definition or registered task application. - * If a task definition does not exist, one will be created if `autoCreateTask-Definitions` is true. Must not be null or empty. + * + * @param taskName Name of the task definition or registered task application. + * If a task definition does not exist, one will be created if `autoCreateTask-Definitions` is true. Must not be null or empty. * @param taskDeploymentProperties Optional deployment properties. Must not be null. - * @param commandLineArgs Optional runtime commandline argument + * @param commandLineArgs Optional runtime commandline argument * @return the task execution ID. */ @Override - public long executeTask(String taskName, Map taskDeploymentProperties, List commandLineArgs) { + public LaunchResponse executeTask(String taskName, Map taskDeploymentProperties, List commandLineArgs) { // Get platform name and fallback to 'default' String platformName = getPlatform(taskDeploymentProperties); - + String platformType = StreamSupport.stream(launcherRepository.findAll().spliterator(), true) + .filter(deployer -> deployer.getName().equalsIgnoreCase(platformName)) + .map(Launcher::getType) + .findFirst() + .orElse("unknown"); + if (platformType.equals(TaskPlatformFactory.KUBERNETES_PLATFORM_TYPE) && !TASK_NAME_PATTERN.matcher(taskName).matches()) { + throw new TaskException(String.format("Task name %s is invalid. %s", taskName, TASK_NAME_VALIDATION_MSG)); + } // Naive local state to prevent parallel launches to break things up - if(this.tasksBeingUpgraded.containsKey(taskName)) { + if (this.tasksBeingUpgraded.containsKey(taskName)) { List platforms = this.tasksBeingUpgraded.get(taskName); - if(platforms.contains(platformName)) { + if (platforms.contains(platformName)) { throw new IllegalStateException(String.format( - "Unable to launch %s on platform %s because it is being upgraded", taskName, platformName)); + "Unable to launch %s on platform %s because it is being upgraded", taskName, platformName)); } } Launcher launcher = this.launcherRepository.findByName(platformName); + if (launcher == null) { + throw new IllegalStateException(String.format("No launcher was available for platform %s", platformName)); + } validateTaskName(taskName, launcher); // Remove since the key for task platform name will not pass validation for app, // deployer, or scheduler prefix. 
// Then validate - if (taskDeploymentProperties.containsKey(TASK_PLATFORM_NAME)) { - taskDeploymentProperties.remove(TASK_PLATFORM_NAME); - } - DeploymentPropertiesUtils.validateDeploymentProperties(taskDeploymentProperties); - + Map deploymentProperties = new HashMap<>(taskDeploymentProperties); + deploymentProperties.remove(TASK_PLATFORM_NAME); + DeploymentPropertiesUtils.validateDeploymentProperties(deploymentProperties); TaskDeployment existingTaskDeployment = taskDeploymentRepository - .findTopByTaskDefinitionNameOrderByCreatedOnAsc(taskName); + .findTopByTaskDefinitionNameOrderByCreatedOnAsc(taskName); if (existingTaskDeployment != null) { if (!existingTaskDeployment.getPlatformName().equals(platformName)) { throw new IllegalStateException(String.format( - "Task definition [%s] has already been deployed on platform [%s]. " + - "Requested to deploy on platform [%s].", - taskName, existingTaskDeployment.getPlatformName(), platformName)); + "Task definition [%s] has already been deployed on platform [%s]. " + + "Requested to deploy on platform [%s].", + taskName, existingTaskDeployment.getPlatformName(), platformName)); } } + List commandLineArguments = new ArrayList<>(commandLineArgs); + TaskDefinition taskDefinition = taskDefinitionRepository.findByTaskName(taskName); + + String taskAppName = taskDefinition != null ? taskDefinition.getRegisteredAppName() : taskName; // Get the previous manifest - TaskManifest previousManifest = this.dataflowTaskExecutionMetadataDao.getLatestManifest(taskName); + TaskManifest previousManifest = dataflowTaskExecutionMetadataDao.getLatestManifest(taskName); Map previousTaskDeploymentProperties = previousManifest != null - && previousManifest.getTaskDeploymentRequest() != null - && previousManifest.getTaskDeploymentRequest().getDeploymentProperties() != null - ? previousManifest.getTaskDeploymentRequest().getDeploymentProperties() - : Collections.emptyMap(); + && previousManifest.getTaskDeploymentRequest() != null + && previousManifest.getTaskDeploymentRequest().getDeploymentProperties() != null + ? previousManifest.getTaskDeploymentRequest().getDeploymentProperties() + : Collections.emptyMap(); TaskExecutionInformation taskExecutionInformation = findOrCreateTaskExecutionInformation(taskName, - taskDeploymentProperties, launcher.getType(), previousTaskDeploymentProperties); + deploymentProperties, launcher.getType(), previousTaskDeploymentProperties); - // pre prosess command-line args - // moving things like app.
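The Kubernetes-only name guard added a few lines above can be illustrated with a self-contained check; the sample names are invented:

    import java.util.regex.Pattern;

    class TaskNamePatternCheck {
        // Same regex as TASK_NAME_PATTERN in the hunk above.
        static final Pattern P = Pattern.compile("[a-zA-Z]([-a-zA-Z0-9]*[a-zA-Z0-9])?");

        public static void main(String[] args) {
            System.out.println(P.matcher("my-task-01").matches()); // true
            System.out.println(P.matcher("9task").matches());      // false: must start with a letter
            System.out.println(P.matcher("my-task-").matches());   // false: must end alphanumeric
        }
    }

On any other platform type the name is not checked against this pattern, so existing non-conforming task names keep launching as before.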

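The diff resumes below with a new static helper, convertCommandLineArgsToCTRFormat. As a hedged illustration of the conversion described by its javadoc (sample arguments invented; the Base64Utils.encode output is abbreviated here as <enc(key)>):

    // "app.t1.timeout=30"   -> "--composed-task-app-arguments.<enc(app.t1.timeout)>=30"
    // "--app.t1.timeout=30" -> same as above (the leading "--" is stripped first)
    // "app.t1.flag"         -> "--composed-task-app-arguments.app.t1.flag" (no '=', so no encoding)
    // "--other.prop=x"      -> passed through unchanged
    // a null entry          -> IllegalArgumentException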
    The input args are copied and entries that begin with {@code 'app.'} + * are replaced with a {@code 'composed-task-app-arguments.'} + * prefixed entry. The transformed arg will also be converted to Base64 + * if necessary (eg. when it has an {@code =} sign in the value). + * + * @param commandLineArgs The command line arguments to be converted + * @return list of converted command line arguments + */ + static List convertCommandLineArgsToCTRFormat(List commandLineArgs) { + List composedTaskArguments = new ArrayList<>(); + commandLineArgs.forEach(arg -> { + if (arg == null) { + throw new IllegalArgumentException("Command line Arguments for ComposedTaskRunner contain a null entry."); } - } - - return taskConfigurationProperties.isUseUserAccessToken(); + if (arg.startsWith("app.") || arg.startsWith("--app.")) { + if(arg.startsWith("--")) { + arg = arg.substring(2); + } + String[] split = arg.split("=", 2); + // TODO convert key portion of property / argument to spring commandline format. + if (split.length == 2) { + composedTaskArguments.add("--composed-task-app-arguments." + Base64Utils.encode(split[0]) + "=" + split[1]); + } + else { + composedTaskArguments.add("--composed-task-app-arguments." + arg); + } + } + else { + composedTaskArguments.add(arg); + } + }); + return composedTaskArguments; } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/TaskServicesDateUtils.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/TaskServicesDateUtils.java new file mode 100644 index 0000000000..ae88a243e1 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/TaskServicesDateUtils.java @@ -0,0 +1,47 @@ +/* + * Copyright 2016-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.service.impl; + +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.ZoneId; +import java.util.Date; + +import org.springframework.lang.NonNull; + +/** + * Provides date functionality for the task services. + * + * @author Tobias Soloschenko + */ +final class TaskServicesDateUtils { + + private TaskServicesDateUtils() { + } + + /** + * Gets the date representation for the given number of days in the past. 
+ * + * @param numDaysAgo the number of days ago + * @return the date for {@code numDaysAgo} from today at midnight (locally) + */ + public static Date numDaysAgoFromLocalMidnightToday(@NonNull Integer numDaysAgo) { + LocalDateTime localDateTime = LocalDateTime.of(LocalDate.now(), LocalTime.MIDNIGHT).minusDays(numDaysAgo); + return Date.from(localDateTime.atZone(ZoneId.systemDefault()).toInstant()); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/validation/DefaultValidationService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/validation/DefaultValidationService.java index 3d5ed96f1c..f00950c1c5 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/validation/DefaultValidationService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/validation/DefaultValidationService.java @@ -82,8 +82,8 @@ private boolean validateResource(Resource resource) { boolean result = false; if(resource != null) { try { - if ((resource instanceof DockerResource)) { - result = validateDockerResource(dockerValidatorProperties, (DockerResource) resource); + if ((resource instanceof DockerResource dockerResource)) { + result = validateDockerResource(dockerValidatorProperties, dockerResource); } else { new BootClassLoaderFactory(resolveAsArchive(resource), null) diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/validation/DockerRegistryValidator.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/validation/DockerRegistryValidator.java index 2cf1ca1beb..8ba14f32e9 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/validation/DockerRegistryValidator.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/validation/DockerRegistryValidator.java @@ -21,9 +21,14 @@ import java.util.Map; import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.http.conn.ssl.NoopHostnameVerifier; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; +import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; +import org.apache.hc.client5.http.impl.classic.HttpClientBuilder; +import org.apache.hc.client5.http.impl.classic.HttpClients; +import org.apache.hc.client5.http.impl.io.BasicHttpClientConnectionManager; +import org.apache.hc.client5.http.socket.ConnectionSocketFactory; +import org.apache.hc.client5.http.socket.PlainConnectionSocketFactory; +import org.apache.hc.core5.http.config.Lookup; +import org.apache.hc.core5.http.config.RegistryBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -111,19 +116,25 @@ public boolean isImagePresent() { private RestTemplate configureRestTemplate() { CloseableHttpClient httpClient - = HttpClients.custom() - .setSSLHostnameVerifier(new NoopHostnameVerifier()) + = httpClientBuilder() .build(); HttpComponentsClientHttpRequestFactory requestFactory = new HttpComponentsClientHttpRequestFactory(); requestFactory.setHttpClient(httpClient); requestFactory.setConnectTimeout(dockerValidatiorProperties.getConnectTimeoutInMillis()); - requestFactory.setReadTimeout(dockerValidatiorProperties.getReadTimeoutInMillis()); - 
RestTemplate restTemplate = new RestTemplate(requestFactory); return restTemplate; } + + private HttpClientBuilder httpClientBuilder() { + // Register http/s connection factories + Lookup connSocketFactoryLookup = RegistryBuilder. create() + .register("http", new PlainConnectionSocketFactory()) + .build(); + return HttpClients.custom() + .setConnectionManager(new BasicHttpClientConnectionManager(connSocketFactoryLookup)); + } private DockerAuth getDockerAuth() { DockerAuth result = null; String userName = dockerValidatiorProperties.getUserName(); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/stream/SkipperStreamDeployer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/stream/SkipperStreamDeployer.java index bac939c13b..0be5cdea1d 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/stream/SkipperStreamDeployer.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/stream/SkipperStreamDeployer.java @@ -40,7 +40,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.yaml.snakeyaml.DumperOptions; +import org.yaml.snakeyaml.LoaderOptions; import org.yaml.snakeyaml.Yaml; +import org.yaml.snakeyaml.constructor.SafeConstructor; +import org.yaml.snakeyaml.representer.Representer; import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.cloud.dataflow.core.DataFlowPropertyKeys; @@ -48,9 +51,11 @@ import org.springframework.cloud.dataflow.core.StreamDefinition; import org.springframework.cloud.dataflow.core.StreamDefinitionService; import org.springframework.cloud.dataflow.core.StreamDeployment; +import org.springframework.cloud.dataflow.core.StreamRuntimePropertyKeys; import org.springframework.cloud.dataflow.registry.service.AppRegistryService; import org.springframework.cloud.dataflow.rest.SkipperStream; import org.springframework.cloud.dataflow.server.controller.NoSuchAppException; +import org.springframework.cloud.dataflow.server.controller.NoSuchAppInstanceException; import org.springframework.cloud.dataflow.server.controller.support.InvalidStreamDefinitionException; import org.springframework.cloud.dataflow.server.repository.NoSuchStreamDefinitionException; import org.springframework.cloud.dataflow.server.repository.StreamDefinitionRepository; @@ -63,6 +68,7 @@ import org.springframework.cloud.skipper.SkipperException; import org.springframework.cloud.skipper.client.SkipperClient; import org.springframework.cloud.skipper.domain.AboutResource; +import org.springframework.cloud.skipper.domain.ActuatorPostRequest; import org.springframework.cloud.skipper.domain.ConfigValues; import org.springframework.cloud.skipper.domain.Deployer; import org.springframework.cloud.skipper.domain.Info; @@ -100,578 +106,602 @@ * @author Soby Chacko * @author Glenn Renfro * @author Christian Tzolov + * @author Chris Bono */ public class SkipperStreamDeployer implements StreamDeployer { - private static final Logger logger = LoggerFactory.getLogger(SkipperStreamDeployer.class); - - //Assume version suffix added by skipper is 5 chars. 
- private static final int MAX_APPNAME_LENGTH = 63-5; - - private final SkipperClient skipperClient; - - private final StreamDefinitionRepository streamDefinitionRepository; - - private final AppRegistryService appRegistryService; - - private final ForkJoinPool forkJoinPool; - - private final StreamDefinitionService streamDefinitionService; - - public SkipperStreamDeployer(SkipperClient skipperClient, StreamDefinitionRepository streamDefinitionRepository, - AppRegistryService appRegistryService, ForkJoinPool forkJoinPool, - StreamDefinitionService streamDefinitionService) { - Assert.notNull(skipperClient, "SkipperClient can not be null"); - Assert.notNull(streamDefinitionRepository, "StreamDefinitionRepository can not be null"); - Assert.notNull(appRegistryService, "StreamDefinitionRepository can not be null"); - Assert.notNull(forkJoinPool, "ForkJoinPool can not be null"); - Assert.notNull(streamDefinitionService, "StreamDefinitionService can not be null"); - this.skipperClient = skipperClient; - this.streamDefinitionRepository = streamDefinitionRepository; - this.appRegistryService = appRegistryService; - this.forkJoinPool = forkJoinPool; - this.streamDefinitionService = streamDefinitionService; - } - - public static List deserializeAppStatus(String platformStatus) { - try { - if (platformStatus != null) { - ObjectMapper mapper = new ObjectMapper(); - mapper.addMixIn(AppStatus.class, AppStatusMixin.class); - mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - SimpleModule module = new SimpleModule("CustomModel", Version.unknownVersion()); - SimpleAbstractTypeResolver resolver = new SimpleAbstractTypeResolver(); - resolver.addMapping(AppInstanceStatus.class, AppInstanceStatusImpl.class); - module.setAbstractTypes(resolver); - mapper.registerModule(module); - TypeReference> typeRef = new TypeReference>() { - }; - return mapper.readValue(platformStatus, typeRef); - } - return new ArrayList<>(); - } - catch (Exception e) { - logger.error("Could not parse Skipper Platform Status JSON [" + platformStatus + "]. 
" + - "Exception message = " + e.getMessage()); - return new ArrayList<>(); - } - } - - @Override - public DeploymentState streamState(String streamName) { - return getStreamDeploymentState(streamName); - } - - @Override - public Map streamsStates(List streamDefinitions) { - Map nameToDefinition = new HashMap<>(); - Map states = new HashMap<>(); - List streamNamesList = new ArrayList<>(); - streamDefinitions.stream().forEach(sd -> { - streamNamesList.add(sd.getName()); - nameToDefinition.put(sd.getName(), sd); - }); - String[] streamNames = streamNamesList.toArray(new String[0]); - Map> statuses = this.skipperClient.states(streamNames); - for (Map.Entry entry: nameToDefinition.entrySet()) { - String streamName = entry.getKey(); - if (statuses != null && statuses.containsKey(streamName) && !statuses.get(streamName).isEmpty()) { - states.put(nameToDefinition.get(streamName), - StreamDeployerUtil.aggregateState(new HashSet<>(statuses.get(streamName).values()))); - } - else { - states.put(nameToDefinition.get(streamName), DeploymentState.undeployed); - } - } - return states; - } - - private DeploymentState getStreamDeploymentState(String streamName) { - DeploymentState state = null; - try { - Info info = this.skipperClient.status(streamName); - if (info.getStatus().getPlatformStatus() == null) { - return getDeploymentStateFromStatusInfo(info); - } - List appStatusList = deserializeAppStatus(info.getStatus().getPlatformStatus()); - Set deploymentStateList = appStatusList.stream().map(AppStatus::getState) - .collect(Collectors.toSet()); - state = StreamDeployerUtil.aggregateState(deploymentStateList); - } - catch (ReleaseNotFoundException e) { - // a defined stream but unknown to skipper is considered to be in an undeployed state - if (streamDefinitionExists(streamName)) { - state = DeploymentState.undeployed; - } - } - return state; - } - - private DeploymentState getDeploymentStateFromStatusInfo(Info info) { - DeploymentState result = DeploymentState.unknown; - switch (info.getStatus().getStatusCode()) { - case FAILED: - result = DeploymentState.failed; - break; - case DELETED: - result = DeploymentState.undeployed; - break; - case DEPLOYED: - result = DeploymentState.deployed; - } - return result; - } - - private boolean streamDefinitionExists(String streamName) { - return this.streamDefinitionRepository.findById(streamName).isPresent(); - } - - @Override - public void scale(String streamName, String appName, int count, Map properties) { - this.skipperClient.scale(streamName, ScaleRequest.of(appName, count, properties)); - } - - public Release deployStream(StreamDeploymentRequest streamDeploymentRequest) { - validateStreamDeploymentRequest(streamDeploymentRequest); - Map streamDeployerProperties = streamDeploymentRequest.getStreamDeployerProperties(); - String packageVersion = streamDeployerProperties.get(SkipperStream.SKIPPER_PACKAGE_VERSION); - Assert.isTrue(StringUtils.hasText(packageVersion), "Package Version must be set"); - logger.info("Deploying Stream " + streamDeploymentRequest.getStreamName() + " using skipper."); - String repoName = streamDeployerProperties.get(SkipperStream.SKIPPER_REPO_NAME); - repoName = (StringUtils.hasText(repoName)) ? (repoName) : "local"; - String platformName = streamDeployerProperties.get(SkipperStream.SKIPPER_PLATFORM_NAME); - platformName = determinePlatformName(platformName); - String packageName = streamDeployerProperties.get(SkipperStream.SKIPPER_PACKAGE_NAME); - packageName = (StringUtils.hasText(packageName)) ? 
packageName : streamDeploymentRequest.getStreamName(); - // Create the package .zip file to upload - File packageFile = createPackageForStream(packageName, packageVersion, streamDeploymentRequest); - // Upload the package - UploadRequest uploadRequest = new UploadRequest(); - uploadRequest.setName(packageName); - uploadRequest.setVersion(packageVersion); - uploadRequest.setExtension("zip"); - uploadRequest.setRepoName(repoName); // TODO use from skipperDeploymentProperties if set. - try { - uploadRequest.setPackageFileAsBytes(Files.readAllBytes(packageFile.toPath())); - } - catch (IOException e) { - throw new IllegalArgumentException("Can't read packageFile " + packageFile, e); - } - skipperClient.upload(uploadRequest); - // Install the package - String streamName = streamDeploymentRequest.getStreamName(); - InstallRequest installRequest = new InstallRequest(); - PackageIdentifier packageIdentifier = new PackageIdentifier(); - packageIdentifier.setPackageName(packageName); - packageIdentifier.setPackageVersion(packageVersion); - packageIdentifier.setRepositoryName(repoName); - installRequest.setPackageIdentifier(packageIdentifier); - InstallProperties installProperties = new InstallProperties(); - installProperties.setPlatformName(platformName); - installProperties.setReleaseName(streamName); - installProperties.setConfigValues(new ConfigValues()); - installRequest.setInstallProperties(installProperties); - Release release = null; - try { - release = this.skipperClient.install(installRequest); - } - catch (Exception e) { - logger.error("Skipper install failed. Deleting the package: " + packageName); - try { - this.skipperClient.packageDelete(packageName); - } - catch (Exception e1) { - logger.error("Package delete threw exception: " + e1.getMessage()); - } - throw new SkipperException(e.getMessage()); - } - // TODO store releasename in deploymentIdRepository... - return release; - } - - private String determinePlatformName(final String platformName) { - Collection deployers = skipperClient.listDeployers(); - if (StringUtils.hasText(platformName)) { - List filteredDeployers = deployers.stream() - .filter(d -> d.getName().equals(platformName)) - .collect(Collectors.toList()); - if (filteredDeployers.size() == 0) { - throw new IllegalArgumentException("No platform named '" + platformName + "'"); - } - else { - return platformName; - } - } - else { - if (deployers.size() == 0) { - throw new IllegalArgumentException("No platforms configured"); - } - else { - String platformNameToUse = deployers.stream().findFirst().get().getName(); - logger.info("Using platform '" + platformNameToUse + "'"); - return platformNameToUse; - } - } - } - - private void validateStreamDeploymentRequest(StreamDeploymentRequest streamDeploymentRequest) { - if (streamDeploymentRequest.getAppDeploymentRequests() == null - || streamDeploymentRequest.getAppDeploymentRequests().isEmpty()) { - // nothing to validate. 
- return; - } - String streamName = streamDeploymentRequest.getStreamName(); - // throw as at this point we should have definition - StreamDefinition streamDefinition = this.streamDefinitionRepository - .findById(streamName) - .orElseThrow(() -> new NoSuchStreamDefinitionException(streamDeploymentRequest.getStreamName())); - - for (AppDeploymentRequest adr : streamDeploymentRequest.getAppDeploymentRequests()) { - String registeredAppName = getRegisteredName(streamDefinition, adr.getDefinition().getName()); - String appName = String.format("%s-%s-v", streamName, registeredAppName); - if (appName.length() > 40) { - logger.warn("The stream name plus application name [" + appName + "] is longer than 40 characters." + - " This can not exceed " + MAX_APPNAME_LENGTH + " in length."); - } - if (appName.length() > MAX_APPNAME_LENGTH) { - throw new InvalidStreamDefinitionException( - String.format("The runtime application name for the app %s in the stream %s " - + "should not exceed %s in length. The runtime application name is: %s", registeredAppName, streamName, MAX_APPNAME_LENGTH, appName)); - } - String version = this.appRegistryService.getResourceVersion(adr.getResource()); - validateAppVersionIsRegistered(registeredAppName, adr, version); - } - } - - private String getRegisteredName(StreamDefinition streamDefinition, String adrAppName) { - for (StreamAppDefinition appDefinition : this.streamDefinitionService.getAppDefinitions(streamDefinition)) { - if (appDefinition.getName().equals(adrAppName)) { - return appDefinition.getRegisteredAppName(); - } - } - return adrAppName; - } - - public void validateAppVersionIsRegistered(StreamDefinition streamDefinition, AppDeploymentRequest appDeploymentRequest, String appVersion) { - String registeredAppName = getRegisteredName(streamDefinition, appDeploymentRequest.getDefinition().getName()); - this.validateAppVersionIsRegistered(registeredAppName, appDeploymentRequest, appVersion); - } - - private void validateAppVersionIsRegistered(String registeredAppName, AppDeploymentRequest appDeploymentRequest, String appVersion) { - String appTypeString = appDeploymentRequest.getDefinition().getProperties() - .get(DataFlowPropertyKeys.STREAM_APP_TYPE); - ApplicationType applicationType = ApplicationType.valueOf(appTypeString); - if (!this.appRegistryService.appExist(registeredAppName, applicationType, appVersion)) { - throw new IllegalStateException(String.format("The %s:%s:%s app is not registered!", - registeredAppName, appTypeString, appVersion)); - } - } - - private File createPackageForStream(String packageName, String packageVersion, - StreamDeploymentRequest streamDeploymentRequest) { - PackageWriter packageWriter = new DefaultPackageWriter(); - Package pkgtoWrite = createPackage(packageName, packageVersion, streamDeploymentRequest); - Path tempPath; - try { - tempPath = Files.createTempDirectory("streampackages"); - } - catch (IOException e) { - throw new IllegalArgumentException("Can't create temp diroectory"); - } - File outputDirectory = tempPath.toFile(); - - File zipFile = packageWriter.write(pkgtoWrite, outputDirectory); - return zipFile; - } - - private Package createPackage(String packageName, String packageVersion, - StreamDeploymentRequest streamDeploymentRequest) { - Package pkg = new Package(); - PackageMetadata packageMetadata = new PackageMetadata(); - packageMetadata.setApiVersion(SkipperStream.SKIPPER_DEFAULT_API_VERSION); - packageMetadata.setKind(SkipperStream.SKIPPER_DEFAULT_KIND); - packageMetadata.setName(packageName); - 
packageMetadata.setVersion(packageVersion); - packageMetadata.setMaintainer(SkipperStream.SKIPPER_DEFAULT_MAINTAINER); - packageMetadata.setDescription(streamDeploymentRequest.getDslText()); - pkg.setMetadata(packageMetadata); - pkg.setDependencies(createDependentPackages(packageVersion, streamDeploymentRequest)); - return pkg; - } - - private List createDependentPackages(String packageVersion, - StreamDeploymentRequest streamDeploymentRequest) { - List packageList = new ArrayList<>(); - for (AppDeploymentRequest appDeploymentRequest : streamDeploymentRequest.getAppDeploymentRequests()) { - packageList.add(createDependentPackage(packageVersion, appDeploymentRequest)); - } - return packageList; - } - - private Package createDependentPackage(String packageVersion, AppDeploymentRequest appDeploymentRequest) { - Package pkg = new Package(); - String packageName = appDeploymentRequest.getDefinition().getName(); - - PackageMetadata packageMetadata = new PackageMetadata(); - packageMetadata.setApiVersion(SkipperStream.SKIPPER_DEFAULT_API_VERSION); - packageMetadata.setKind(SkipperStream.SKIPPER_DEFAULT_KIND); - packageMetadata.setName(packageName); - packageMetadata.setVersion(packageVersion); - packageMetadata.setMaintainer(SkipperStream.SKIPPER_DEFAULT_MAINTAINER); - - pkg.setMetadata(packageMetadata); - - ConfigValues configValues = new ConfigValues(); - Map configValueMap = new HashMap<>(); - Map metadataMap = new HashMap<>(); - Map specMap = new HashMap<>(); - - // Add metadata - metadataMap.put("name", packageName); - - // Add spec - String resourceWithoutVersion = this.appRegistryService.getResourceWithoutVersion(appDeploymentRequest.getResource()); - specMap.put("resource", resourceWithoutVersion); - specMap.put("applicationProperties", appDeploymentRequest.getDefinition().getProperties()); - specMap.put("deploymentProperties", appDeploymentRequest.getDeploymentProperties()); - String version = this.appRegistryService.getResourceVersion(appDeploymentRequest.getResource()); - // Add version, including possible override via deploymentProperties - hack to store version in cmdline args - if (appDeploymentRequest.getCommandlineArguments().size() == 1) { - specMap.put("version", appDeploymentRequest.getCommandlineArguments().get(0)); - } - else { - specMap.put("version", version); - } - // Add metadata and spec to top level map - configValueMap.put("metadata", metadataMap); - configValueMap.put("spec", specMap); + private static final Logger logger = LoggerFactory.getLogger(SkipperStreamDeployer.class); + + //Assume version suffix added by skipper is 5 chars. 
+ private static final int MAX_APPNAME_LENGTH = 63 - 5; + + private final SkipperClient skipperClient; + + private final StreamDefinitionRepository streamDefinitionRepository; + + private final AppRegistryService appRegistryService; + + private final ForkJoinPool forkJoinPool; + + private final StreamDefinitionService streamDefinitionService; + + public SkipperStreamDeployer(SkipperClient skipperClient, StreamDefinitionRepository streamDefinitionRepository, + AppRegistryService appRegistryService, ForkJoinPool forkJoinPool, + StreamDefinitionService streamDefinitionService) { + Assert.notNull(skipperClient, "SkipperClient can not be null"); + Assert.notNull(streamDefinitionRepository, "StreamDefinitionRepository can not be null"); + Assert.notNull(appRegistryService, "StreamDefinitionRepository can not be null"); + Assert.notNull(forkJoinPool, "ForkJoinPool can not be null"); + Assert.notNull(streamDefinitionService, "StreamDefinitionService can not be null"); + this.skipperClient = skipperClient; + this.streamDefinitionRepository = streamDefinitionRepository; + this.appRegistryService = appRegistryService; + this.forkJoinPool = forkJoinPool; + this.streamDefinitionService = streamDefinitionService; + } + + public static List deserializeAppStatus(String platformStatus) { + try { + if (platformStatus != null) { + ObjectMapper mapper = new ObjectMapper(); + mapper.addMixIn(AppStatus.class, AppStatusMixin.class); + mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + SimpleModule module = new SimpleModule("CustomModel", Version.unknownVersion()); + SimpleAbstractTypeResolver resolver = new SimpleAbstractTypeResolver(); + resolver.addMapping(AppInstanceStatus.class, AppInstanceStatusImpl.class); + module.setAbstractTypes(resolver); + mapper.registerModule(module); + TypeReference> typeRef = new TypeReference>() { + }; + return mapper.readValue(platformStatus, typeRef); + } + return new ArrayList<>(); + } catch (Exception e) { + logger.error("Could not parse Skipper Platform Status JSON [" + platformStatus + "]. 
" + + "Exception message = " + e.getMessage()); + return new ArrayList<>(); + } + } + + @Override + public DeploymentState streamState(String streamName) { + return getStreamDeploymentState(streamName); + } + + @Override + public Map streamsStates(List streamDefinitions) { + Map nameToDefinition = new HashMap<>(); + Map states = new HashMap<>(); + List streamNamesList = new ArrayList<>(); + streamDefinitions.stream().forEach(sd -> { + streamNamesList.add(sd.getName()); + nameToDefinition.put(sd.getName(), sd); + }); + String[] streamNames = streamNamesList.toArray(new String[0]); + Map> statuses = this.skipperClient.states(streamNames); + for (Map.Entry entry : nameToDefinition.entrySet()) { + String streamName = entry.getKey(); + if (statuses != null && statuses.containsKey(streamName) && !statuses.get(streamName).isEmpty()) { + states.put(nameToDefinition.get(streamName), + StreamDeployerUtil.aggregateState(new HashSet<>(statuses.get(streamName).values()))); + } else { + states.put(nameToDefinition.get(streamName), DeploymentState.undeployed); + } + } + return states; + } + + private DeploymentState getStreamDeploymentState(String streamName) { + DeploymentState state = null; + try { + Info info = this.skipperClient.status(streamName); + if (info.getStatus().getPlatformStatus() == null) { + return getDeploymentStateFromStatusInfo(info); + } + List appStatusList = deserializeAppStatus(info.getStatus().getPlatformStatus()); + Set deploymentStateList = appStatusList.stream().map(AppStatus::getState) + .collect(Collectors.toSet()); + state = StreamDeployerUtil.aggregateState(deploymentStateList); + } catch (ReleaseNotFoundException e) { + // a defined stream but unknown to skipper is considered to be in an undeployed state + if (streamDefinitionExists(streamName)) { + state = DeploymentState.undeployed; + } + } + return state; + } + + private DeploymentState getDeploymentStateFromStatusInfo(Info info) { + DeploymentState result = DeploymentState.unknown; + switch (info.getStatus().getStatusCode()) { + case FAILED: + result = DeploymentState.failed; + break; + case DELETED: + result = DeploymentState.undeployed; + break; + case DEPLOYED: + result = DeploymentState.deployed; + } + return result; + } + + private boolean streamDefinitionExists(String streamName) { + return this.streamDefinitionRepository.findById(streamName).isPresent(); + } + + @Override + public void scale(String streamName, String appName, int count, Map properties) { + this.skipperClient.scale(streamName, ScaleRequest.of(appName, count, properties)); + } + + public Release deployStream(StreamDeploymentRequest streamDeploymentRequest) { + validateStreamDeploymentRequest(streamDeploymentRequest); + Map streamDeployerProperties = streamDeploymentRequest.getStreamDeployerProperties(); + String packageVersion = streamDeployerProperties.get(SkipperStream.SKIPPER_PACKAGE_VERSION); + Assert.isTrue(StringUtils.hasText(packageVersion), "Package Version must be set"); + logger.info("Deploying Stream " + streamDeploymentRequest.getStreamName() + " using skipper."); + String repoName = streamDeployerProperties.get(SkipperStream.SKIPPER_REPO_NAME); + repoName = (StringUtils.hasText(repoName)) ? (repoName) : "local"; + String platformName = streamDeployerProperties.get(SkipperStream.SKIPPER_PLATFORM_NAME); + platformName = determinePlatformName(platformName); + String packageName = streamDeployerProperties.get(SkipperStream.SKIPPER_PACKAGE_NAME); + packageName = (StringUtils.hasText(packageName)) ? 
packageName : streamDeploymentRequest.getStreamName(); + // Create the package .zip file to upload + File packageFile = createPackageForStream(packageName, packageVersion, streamDeploymentRequest); + // Upload the package + UploadRequest uploadRequest = new UploadRequest(); + uploadRequest.setName(packageName); + uploadRequest.setVersion(packageVersion); + uploadRequest.setExtension("zip"); + uploadRequest.setRepoName(repoName); // TODO use from skipperDeploymentProperties if set. + try { + uploadRequest.setPackageFileAsBytes(Files.readAllBytes(packageFile.toPath())); + } catch (IOException e) { + throw new IllegalArgumentException("Can't read packageFile " + packageFile, e); + } + skipperClient.upload(uploadRequest); + // Install the package + String streamName = streamDeploymentRequest.getStreamName(); + InstallRequest installRequest = new InstallRequest(); + PackageIdentifier packageIdentifier = new PackageIdentifier(); + packageIdentifier.setPackageName(packageName); + packageIdentifier.setPackageVersion(packageVersion); + packageIdentifier.setRepositoryName(repoName); + installRequest.setPackageIdentifier(packageIdentifier); + InstallProperties installProperties = new InstallProperties(); + installProperties.setPlatformName(platformName); + installProperties.setReleaseName(streamName); + installProperties.setConfigValues(new ConfigValues()); + installRequest.setInstallProperties(installProperties); + Release release = null; + try { + release = this.skipperClient.install(installRequest); + } catch (Exception e) { + logger.error("Skipper install failed. Deleting the package: " + packageName); + try { + this.skipperClient.packageDelete(packageName); + } catch (Exception e1) { + logger.error("Package delete threw exception: " + e1.getMessage()); + } + throw new SkipperException(e.getMessage()); + } + // TODO store releasename in deploymentIdRepository... + return release; + } + + private String determinePlatformName(final String platformName) { + Collection deployers = skipperClient.listDeployers(); + if (StringUtils.hasText(platformName)) { + List filteredDeployers = deployers.stream() + .filter(d -> d.getName().equals(platformName)) + .collect(Collectors.toList()); + if (filteredDeployers.size() == 0) { + throw new IllegalArgumentException("No platform named '" + platformName + "'"); + } else { + return platformName; + } + } else { + if (deployers.size() == 0) { + throw new IllegalArgumentException("No platforms configured"); + } else { + String platformNameToUse = deployers.stream().findFirst().get().getName(); + logger.info("Using platform '" + platformNameToUse + "'"); + return platformNameToUse; + } + } + } + + private void validateStreamDeploymentRequest(StreamDeploymentRequest streamDeploymentRequest) { + if (streamDeploymentRequest.getAppDeploymentRequests() == null + || streamDeploymentRequest.getAppDeploymentRequests().isEmpty()) { + // nothing to validate. 
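For orientation in this largely whitespace-only rewrite of SkipperStreamDeployer: the deploy path above reduces to writing the package zip, uploading it, installing it, and deleting the uploaded package if the install fails. A condensed sketch using the names from the hunk, with logging and setup trimmed:

    // Condensed from deployStream(...) above.
    skipperClient.upload(uploadRequest);                      // push the generated .zip to the repo
    try {
        release = this.skipperClient.install(installRequest); // install under the stream's release name
    } catch (Exception e) {
        this.skipperClient.packageDelete(packageName);        // best-effort cleanup of the upload
        throw new SkipperException(e.getMessage());
    }

The validation that continues below enforces MAX_APPNAME_LENGTH = 63 - 5, presumably the 63-character Kubernetes DNS label limit minus the roughly 5-character version suffix Skipper appends to runtime app names.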
+ return; + } + String streamName = streamDeploymentRequest.getStreamName(); + // throw as at this point we should have definition + StreamDefinition streamDefinition = this.streamDefinitionRepository + .findById(streamName) + .orElseThrow(() -> new NoSuchStreamDefinitionException(streamDeploymentRequest.getStreamName())); + + for (AppDeploymentRequest adr : streamDeploymentRequest.getAppDeploymentRequests()) { + String registeredAppName = getRegisteredName(streamDefinition, adr.getDefinition().getName()); + String appName = String.format("%s-%s-v", streamName, registeredAppName); + if (appName.length() > 40) { + logger.warn("The stream name plus application name [" + appName + "] is longer than 40 characters." + + " This can not exceed " + MAX_APPNAME_LENGTH + " in length."); + } + if (appName.length() > MAX_APPNAME_LENGTH) { + throw new InvalidStreamDefinitionException( + String.format("The runtime application name for the app %s in the stream %s " + + "should not exceed %s in length. The runtime application name is: %s", registeredAppName, streamName, MAX_APPNAME_LENGTH, appName)); + } + String version = this.appRegistryService.getResourceVersion(adr.getResource()); + validateAppVersionIsRegistered(registeredAppName, adr, version); + } + } + + private String getRegisteredName(StreamDefinition streamDefinition, String adrAppName) { + for (StreamAppDefinition appDefinition : this.streamDefinitionService.getAppDefinitions(streamDefinition)) { + if (appDefinition.getName().equals(adrAppName)) { + return appDefinition.getRegisteredAppName(); + } + } + return adrAppName; + } + + public void validateAppVersionIsRegistered(StreamDefinition streamDefinition, AppDeploymentRequest appDeploymentRequest, String appVersion) { + String registeredAppName = getRegisteredName(streamDefinition, appDeploymentRequest.getDefinition().getName()); + this.validateAppVersionIsRegistered(registeredAppName, appDeploymentRequest, appVersion); + } + + private void validateAppVersionIsRegistered(String registeredAppName, AppDeploymentRequest appDeploymentRequest, String appVersion) { + String appTypeString = appDeploymentRequest.getDefinition().getProperties() + .get(DataFlowPropertyKeys.STREAM_APP_TYPE); + ApplicationType applicationType = ApplicationType.valueOf(appTypeString); + if (!this.appRegistryService.appExist(registeredAppName, applicationType, appVersion)) { + throw new IllegalStateException(String.format("The %s:%s:%s app is not registered!", + registeredAppName, appTypeString, appVersion)); + } + } + + private File createPackageForStream(String packageName, String packageVersion, + StreamDeploymentRequest streamDeploymentRequest) { + PackageWriter packageWriter = new DefaultPackageWriter(); + Package pkgtoWrite = createPackage(packageName, packageVersion, streamDeploymentRequest); + Path tempPath; + try { + tempPath = Files.createTempDirectory("streampackages"); + } catch (IOException e) { + throw new IllegalArgumentException("Can't create temp diroectory"); + } + File outputDirectory = tempPath.toFile(); + + File zipFile = packageWriter.write(pkgtoWrite, outputDirectory); + return zipFile; + } + + private Package createPackage(String packageName, String packageVersion, + StreamDeploymentRequest streamDeploymentRequest) { + Package pkg = new Package(); + PackageMetadata packageMetadata = new PackageMetadata(); + packageMetadata.setApiVersion(SkipperStream.SKIPPER_DEFAULT_API_VERSION); + packageMetadata.setKind(SkipperStream.SKIPPER_DEFAULT_KIND); + packageMetadata.setName(packageName); + 
packageMetadata.setVersion(packageVersion); + packageMetadata.setMaintainer(SkipperStream.SKIPPER_DEFAULT_MAINTAINER); + packageMetadata.setDescription(streamDeploymentRequest.getDslText()); + pkg.setMetadata(packageMetadata); + pkg.setDependencies(createDependentPackages(packageVersion, streamDeploymentRequest)); + return pkg; + } + + private List createDependentPackages(String packageVersion, + StreamDeploymentRequest streamDeploymentRequest) { + List packageList = new ArrayList<>(); + for (AppDeploymentRequest appDeploymentRequest : streamDeploymentRequest.getAppDeploymentRequests()) { + packageList.add(createDependentPackage(packageVersion, appDeploymentRequest)); + } + return packageList; + } + + private Package createDependentPackage(String packageVersion, AppDeploymentRequest appDeploymentRequest) { + Package pkg = new Package(); + String packageName = appDeploymentRequest.getDefinition().getName(); + + PackageMetadata packageMetadata = new PackageMetadata(); + packageMetadata.setApiVersion(SkipperStream.SKIPPER_DEFAULT_API_VERSION); + packageMetadata.setKind(SkipperStream.SKIPPER_DEFAULT_KIND); + packageMetadata.setName(packageName); + packageMetadata.setVersion(packageVersion); + packageMetadata.setMaintainer(SkipperStream.SKIPPER_DEFAULT_MAINTAINER); + + pkg.setMetadata(packageMetadata); + + ConfigValues configValues = new ConfigValues(); + Map configValueMap = new HashMap<>(); + Map metadataMap = new HashMap<>(); + Map specMap = new HashMap<>(); + + // Add metadata + metadataMap.put("name", packageName); + + // Add spec + String resourceWithoutVersion = this.appRegistryService.getResourceWithoutVersion(appDeploymentRequest.getResource()); + specMap.put("resource", resourceWithoutVersion); + specMap.put("applicationProperties", appDeploymentRequest.getDefinition().getProperties()); + specMap.put("deploymentProperties", appDeploymentRequest.getDeploymentProperties()); + String version = this.appRegistryService.getResourceVersion(appDeploymentRequest.getResource()); + // Add version, including possible override via deploymentProperties - hack to store version in cmdline args + if (appDeploymentRequest.getCommandlineArguments().size() == 1) { + specMap.put("version", appDeploymentRequest.getCommandlineArguments().get(0)); + } else { + specMap.put("version", version); + } + // Add metadata and spec to top level map + configValueMap.put("metadata", metadataMap); + configValueMap.put("spec", specMap); DumperOptions dumperOptions = new DumperOptions(); dumperOptions.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK); dumperOptions.setDefaultScalarStyle(DumperOptions.ScalarStyle.DOUBLE_QUOTED); dumperOptions.setPrettyFlow(false); dumperOptions.setSplitLines(false); - Yaml yaml = new Yaml(dumperOptions); + Yaml yaml = new Yaml(new SafeConstructor(new LoaderOptions()), new Representer(dumperOptions), dumperOptions); configValues.setRaw(yaml.dump(configValueMap)); - pkg.setConfigValues(configValues); - pkg.setTemplates(createGenericTemplate()); - return pkg; - - } - - private List