+
+
+
+
\ No newline at end of file
diff --git a/.sdkmanrc b/.sdkmanrc
new file mode 100644
index 0000000000..93565edd75
--- /dev/null
+++ b/.sdkmanrc
@@ -0,0 +1,21 @@
+#
+# Copyright 2005-2022 the original author or authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Use `sdk env` to manually apply this file.
+# Set `sdkman_auto_env=true` in $HOME/.sdkman/etc/config to make it automatic.
+#
+# NOTE: Switching branches will NOT trigger a change. Only switching folders will trigger it. Use `sdk env` to apply when simply switching branches.
+
+java=8.0.402-librca
diff --git a/.settings.xml b/.settings.xml
index 6066f6436c..72efc5c4d6 100644
--- a/.settings.xml
+++ b/.settings.xml
@@ -3,27 +3,44 @@
repo.spring.io
- ${env.CI_DEPLOY_USERNAME}
- ${env.CI_DEPLOY_PASSWORD}
+ ${env.ARTIFACTORY_USERNAME}
+ ${env.ARTIFACTORY_PASSWORD}
+
+
+ spring-snapshots
+ ${env.ARTIFACTORY_USERNAME}
+ ${env.ARTIFACTORY_PASSWORD}
+
+
+ spring-milestones
+ ${env.ARTIFACTORY_USERNAME}
+ ${env.ARTIFACTORY_PASSWORD}
+
+
+ spring-staging
+ ${env.ARTIFACTORY_USERNAME}
+ ${env.ARTIFACTORY_PASSWORD}
-
springtrue
+
+ maven-central
+ Maven Central
+ https://repo.maven.apache.org/maven2
+
+ false
+
+ spring-snapshotsSpring Snapshots
- https://repo.spring.io/libs-snapshot-local
+ https://repo.spring.io/snapshottrue
@@ -31,25 +48,29 @@
spring-milestonesSpring Milestones
- https://repo.spring.io/libs-milestone-local
+ https://repo.spring.io/milestonefalse
- spring-releases
- Spring Releases
- https://repo.spring.io/release
-
- false
-
+ groovy-plugins-release
+ https://groovy.jfrog.io/artifactory/plugins-release
+
+ maven-central
+ Maven Central
+ https://repo.maven.apache.org/maven2
+
+ false
+
+ spring-snapshotsSpring Snapshots
- https://repo.spring.io/libs-snapshot-local
+ https://repo.spring.io/snapshottrue
@@ -57,11 +78,15 @@
spring-milestonesSpring Milestones
- https://repo.spring.io/libs-milestone-local
+ https://repo.spring.io/milestonefalse
+
+ groovy-plugins-release
+ https://groovy.jfrog.io/artifactory/plugins-release
+
diff --git a/.springjavaformatconfig b/.springjavaformatconfig
new file mode 100644
index 0000000000..db822775c0
--- /dev/null
+++ b/.springjavaformatconfig
@@ -0,0 +1 @@
+java-baseline=17
\ No newline at end of file
diff --git a/spring-cloud-dataflow-registry/.jdk8 b/.trivyignore
similarity index 100%
rename from spring-cloud-dataflow-registry/.jdk8
rename to .trivyignore
diff --git a/.vscode/launch.json b/.vscode/launch.json
index 14d8a33dca..d358781a73 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -29,6 +29,23 @@
"mainClass": "org.springframework.cloud.dataflow.server.single.DataFlowServerApplication",
"projectName": "spring-cloud-dataflow-server",
"args": "--spring.config.additional-location=src/config/scdf-mysql.yml"
+ },
+ {
+ "type": "java",
+ "name": "SCDF Debug Attach",
+ "request": "attach",
+ "hostName": "localhost",
+ "port": 5005
+ },
+ {
+ "type": "java",
+ "name": "SKIPPER default",
+ "request": "launch",
+ "cwd": "${workspaceFolder}",
+ "console": "internalConsole",
+ "mainClass": "org.springframework.cloud.skipper.server.app.SkipperServerApplication",
+ "projectName": "spring-cloud-skipper-server",
+ "args": ""
}
]
}
\ No newline at end of file
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 9f69c44e6b..9004a86ac8 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,3 +1,11 @@
{
- "java.configuration.maven.userSettings": ".settings.xml"
+ "java.completion.importOrder": [
+ "java",
+ "javax",
+ "",
+ "org.springframework",
+ "#"
+ ],
+ "java.configuration.maven.userSettings": ".settings.xml",
+ "java.jdt.ls.vmargs": "-XX:+UseParallelGC -XX:GCTimeRatio=4 -XX:AdaptiveSizePolicyWeight=90 -Dsun.zip.disableMemoryMapping=true -Xmx4G -Xms100m -Xlog:disable"
}
\ No newline at end of file
diff --git a/CODE_OF_CONDUCT.adoc b/CODE_OF_CONDUCT.adoc
deleted file mode 100644
index 17783c7c06..0000000000
--- a/CODE_OF_CONDUCT.adoc
+++ /dev/null
@@ -1,44 +0,0 @@
-= Contributor Code of Conduct
-
-As contributors and maintainers of this project, and in the interest of fostering an open
-and welcoming community, we pledge to respect all people who contribute through reporting
-issues, posting feature requests, updating documentation, submitting pull requests or
-patches, and other activities.
-
-We are committed to making participation in this project a harassment-free experience for
-everyone, regardless of level of experience, gender, gender identity and expression,
-sexual orientation, disability, personal appearance, body size, race, ethnicity, age,
-religion, or nationality.
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery
-* Personal attacks
-* Trolling or insulting/derogatory comments
-* Public or private harassment
-* Publishing other's private information, such as physical or electronic addresses,
- without explicit permission
-* Other unethical or unprofessional conduct
-
-Project maintainers have the right and responsibility to remove, edit, or reject comments,
-commits, code, wiki edits, issues, and other contributions that are not aligned to this
-Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors
-that they deem inappropriate, threatening, offensive, or harmful.
-
-By adopting this Code of Conduct, project maintainers commit themselves to fairly and
-consistently applying these principles to every aspect of managing this project. Project
-maintainers who do not follow or enforce the Code of Conduct may be permanently removed
-from the project team.
-
-This Code of Conduct applies both within project spaces and in public spaces when an
-individual is representing the project or its community.
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
-contacting a project maintainer at spring-code-of-conduct@pivotal.io . All complaints will
-be reviewed and investigated and will result in a response that is deemed necessary and
-appropriate to the circumstances. Maintainers are obligated to maintain confidentiality
-with regard to the reporter of an incident.
-
-This Code of Conduct is adapted from the
-https://contributor-covenant.org[Contributor Covenant], version 1.3.0, available at
-https://contributor-covenant.org/version/1/3/0/[contributor-covenant.org/version/1/3/0/]
diff --git a/CONTRIBUTING.adoc b/CONTRIBUTING.adoc
new file mode 100755
index 0000000000..ec78b76c47
--- /dev/null
+++ b/CONTRIBUTING.adoc
@@ -0,0 +1,48 @@
+= Contributing to Spring Cloud Dataflow
+
+:github: https://github.com/spring-cloud/spring-cloud-dataflow
+
+Spring Cloud Dataflow is released under the Apache 2.0 license. If you would like to contribute something, or want to hack on the code, this document should help you get started.
+
+
+
+== Using GitHub Issues
+We use GitHub issues to track bugs and enhancements.
+If you have a general usage question please ask on https://stackoverflow.com[Stack Overflow].
+The Spring Cloud Dataflow team and the broader community monitor the https://stackoverflow.com/tags/spring-cloud-dataflow[`spring-cloud-dataflow`] tag.
+
+If you are reporting a bug, please help to speed up problem diagnosis by providing as much information as possible.
+Ideally, that would include a small sample project that reproduces the problem.
+
+
+
+== Reporting Security Vulnerabilities
+If you think you have found a security vulnerability in Spring Cloud Data Flow please *DO NOT* disclose it publicly until we've had a chance to fix it.
+Please don't report security vulnerabilities using GitHub issues; instead, head over to https://spring.io/security-policy to learn how to disclose them responsibly.
+
+
+
+== Developer Certificate of Origin
+All commits must include a **Signed-off-by** trailer at the end of each commit message to indicate that the contributor agrees to the Developer Certificate of Origin.
+For additional details, please refer to the blog post https://spring.io/blog/2025/01/06/hello-dco-goodbye-cla-simplifying-contributions-to-spring[Hello DCO, Goodbye CLA: Simplifying Contributions to Spring].
+
+
+=== Code Conventions and Housekeeping
+
+None of the following guidelines is essential for a pull request, but they all help your fellow developers understand and work with your code.
+They can also be added after the original pull request but before a merge.
+
+* Use the Spring Framework code format conventions. If you use Eclipse, you can import formatter settings by using the `eclipse-code-formatter.xml` file from the https://github.com/spring-cloud/spring-cloud-build/blob/master/spring-cloud-dependencies-parent/eclipse-code-formatter.xml[Spring Cloud Build] project.
+If you use IntelliJ, you can use the https://plugins.jetbrains.com/plugin/6546[Eclipse Code Formatter Plugin] to import the same file.
+* Make sure all new `.java` files have a simple Javadoc class comment with at least an `@author` tag identifying you, and preferably at least a paragraph describing the class's purpose.
+* Add the ASF license header comment to all new `.java` files (to do so, copy it from existing files in the project).
+* Add yourself as an `@author` to the .java files that you modify substantially (more than cosmetic changes).
+* Add some Javadocs and, if you change the namespace, some XSD doc elements.
+* A few unit tests would help a lot as well. Someone has to do it, and your fellow developers appreciate the effort.
+* If no one else uses your branch, rebase it against the current master (or other target branch in the main project).
+* When writing a commit message, follow https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html[these conventions].
+If you fix an existing issue, add `Fixes gh-XXXX` (where XXXX is the issue number) at the end of the commit message.
+
+
+== Working with the Code
+For information on editing, building, and testing the code, see the link:{github}/wiki/Working-with-the-Code[Working with the Code] page on the project wiki.
diff --git a/README.md b/README.md
index e27217b2c1..c80c2588e0 100644
--- a/README.md
+++ b/README.md
@@ -4,21 +4,11 @@
+# Spring Cloud Data Flow is no longer maintained as an open-source project by Broadcom, Inc.
+
+## For information about extended support or commercial options for Spring Cloud Data Flow, please read the official blog post [here](https://spring.io/blog/2025/04/21/spring-cloud-data-flow-commercial).
+
+
*Spring Cloud Data Flow* is a microservices-based toolkit for building streaming and batch data processing pipelines in
Cloud Foundry and Kubernetes.
@@ -42,7 +32,7 @@ Familiarize yourself with the Spring Cloud Data Flow [architecture](https://data
and [feature capabilities](https://dataflow.spring.io/features/).
**Deployer SPI**: A Service Provider Interface (SPI) is defined in the [Spring Cloud Deployer](https://github.com/spring-cloud/spring-cloud-deployer)
-project. The Deployer SPI provides an abstraction layer for deploying the apps for a given streaming or batch data pipeline,
+project. The Deployer SPI provides an abstraction layer for deploying the apps for a given streaming or batch data pipeline
and managing the application lifecycle.
Spring Cloud Deployer Implementations:
@@ -66,24 +56,13 @@ For example, if relying on Maven coordinates, an application URI would be of the
connects to the Spring Cloud Data Flow Server's REST API and supports a DSL that simplifies the process of defining a
stream or task and managing its lifecycle.
-**Community Implementations**: There are also community maintained Spring Cloud Data Flow implementations that are currently
-based on the 1.7.x series of Spring Cloud Data Flow.
-
- * [HashiCorp Nomad](https://github.com/donovanmuller/spring-cloud-dataflow-server-nomad)
- * [OpenShift](https://github.com/donovanmuller/spring-cloud-dataflow-server-openshift)
- * [Apache Mesos](https://github.com/trustedchoice/spring-cloud-dataflow-server-mesos)
-
-The [Apache YARN](https://github.com/spring-cloud/spring-cloud-dataflow-server-yarn) implementation has reached end-of-line
-status. Let us know at [Gitter](https://gitter.im/spring-cloud/spring-cloud-dataflow) if you are interested in forking
-the project to continue developing and maintaining it.
-
----
## Building
Clone the repo and type
- $ ./mvnw clean install
+ $ ./mvnw -s .settings.xml clean install
Looking for more information? Follow this [link](https://github.com/spring-cloud/spring-cloud-dataflow/blob/master/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-building.adoc).
@@ -94,13 +73,52 @@ By default Git will change the line-endings during checkout to `CRLF`. This is,
as this may lead to test failures under Windows.
Therefore, please ensure that you set Git property `core.autocrlf` to `false`, e.g. using: `$ git config core.autocrlf false`.
-Fore more information please refer to the [Git documentation, Formatting and Whitespace](https://git-scm.com/book/en/v2/Customizing-Git-Git-Configuration).
+For more information please refer to the [Git documentation, Formatting and Whitespace](https://git-scm.com/book/en/v2/Customizing-Git-Git-Configuration).
+
+----
+
+## Running Locally w/ Oracle
+By default, the Dataflow server jar does not include the Oracle database driver dependency.
+If you want to use Oracle for development/testing when running locally, you can specify the `local-dev-oracle` Maven profile when building.
+The following command will include the Oracle driver dependency in the jar:
+```
+$ ./mvnw -s .settings.xml clean package -Plocal-dev-oracle
+```
+You can follow the steps in the [Oracle on Mac ARM64](https://github.com/spring-cloud/spring-cloud-dataflow/wiki/Oracle-on-Mac-ARM64#run-container-in-docker) Wiki to run Oracle XE locally in Docker with Dataflow pointing at it.
+
+> **NOTE:** If you are not running Mac ARM64, just skip the steps related to Homebrew and Colima.
+
+----
+
+## Running Locally w/ Microsoft SQL Server
+By default, the Dataflow server jar does not include the MSSQL database driver dependency.
+If you want to use MSSQL for development/testing when running locally, you can specify the `local-dev-mssql` Maven profile when building.
+The following command will include the MSSQL driver dependency in the jar:
+```
+$ ./mvnw -s .settings.xml clean package -Plocal-dev-mssql
+```
+You can follow the steps in the [MSSQL on Mac ARM64](https://github.com/spring-cloud/spring-cloud-dataflow/wiki/MSSQL-on-Mac-ARM64#running-dataflow-locally-against-mssql) Wiki to run MSSQL locally in Docker with Dataflow pointing at it.
+
+> **NOTE:** If you are not running Mac ARM64, just skip the steps related to Homebrew and Colima.
+
+----
+
+## Running Locally w/ IBM DB2
+By default, the Dataflow server jar does not include the DB2 database driver dependency.
+If you want to use DB2 for development/testing when running locally, you can specify the `local-dev-db2` Maven profile when building.
+The following command will include the DB2 driver dependency in the jar:
+```
+$ ./mvnw -s .settings.xml clean package -Plocal-dev-db2
+```
+You can follow the steps in the [DB2 on Mac ARM64](https://github.com/spring-cloud/spring-cloud-dataflow/wiki/DB2-on-Mac-ARM64#running-dataflow-locally-against-db2) Wiki to run DB2 locally in Docker with Dataflow pointing at it.
+
+> **NOTE:** If you are not running Mac ARM64, just skip the steps related to Homebrew and Colima.
----
## Contributing
-We welcome contributions! Follow this [link](https://github.com/spring-cloud/spring-cloud-dataflow/blob/master/spring-cloud-dataflow-docs/src/main/asciidoc/appendix-contributing.adoc) for more information on how to contribute.
+We welcome contributions! See the [CONTRIBUTING](./CONTRIBUTING.adoc) guide for details.
----
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 0000000000..8a9410d248
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,5 @@
+# Security Policy
+## Reporting a Vulnerability
+
+If you think you have found a security vulnerability, please **DO NOT** disclose it publicly until we’ve had a chance to fix it.
+Please don’t report security vulnerabilities using GitHub issues; instead, head over to https://spring.io/security-policy to learn how to disclose them responsibly.
diff --git a/build-carvel-package.sh b/build-carvel-package.sh
new file mode 100755
index 0000000000..d25d6d4f5f
--- /dev/null
+++ b/build-carvel-package.sh
@@ -0,0 +1,60 @@
+#!/bin/bash
+
+function create_and_clear() {
+ rm -rf "$1"
+ mkdir -p "$1"
+}
+
+SCDIR=$(realpath $(dirname "$(readlink -f "${BASH_SOURCE[0]}")"))
+set -euxo pipefail
+pushd $SCDIR > /dev/null
+./mvnw help:evaluate -Dexpression=project.version > /dev/null
+export DATAFLOW_VERSION=$(./mvnw help:evaluate -Dexpression=project.version -q -DforceStdout)
+export SKIPPER_VERSION=$(./mvnw help:evaluate -Dexpression=spring-cloud-skipper.version -pl spring-cloud-dataflow-parent -q -DforceStdout)
+
+if [ "${PACKAGE_VERSION:-}" = "" ]; then
+ export PACKAGE_VERSION=$DATAFLOW_VERSION
+fi
+
+# you can launch a local docker registry using docker run -d -p 5000:5000 --name registry registry:2.7
+# export REPO_PREFIX="localhost:5000/"
+readonly REPO_PREFIX="${REPO_PREFIX:-docker.io/}"
+
+export PACKAGE_BUNDLE_REPOSITORY="${REPO_PREFIX}springcloud/scdf-oss-package"
+export REPOSITORY_BUNDLE="${REPO_PREFIX}springcloud/scdf-oss-repo"
+
+export SKIPPER_REPOSITORY="springcloud/spring-cloud-skipper-server"
+export SERVER_REPOSITORY="springcloud/spring-cloud-dataflow-server"
+export CTR_VERSION=$DATAFLOW_VERSION
+export PACKAGE_NAME="scdf"
+export PACKAGE_BUNDLE_TEMPLATE="src/carvel/templates/bundle/package"
+export IMGPKG_LOCK_TEMPLATE="src/carvel/templates/imgpkg"
+export VENDIR_SRC_IN="src/carvel/config"
+export SERVER_VERSION="$DATAFLOW_VERSION"
+
+export PACKAGE_BUNDLE_GENERATED=/tmp/generated/packagebundle
+export IMGPKG_LOCK_GENERATED_IN=/tmp/generated/imgpkgin
+export IMGPKG_LOCK_GENERATED_OUT=/tmp/generated/imgpkgout
+create_and_clear $PACKAGE_BUNDLE_GENERATED
+create_and_clear $IMGPKG_LOCK_GENERATED_IN
+create_and_clear $IMGPKG_LOCK_GENERATED_OUT
+
+echo "bundle-path=$PACKAGE_BUNDLE_GENERATED"
+export SCDF_DIR="$SCDIR"
+
+sh "$SCDIR/.github/actions/build-package-bundle/build-package-bundle.sh"
+
+imgpkg push --bundle "$PACKAGE_BUNDLE_REPOSITORY:$PACKAGE_VERSION" --file "$PACKAGE_BUNDLE_GENERATED"
+
+export REPO_BUNDLE_TEMPLATE="src/carvel/templates/bundle/repo"
+
+export REPO_BUNDLE_RENDERED=/tmp/generated/reporendered
+export REPO_BUNDLE_GENERATED=/tmp/generated/repobundle
+create_and_clear $REPO_BUNDLE_RENDERED
+create_and_clear $REPO_BUNDLE_GENERATED
+
+sh "$SCDIR/.github/actions/build-repository-bundle/build-repository-bundle.sh"
+
+imgpkg push --bundle "$REPOSITORY_BUNDLE:$PACKAGE_VERSION" --file "$REPO_BUNDLE_GENERATED"
+
+popd
diff --git a/build-containers.sh b/build-containers.sh
new file mode 100755
index 0000000000..aec91b44aa
--- /dev/null
+++ b/build-containers.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+./mvnw install -s .settings.xml -DskipTests -T 1C -am -pl :spring-cloud-dataflow-server,:spring-cloud-skipper-server,:spring-cloud-dataflow-composed-task-runner,:spring-cloud-dataflow-tasklauncher-sink-rabbit,:spring-cloud-dataflow-tasklauncher-sink-kafka -B --no-transfer-progress
+./mvnw spring-boot:build-image -s .settings.xml -DskipTests -T 1C -pl :spring-cloud-dataflow-server,:spring-cloud-skipper-server,:spring-cloud-dataflow-composed-task-runner,:spring-cloud-dataflow-tasklauncher-sink-rabbit,:spring-cloud-dataflow-tasklauncher-sink-kafka -B --no-transfer-progress
\ No newline at end of file
diff --git a/lib/spring-doc-resources-0.2.5.zip b/lib/spring-doc-resources-0.2.5.zip
new file mode 100644
index 0000000000..b1ff602652
Binary files /dev/null and b/lib/spring-doc-resources-0.2.5.zip differ
diff --git a/models/batch4-5-simple.adoc b/models/batch4-5-simple.adoc
new file mode 100644
index 0000000000..3ee7cdd389
--- /dev/null
+++ b/models/batch4-5-simple.adoc
@@ -0,0 +1,16 @@
+= Simple solution
+
+* SchemaTarget Selection represents a set of schema version, prefix, and name.
+* Boot 2 is the default; its task and batch tables will remain as they are currently.
+* Boot 3 task and batch tables will share the prefix BOOT3_.
+* The Data Flow server will set the prefix properties for task and batch.
+* Registration will require a schema selection indicator (Boot2 or Boot3).
+* At task launch, the Data Flow server will create an entry in the correct task-execution table and sequence mechanism, using the prefix determined by the task's registration.
+* Ability to disable Boot 3 support; the feature endpoint will include this indicator.
+* The endpoints that list job and task executions will accept the boot version as a query parameter; when it is absent, the default is implied: `http://localhost:9393/tasks/executions{?schemaTarget}` (see the sketch after this list).
+* When using the shell to list executions, it will be an optional parameter: `--schema-target=boot3`.
+* When viewing the Task Execution list or Job Execution list, there will be a drop-down with the options Default and Boot3.
+* Each item in the list of executions includes links to retrieve the entity; those links will be encoded with the schemaTarget by the resource assembler.
+
+* The UI only needs to add the drop-downs and pass the selection into the query.
+* The user will not have to do anything extra when creating composed tasks.
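
Below is a minimal client-side sketch, not part of the design above, showing how the optional `schemaTarget` query parameter could be passed. It assumes a Data Flow server listening on `localhost:9393` and a task registered against the `boot3` target; the class name is hypothetical.

```java
import org.springframework.web.client.RestTemplate;

// Hypothetical example: list task executions for the default schema and for the
// boot3 schema target using the optional schemaTarget query parameter.
public class SchemaTargetQuerySketch {

	public static void main(String[] args) {
		RestTemplate restTemplate = new RestTemplate();

		// Omitting schemaTarget implies the default (Boot 2) schema.
		String defaultExecutions = restTemplate.getForObject(
				"http://localhost:9393/tasks/executions", String.class);

		// Passing schemaTarget=boot3 targets the BOOT3_-prefixed tables.
		String boot3Executions = restTemplate.getForObject(
				"http://localhost:9393/tasks/executions?schemaTarget={target}",
				String.class, "boot3");

		System.out.println(defaultExecutions);
		System.out.println(boot3Executions);
	}
}
```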
diff --git a/pom.xml b/pom.xml
index 04b477abab..a2ee37d1ce 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1,26 +1,24 @@
-
+4.0.0
- spring-cloud-dataflow-parent
- 2.8.0-SNAPSHOT
+ org.springframework.cloud
+ spring-cloud-dataflow
+ 3.0.0-SNAPSHOT
+ spring-cloud-dataflow
+ Spring Cloud Dataflowpomhttps://cloud.spring.io/spring-cloud-dataflow/Pivotal Software, Inc.https://www.spring.io
-
- org.springframework.cloud
- spring-cloud-dataflow-build
- 2.8.0-SNAPSHOT
-
- Apache License, Version 2.0https://www.apache.org/licenses/LICENSE-2.0
- Copyright 2014-2020 the original author or authors.
+ Copyright 2014-2021 the original author or authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -51,404 +49,92 @@
https://github.com/spring-cloud/spring-cloud-dataflow/graphs/contributors
-
- 1.8
- -Xdoclint:none
-
- 3.1.0-SNAPSHOT
-
- 2.8.0-SNAPSHOT
- 2.6.0-SNAPSHOT
- 2.6.0-SNAPSHOT
- 2.6.0-SNAPSHOT
- 2.6.0-SNAPSHOT
-
- 4.10.2
-
- 2.7.0-SNAPSHOT
-
- 2.3.0
-
-
- 2.3.7.RELEASE
-
- 2.2.2.RELEASE
-
- 3.1.3.RELEASE
-
- 1.6.0-SNAPSHOT
-
- 1.2.0.RELEASE
- 2.4
- 2.4
-
- 0.8.5
- 3.0.2
- 2.2.0
- 1.5.5
- 0.5
- 2.11.1
- 3.0.2
- 1.2.1
- 2.10.6
- 1.11.731
- 1.8
- 1.15.2
-
+ spring-cloud-dataflow-build
+
+ spring-cloud-dataflow-common
+ spring-cloud-common-security-config
+ spring-cloud-dataflow-parentspring-cloud-dataflow-container-registryspring-cloud-dataflow-configuration-metadataspring-cloud-dataflow-core-dslspring-cloud-dataflow-core
- spring-cloud-dataflow-registry
- spring-cloud-dataflow-rest-resource
- spring-cloud-dataflow-composed-task-runnerspring-cloud-dataflow-server-core
+ spring-cloud-dataflow-rest-resource
+ spring-cloud-dataflow-audit
+ spring-cloud-dataflow-registry
+ spring-cloud-dataflow-platform-kubernetes
+ spring-cloud-dataflow-platform-cloudfoundryspring-cloud-dataflow-autoconfigure
- spring-cloud-dataflow-serverspring-cloud-dataflow-rest-clientspring-cloud-dataflow-shellspring-cloud-dataflow-shell-core
- spring-cloud-dataflow-classic-docs
- spring-cloud-dataflow-docsspring-cloud-dataflow-completion
- spring-cloud-dataflow-dependencies
- spring-cloud-dataflow-platform-kubernetes
- spring-cloud-dataflow-platform-cloudfoundry
+ spring-cloud-skipperspring-cloud-starter-dataflow-serverspring-cloud-starter-dataflow-ui
- spring-cloud-dataflow-audit
+ spring-cloud-dataflow-server
+ spring-cloud-dataflow-tasklauncher
+ spring-cloud-dataflow-single-step-batch-job
+ spring-cloud-dataflow-composed-task-runnerspring-cloud-dataflow-test
+ spring-cloud-dataflow-dependencies
+ spring-cloud-dataflow-classic-docs
+ spring-cloud-dataflow-docs
+ spring-cloud-dataflow-package
-
-
-
- org.springframework.cloud
- spring-cloud-dataflow-common-dependencies
- ${spring-cloud-dataflow-common.version}
- pom
- import
-
-
- org.springframework.cloud
- spring-cloud-task-dependencies
- ${spring-cloud-task.version}
- pom
- import
-
-
- org.springframework.cloud
- spring-cloud-skipper-dependencies
- ${spring-cloud-skipper.version}
- pom
- import
-
-
- org.springframework.cloud
- spring-cloud-dataflow-dependencies
- 2.8.0-SNAPSHOT
- pom
- import
-
-
- org.testcontainers
- testcontainers-bom
- ${testcontainers.version}
- pom
- import
-
-
- org.springframework.cloud
- spring-cloud-dataflow-ui
- ${spring-cloud-dataflow-ui.version}
-
-
- org.springframework.cloud
- spring-cloud-deployer-spi
- ${spring-cloud-deployer.version}
-
-
- org.springframework.cloud
- spring-cloud-deployer-resource-support
- ${spring-cloud-deployer.version}
-
-
- org.springframework.cloud
- spring-cloud-deployer-resource-maven
- ${spring-cloud-deployer.version}
-
-
- org.springframework.cloud
- spring-cloud-deployer-resource-docker
- ${spring-cloud-deployer.version}
-
-
- org.springframework.cloud
- spring-cloud-deployer-local
- ${spring-cloud-deployer-local.version}
-
-
- org.springframework.cloud
- spring-cloud-deployer-cloudfoundry
- ${spring-cloud-deployer-cloudfoundry.version}
-
-
- io.pivotal.cfenv
- java-cfenv
- ${java-cfenv-boot.version}
-
-
- io.pivotal.cfenv
- java-cfenv-boot
- ${java-cfenv-boot.version}
-
-
- io.pivotal.cfenv
- java-cfenv-boot-pivotal-sso
- ${java-cfenv-boot.version}
-
-
- io.pivotal.spring.cloud
- spring-cloud-services-starter-config-client
- ${spring-cloud-services-starter-config-client.version}
-
-
- org.springframework.shell
- spring-shell
- ${spring-shell.version}
-
-
- org.springframework.cloud
- spring-cloud-starter-common-security-config-web
- ${spring-cloud-common-security-config.version}
-
-
- commons-io
- commons-io
- ${commons-io.version}
-
-
- commons-lang
- commons-lang
- ${commons-lang.version}
-
-
- io.fabric8
- kubernetes-client
- ${kubernetes-client.version}
-
-
- org.springframework.cloud
- spring-cloud-deployer-kubernetes
- ${spring-cloud-deployer-kubernetes.version}
-
-
- org.apache.directory.server
- apacheds-protocol-ldap
- ${apache-directory-server.version}
-
-
- io.codearte.props2yaml
- props2yaml
- ${codearte-props2yml.version}
-
-
- org.springframework.security.oauth
- spring-security-oauth2
- ${spring-security-oauth2.version}
-
-
- net.javacrumbs.json-unit
- json-unit-assertj
- ${json-unit.version}
-
-
- com.google.code.findbugs
- jsr305
- ${findbugs.version}
-
-
- io.micrometer.prometheus
- prometheus-rsocket-spring
- ${prometheus-rsocket-spring.version}
-
-
- joda-time
- joda-time
- ${joda-time.version}
-
-
- org.apache.commons
- commons-text
- ${commons-text.version}
-
-
- com.amazonaws
- aws-java-sdk-ecr
- ${aws-java-sdk-ecr.version}
-
-
-
+
+
+
+
+ groovy-plugins-release
+ https://groovy.jfrog.io/artifactory/plugins-release
+
+
+
+
+ groovy-plugins-release
+ https://groovy.jfrog.io/artifactory/plugins-release
+
+ org.apache.maven.plugins
- maven-surefire-plugin
- 2.22.1
+ maven-compiler-plugin
+ 3.13.0
-
- **/*Tests.java
- **/*Test.java
-
-
- **/Abstract*.java
-
-
- ${argLine}
+ true
+ 17
- org.jacoco
- jacoco-maven-plugin
+ org.codehaus.gmaven
+ groovy-maven-plugin
+ 2.1.1
+
+
+ org.apache.groovy
+ groovy
+ 4.0.23
+ pom
+
+
- agent
-
- prepare-agent
-
-
-
- report
- test
+ validate
- report
+ execute
+
+
+ ${project.basedir}
+
+ ${project.basedir}/src/test/groovy/check-pom.groovy
+
-
- org.apache.maven.plugins
- maven-checkstyle-plugin
-
-
-
-
- org.springframework.boot
- spring-boot-maven-plugin
- ${spring-boot.version}
-
-
- org.sonarsource.scanner.maven
- sonar-maven-plugin
- ${sonar-maven-plugin.version}
-
-
- org.jacoco
- jacoco-maven-plugin
- ${jacoco-maven-plugin.version}
-
-
- org.apache.maven.plugins
- maven-jar-plugin
- 3.0.2
-
-
- org.apache.maven.plugins
- maven-source-plugin
- 3.0.1
-
-
-
-
-
-
- org.apache.maven.plugins
- maven-jxr-plugin
- 2.5
-
-
-
-
-
- deploymentfiles
-
-
-
- maven-resources-plugin
-
-
- replace-deployment-files
- process-resources
-
- copy-resources
-
-
- true
- ${basedir}/src
-
-
- ${basedir}/src/templates
-
- **/*
-
- true
-
-
-
-
-
-
-
-
-
-
- spring
-
-
- spring-snapshots
- Spring Snapshots
- https://repo.spring.io/libs-snapshot
-
- true
-
-
-
- spring-milestones
- Spring Milestones
- https://repo.spring.io/libs-milestone-local
-
- false
-
-
-
- spring-releases
- Spring Releases
- https://repo.spring.io/release
-
- false
-
-
-
-
-
- spring-snapshots
- Spring Snapshots
- https://repo.spring.io/libs-snapshot-local
-
- true
-
-
-
- spring-milestones
- Spring Milestones
- https://repo.spring.io/libs-milestone-local
-
- false
-
-
-
-
-
diff --git a/run-integration-test.sh b/run-integration-test.sh
new file mode 100755
index 0000000000..76e7657128
--- /dev/null
+++ b/run-integration-test.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+GROUP=$1
+./mvnw verify -s .settings.xml -Dgroups="$GROUP" -Pfailsafe -pl :spring-cloud-dataflow-server -B --no-transfer-progress
diff --git a/run-integration-tests.sh b/run-integration-tests.sh
new file mode 100755
index 0000000000..d13d22eeef
--- /dev/null
+++ b/run-integration-tests.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+SCDIR=$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")
+pushd $SCDIR
+./build-containers.sh
+./run-integration-test.sh "mariadb,postgres,performance,oauth"
+
diff --git a/spring-cloud-common-security-config/README.md b/spring-cloud-common-security-config/README.md
new file mode 100644
index 0000000000..5466106ed9
--- /dev/null
+++ b/spring-cloud-common-security-config/README.md
@@ -0,0 +1,3 @@
+# Spring Cloud Common Security
+
+This repo holds the security configuration classes that are common across Spring Cloud projects (Spring Cloud Data Flow and Skipper for now) that use **Role**-based authentication/authorization for their runtime server application(s).
diff --git a/spring-cloud-common-security-config/pom.xml b/spring-cloud-common-security-config/pom.xml
new file mode 100644
index 0000000000..585167a54d
--- /dev/null
+++ b/spring-cloud-common-security-config/pom.xml
@@ -0,0 +1,23 @@
+
+
+ 4.0.0
+ spring-cloud-common-security-config
+ 3.0.0-SNAPSHOT
+ pom
+ spring-cloud-common-security-config
+ Spring Cloud Common Security Config
+
+
+ org.springframework.cloud
+ spring-cloud-dataflow-build
+ 3.0.0-SNAPSHOT
+ ../spring-cloud-dataflow-build
+
+
+
+ spring-cloud-common-security-config-core
+ spring-cloud-common-security-config-web
+ spring-cloud-starter-common-security-config-web
+
+
+
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-core/pom.xml b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/pom.xml
new file mode 100644
index 0000000000..4ec7bf0d21
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/pom.xml
@@ -0,0 +1,28 @@
+
+
+ 4.0.0
+
+ org.springframework.cloud
+ spring-cloud-common-security-config
+ 3.0.0-SNAPSHOT
+
+ spring-cloud-common-security-config-core
+ spring-cloud-common-security-config-core
+ Spring Cloud Common Security Config Core
+ jar
+
+ true
+
+
+
+ org.springframework.security
+ spring-security-oauth2-client
+
+
+ org.springframework.boot
+ spring-boot-starter-test
+ test
+
+
+
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/main/java/org/springframework/cloud/common/security/core/support/OAuth2AccessTokenProvidingClientHttpRequestInterceptor.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/main/java/org/springframework/cloud/common/security/core/support/OAuth2AccessTokenProvidingClientHttpRequestInterceptor.java
new file mode 100644
index 0000000000..33bce77f53
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/main/java/org/springframework/cloud/common/security/core/support/OAuth2AccessTokenProvidingClientHttpRequestInterceptor.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.core.support;
+
+import java.io.IOException;
+
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpRequest;
+import org.springframework.http.client.ClientHttpRequestExecution;
+import org.springframework.http.client.ClientHttpRequestInterceptor;
+import org.springframework.http.client.ClientHttpResponse;
+import org.springframework.security.oauth2.core.OAuth2AccessToken;
+import org.springframework.util.Assert;
+
+/**
+ * This implementation of a {@link ClientHttpRequestInterceptor} will retrieve, if available, the OAuth2 Access Token
+ * and add it to the {@code Authorization} HTTP header.
+ *
+ * @author Gunnar Hillert
+ */
+public class OAuth2AccessTokenProvidingClientHttpRequestInterceptor implements ClientHttpRequestInterceptor {
+
+ private final String staticOauthAccessToken;
+
+ private final OAuth2TokenUtilsService oauth2TokenUtilsService;
+
+ public OAuth2AccessTokenProvidingClientHttpRequestInterceptor(String staticOauthAccessToken) {
+ super();
+ Assert.hasText(staticOauthAccessToken, "staticOauthAccessToken must not be null or empty.");
+ this.staticOauthAccessToken = staticOauthAccessToken;
+ this.oauth2TokenUtilsService = null;
+ }
+
+ public OAuth2AccessTokenProvidingClientHttpRequestInterceptor(OAuth2TokenUtilsService oauth2TokenUtilsService) {
+ super();
+ this.oauth2TokenUtilsService = oauth2TokenUtilsService;
+ this.staticOauthAccessToken = null;
+ }
+
+ @Override
+ public ClientHttpResponse intercept(HttpRequest request, byte[] body, ClientHttpRequestExecution execution)
+ throws IOException {
+
+ final String tokenToUse;
+
+ if (this.staticOauthAccessToken != null) {
+ tokenToUse = this.staticOauthAccessToken;
+ }
+ else if (this.oauth2TokenUtilsService != null){
+ tokenToUse = this.oauth2TokenUtilsService.getAccessTokenOfAuthenticatedUser();
+ }
+ else {
+ tokenToUse = null;
+ }
+
+ if (tokenToUse != null) {
+ request.getHeaders().add(HttpHeaders.AUTHORIZATION, OAuth2AccessToken.TokenType.BEARER.getValue() + " " + tokenToUse);
+ }
+ return execution.execute(request, body);
+ }
+}
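
A minimal usage sketch for the interceptor above, assuming a plain `RestTemplate` and a static token value; the class name, token, and URL are illustrative and not part of this patch.

```java
import java.util.Collections;

import org.springframework.cloud.common.security.core.support.OAuth2AccessTokenProvidingClientHttpRequestInterceptor;
import org.springframework.web.client.RestTemplate;

// Hypothetical wiring example: every request sent through this RestTemplate will
// carry an "Authorization: Bearer <token>" header added by the interceptor.
public class InterceptorUsageSketch {

	public static void main(String[] args) {
		RestTemplate restTemplate = new RestTemplate();

		// Static-token constructor; the other constructor takes an OAuth2TokenUtilsService
		// to resolve the token of the currently authenticated user instead.
		restTemplate.setInterceptors(Collections.singletonList(
				new OAuth2AccessTokenProvidingClientHttpRequestInterceptor("my-access-token")));

		String body = restTemplate.getForObject("https://example.org/api/resource", String.class);
		System.out.println(body);
	}
}
```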
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/main/java/org/springframework/cloud/common/security/core/support/OAuth2TokenUtilsService.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/main/java/org/springframework/cloud/common/security/core/support/OAuth2TokenUtilsService.java
new file mode 100644
index 0000000000..f03ba97f8a
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/main/java/org/springframework/cloud/common/security/core/support/OAuth2TokenUtilsService.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2019-2022 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.core.support;
+
+import org.springframework.security.core.Authentication;
+import org.springframework.security.oauth2.client.OAuth2AuthorizedClient;
+import org.springframework.security.oauth2.client.authentication.OAuth2AuthenticationToken;
+
+/**
+ * Service providing OAuth2 Security-related utility methods that may
+ * require other Spring Security services.
+ *
+ * @author Gunnar Hillert
+ * @author Corneil du Plessis
+ *
+ */
+public interface OAuth2TokenUtilsService {
+
+ /**
+ * Retrieves the access token from the {@link Authentication} implementation.
+ *
+ * @return Should never return null.
+ */
+ String getAccessTokenOfAuthenticatedUser();
+
+ /**
+ *
+	 * @param auth2AuthenticationToken the authentication token of the current user
+	 * @return the authorized client for the token
+ */
+ OAuth2AuthorizedClient getAuthorizedClient(OAuth2AuthenticationToken auth2AuthenticationToken);
+
+ /**
+ *
+	 * @param auth2AuthorizedClient the authorized client to remove
+ */
+ void removeAuthorizedClient(OAuth2AuthorizedClient auth2AuthorizedClient);
+
+}
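
A hypothetical implementation sketch of the interface above, assuming Spring Security's `OAuth2AuthorizedClientService` is available; it is illustrative only and not the implementation shipped in this patch.

```java
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.oauth2.client.OAuth2AuthorizedClient;
import org.springframework.security.oauth2.client.OAuth2AuthorizedClientService;
import org.springframework.security.oauth2.client.authentication.OAuth2AuthenticationToken;

// Hypothetical example backing the contract with an OAuth2AuthorizedClientService.
public class AuthorizedClientServiceTokenUtils implements OAuth2TokenUtilsService {

	private final OAuth2AuthorizedClientService authorizedClientService;

	public AuthorizedClientServiceTokenUtils(OAuth2AuthorizedClientService authorizedClientService) {
		this.authorizedClientService = authorizedClientService;
	}

	@Override
	public String getAccessTokenOfAuthenticatedUser() {
		Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
		if (authentication instanceof OAuth2AuthenticationToken) {
			OAuth2AuthorizedClient client = getAuthorizedClient((OAuth2AuthenticationToken) authentication);
			return client.getAccessToken().getTokenValue();
		}
		throw new IllegalStateException("The current user is not authenticated via OAuth2.");
	}

	@Override
	public OAuth2AuthorizedClient getAuthorizedClient(OAuth2AuthenticationToken token) {
		return this.authorizedClientService.loadAuthorizedClient(
				token.getAuthorizedClientRegistrationId(), token.getName());
	}

	@Override
	public void removeAuthorizedClient(OAuth2AuthorizedClient authorizedClient) {
		this.authorizedClientService.removeAuthorizedClient(
				authorizedClient.getClientRegistration().getRegistrationId(),
				authorizedClient.getPrincipalName());
	}
}
```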
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/test/java/org/springframework/cloud/common/security/core/support/OAuth2AccessTokenProvidingClientHttpRequestInterceptorTests.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/test/java/org/springframework/cloud/common/security/core/support/OAuth2AccessTokenProvidingClientHttpRequestInterceptorTests.java
new file mode 100644
index 0000000000..d92948c524
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-core/src/test/java/org/springframework/cloud/common/security/core/support/OAuth2AccessTokenProvidingClientHttpRequestInterceptorTests.java
@@ -0,0 +1,107 @@
+/*
+ * Copyright 2018-2020 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.core.support;
+
+import java.io.IOException;
+import java.util.Collections;
+
+import org.junit.jupiter.api.Test;
+import org.mockito.Mockito;
+
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpRequest;
+import org.springframework.http.client.ClientHttpRequestExecution;
+import org.springframework.test.util.ReflectionTestUtils;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.assertj.core.api.Assertions.entry;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+/**
+ *
+ * @author Gunnar Hillert
+ * @author Corneil du Plessis
+ */
+class OAuth2AccessTokenProvidingClientHttpRequestInterceptorTests {
+
+ @Test
+	void oAuth2AccessTokenProvidingClientHttpRequestInterceptorWithEmptyConstructor() {
+ assertThatThrownBy(() -> new OAuth2AccessTokenProvidingClientHttpRequestInterceptor(""))
+ .isInstanceOf(IllegalArgumentException.class)
+ .hasMessage("staticOauthAccessToken must not be null or empty.");
+ }
+
+ @Test
+ void oAuth2AccessTokenProvidingClientHttpRequestInterceptorWithStaticTokenConstructor() {
+ final OAuth2AccessTokenProvidingClientHttpRequestInterceptor interceptor =
+ new OAuth2AccessTokenProvidingClientHttpRequestInterceptor("foobar");
+
+ final String accessToken = (String) ReflectionTestUtils.getField(interceptor, "staticOauthAccessToken");
+ assertThat(accessToken).isEqualTo("foobar");
+ }
+
+ @Test
+ void interceptWithStaticToken() throws IOException {
+ final OAuth2AccessTokenProvidingClientHttpRequestInterceptor interceptor =
+ new OAuth2AccessTokenProvidingClientHttpRequestInterceptor("foobar");
+ final HttpHeaders headers = setupTest(interceptor);
+
+ assertThat(headers)
+ .hasSize(1)
+ .contains(entry("Authorization", Collections.singletonList("Bearer foobar")));
+ }
+
+ @Test
+ void interceptWithAuthentication() throws IOException {
+ final OAuth2TokenUtilsService oauth2TokenUtilsService = mock(OAuth2TokenUtilsService.class);
+ when(oauth2TokenUtilsService.getAccessTokenOfAuthenticatedUser()).thenReturn("foo-bar-123-token");
+
+ final OAuth2AccessTokenProvidingClientHttpRequestInterceptor interceptor =
+ new OAuth2AccessTokenProvidingClientHttpRequestInterceptor(oauth2TokenUtilsService);
+ final HttpHeaders headers = setupTest(interceptor);
+
+ assertThat(headers)
+ .hasSize(1)
+ .contains(entry("Authorization", Collections.singletonList("Bearer foo-bar-123-token")));
+ }
+
+ @Test
+ void interceptWithAuthenticationAndStaticToken() throws IOException {
+ final OAuth2TokenUtilsService oauth2TokenUtilsService = mock(OAuth2TokenUtilsService.class);
+ when(oauth2TokenUtilsService.getAccessTokenOfAuthenticatedUser()).thenReturn("foo-bar-123-token");
+
+ final OAuth2AccessTokenProvidingClientHttpRequestInterceptor interceptor =
+ new OAuth2AccessTokenProvidingClientHttpRequestInterceptor("foobar");
+ final HttpHeaders headers = setupTest(interceptor);
+
+ assertThat(headers)
+ .hasSize(1)
+ .contains(entry("Authorization", Collections.singletonList("Bearer foobar")));
+ }
+
+ private HttpHeaders setupTest( OAuth2AccessTokenProvidingClientHttpRequestInterceptor interceptor) throws IOException {
+ final HttpRequest request = Mockito.mock(HttpRequest.class);
+ final ClientHttpRequestExecution clientHttpRequestExecution = Mockito.mock(ClientHttpRequestExecution.class);
+ final HttpHeaders headers = new HttpHeaders();
+
+ when(request.getHeaders()).thenReturn(headers);
+ interceptor.intercept(request, null, clientHttpRequestExecution);
+ verify(clientHttpRequestExecution, Mockito.times(1)).execute(request, null);
+ return headers;
+ }
+}
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/pom.xml b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/pom.xml
new file mode 100644
index 0000000000..23093553be
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/pom.xml
@@ -0,0 +1,80 @@
+
+
+ 4.0.0
+
+ org.springframework.cloud
+ spring-cloud-common-security-config
+ 3.0.0-SNAPSHOT
+
+ spring-cloud-common-security-config-web
+ spring-cloud-common-security-config-web
+ Spring Cloud Common Security Config Web
+ jar
+
+ true
+
+
+
+ org.springframework.cloud
+ spring-cloud-common-security-config-core
+ ${project.version}
+
+
+ org.springframework.security
+ spring-security-oauth2-jose
+
+
+ org.springframework.security
+ spring-security-oauth2-resource-server
+
+
+ org.springframework
+ spring-webflux
+
+
+ io.projectreactor.netty
+ reactor-netty
+
+
+ jakarta.servlet
+ jakarta.servlet-api
+
+
+ org.springframework.boot
+ spring-boot-starter-security
+
+
+ org.springframework.boot
+ spring-boot-starter-web
+
+
+ org.springframework.session
+ spring-session-core
+
+
+ org.springframework.boot
+ spring-boot-starter-actuator
+
+
+ org.springframework.boot
+ spring-boot-starter-test
+ test
+
+
+ com.squareup.okhttp3
+ okhttp
+ test
+
+
+ com.squareup.okhttp3
+ mockwebserver
+ test
+
+
+ junit
+ junit
+ test
+
+
+
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/AuthorizationProperties.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/AuthorizationProperties.java
new file mode 100644
index 0000000000..8efea5f00e
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/AuthorizationProperties.java
@@ -0,0 +1,142 @@
+/*
+ * Copyright 2016-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Holds configuration for the authorization aspects of security.
+ *
+ * @author Eric Bottard
+ * @author Gunnar Hillert
+ * @author Ilayaperumal Gopinathan
+ * @author Mike Heath
+ */
+public class AuthorizationProperties {
+
+ private String externalAuthoritiesUrl;
+
+	private List<String> rules = new ArrayList<>();
+
+ private String dashboardUrl = "/dashboard";
+
+ private String loginUrl = "/#/login";
+
+ private String loginProcessingUrl = "/login";
+
+ private String logoutUrl = "/logout";
+
+ private String logoutSuccessUrl = "/logout-success.html";
+
+	private List<String> permitAllPaths = new ArrayList<>();
+
+	private List<String> authenticatedPaths = new ArrayList<>();
+
+ /**
+ * Role-mapping configuration per OAuth2 provider.
+ */
+ private final Map providerRoleMappings = new HashMap<>();
+
+ private String defaultProviderId;
+
+ public Map getProviderRoleMappings() {
+ return providerRoleMappings;
+ }
+
+	public List<String> getRules() {
+		return rules;
+	}
+
+	public void setRules(List<String> rules) {
+		this.rules = rules;
+	}
+
+ public String getExternalAuthoritiesUrl() {
+ return externalAuthoritiesUrl;
+ }
+
+ public void setExternalAuthoritiesUrl(String externalAuthoritiesUrl) {
+ this.externalAuthoritiesUrl = externalAuthoritiesUrl;
+ }
+
+ public String getDashboardUrl() {
+ return dashboardUrl;
+ }
+
+ public void setDashboardUrl(String dashboardUrl) {
+ this.dashboardUrl = dashboardUrl;
+ }
+
+ public String getLoginUrl() {
+ return loginUrl;
+ }
+
+ public void setLoginUrl(String loginUrl) {
+ this.loginUrl = loginUrl;
+ }
+
+ public String getLoginProcessingUrl() {
+ return loginProcessingUrl;
+ }
+
+ public void setLoginProcessingUrl(String loginProcessingUrl) {
+ this.loginProcessingUrl = loginProcessingUrl;
+ }
+
+ public String getLogoutUrl() {
+ return logoutUrl;
+ }
+
+ public void setLogoutUrl(String logoutUrl) {
+ this.logoutUrl = logoutUrl;
+ }
+
+ public String getLogoutSuccessUrl() {
+ return logoutSuccessUrl;
+ }
+
+ public void setLogoutSuccessUrl(String logoutSuccessUrl) {
+ this.logoutSuccessUrl = logoutSuccessUrl;
+ }
+
+	public List<String> getPermitAllPaths() {
+		return permitAllPaths;
+	}
+
+	public void setPermitAllPaths(List<String> permitAllPaths) {
+		this.permitAllPaths = permitAllPaths;
+	}
+
+	public List<String> getAuthenticatedPaths() {
+		return authenticatedPaths;
+	}
+
+	public void setAuthenticatedPaths(List<String> authenticatedPaths) {
+		this.authenticatedPaths = authenticatedPaths;
+	}
+
+ public void setDefaultProviderId(String defaultProviderId) {
+ this.defaultProviderId = defaultProviderId;
+ }
+
+ public String getDefaultProviderId() {
+ return defaultProviderId;
+ }
+
+}
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/CommonSecurityAutoConfiguration.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/CommonSecurityAutoConfiguration.java
new file mode 100644
index 0000000000..f3011fff7d
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/CommonSecurityAutoConfiguration.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2018-2024 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security;
+
+import org.springframework.boot.actuate.autoconfigure.security.servlet.ManagementWebSecurityAutoConfiguration;
+import org.springframework.boot.autoconfigure.AutoConfigureBefore;
+import org.springframework.boot.autoconfigure.security.oauth2.client.servlet.OAuth2ClientAutoConfiguration;
+import org.springframework.boot.autoconfigure.security.oauth2.resource.servlet.OAuth2ResourceServerAutoConfiguration;
+import org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration;
+import org.springframework.context.annotation.Configuration;
+
+@Configuration(proxyBeanMethods = false)
+@AutoConfigureBefore({
+ SecurityAutoConfiguration.class,
+ ManagementWebSecurityAutoConfiguration.class,
+ OAuth2ClientAutoConfiguration.class,
+ OAuth2ResourceServerAutoConfiguration.class})
+public class CommonSecurityAutoConfiguration {
+}
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/IgnoreAllSecurityConfiguration.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/IgnoreAllSecurityConfiguration.java
new file mode 100644
index 0000000000..29bb4d4858
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/IgnoreAllSecurityConfiguration.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security;
+
+import org.springframework.cloud.common.security.support.OnOAuth2SecurityDisabled;
+import org.springframework.context.annotation.Conditional;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.security.config.annotation.web.WebSecurityConfigurer;
+import org.springframework.security.config.annotation.web.builders.WebSecurity;
+
+/**
+ * Spring Security {@link WebSecurityConfigurer} simply ignoring all paths conditionally if security is not enabled.
+ *
+ * The org.springframework.cloud.common.security.enabled=true property disables this configuration and
+ * falls back to the Spring Boot default security configuration.
+ *
+ * @author Janne Valkealahti
+ * @author Gunnar Hillert
+ * @author Christian Tzolov
+ *
+ */
+@Configuration
+@Conditional(OnOAuth2SecurityDisabled.class)
+public class IgnoreAllSecurityConfiguration implements WebSecurityConfigurer<WebSecurity> {
+
+ @Override
+ public void init(WebSecurity builder) {
+ }
+
+ @Override
+ public void configure(WebSecurity builder) {
+ builder.ignoring().requestMatchers("/**");
+ }
+
+}
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/ManualOAuthAuthenticationProvider.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/ManualOAuthAuthenticationProvider.java
new file mode 100644
index 0000000000..047eb5ba52
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/ManualOAuthAuthenticationProvider.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2016-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security;
+
+import org.slf4j.LoggerFactory;
+
+import org.springframework.security.authentication.AuthenticationProvider;
+import org.springframework.security.authentication.AuthenticationServiceException;
+import org.springframework.security.authentication.BadCredentialsException;
+import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
+import org.springframework.security.core.Authentication;
+import org.springframework.security.core.AuthenticationException;
+import org.springframework.security.core.context.SecurityContext;
+import org.springframework.security.core.context.SecurityContextHolder;
+import org.springframework.security.oauth2.client.endpoint.OAuth2AccessTokenResponseClient;
+import org.springframework.security.oauth2.client.endpoint.OAuth2PasswordGrantRequest;
+import org.springframework.security.oauth2.client.registration.ClientRegistration;
+import org.springframework.security.oauth2.client.registration.ClientRegistrationRepository;
+import org.springframework.security.oauth2.core.AuthorizationGrantType;
+import org.springframework.security.oauth2.core.OAuth2AuthorizationException;
+import org.springframework.security.oauth2.core.endpoint.OAuth2AccessTokenResponse;
+import org.springframework.security.oauth2.server.resource.authentication.BearerTokenAuthenticationToken;
+import org.springframework.security.oauth2.server.resource.authentication.OpaqueTokenAuthenticationProvider;
+import org.springframework.security.oauth2.server.resource.introspection.OpaqueTokenIntrospector;
+import org.springframework.web.client.ResourceAccessException;
+
+/**
+ * Provides a custom {@link AuthenticationProvider} that allows for authentication
+ * (username and password) against an OAuth Server using a {@code password grant}.
+ *
+ * @author Gunnar Hillert
+ */
+public class ManualOAuthAuthenticationProvider implements AuthenticationProvider {
+
+ private static final org.slf4j.Logger logger = LoggerFactory.getLogger(ManualOAuthAuthenticationProvider.class);
+
+	private final OAuth2AccessTokenResponseClient<OAuth2PasswordGrantRequest> oAuth2PasswordTokenResponseClient;
+ private final ClientRegistrationRepository clientRegistrationRepository;
+ private final AuthenticationProvider authenticationProvider;
+ private final String providerId;
+
+ public ManualOAuthAuthenticationProvider(
+			OAuth2AccessTokenResponseClient<OAuth2PasswordGrantRequest> oAuth2PasswordTokenResponseClient,
+ ClientRegistrationRepository clientRegistrationRepository,
+ OpaqueTokenIntrospector opaqueTokenIntrospector,
+ String providerId) {
+
+ this.oAuth2PasswordTokenResponseClient = oAuth2PasswordTokenResponseClient;
+ this.clientRegistrationRepository = clientRegistrationRepository;
+ this.authenticationProvider =
+ new OpaqueTokenAuthenticationProvider(opaqueTokenIntrospector);
+ this.providerId = providerId;
+ }
+
+ @Override
+ public Authentication authenticate(Authentication authentication) throws AuthenticationException {
+ final String username = authentication.getName();
+ final String password = authentication.getCredentials().toString();
+
+ final ClientRegistration clientRegistration = clientRegistrationRepository.findByRegistrationId(providerId);
+ final ClientRegistration clientRegistrationPassword = ClientRegistration.withClientRegistration(clientRegistration).authorizationGrantType(AuthorizationGrantType.PASSWORD).build();
+
+ final OAuth2PasswordGrantRequest grantRequest = new OAuth2PasswordGrantRequest(clientRegistrationPassword, username, password);
+ final OAuth2AccessTokenResponse accessTokenResponse;
+ final String accessTokenUri = clientRegistration.getProviderDetails().getTokenUri();
+
+ try {
+ accessTokenResponse = oAuth2PasswordTokenResponseClient.getTokenResponse(grantRequest);
+ logger.warn("Authenticating user '{}' using accessTokenUri '{}'.", username, accessTokenUri);
+ }
+ catch (OAuth2AuthorizationException e) {
+ if (e.getCause() instanceof ResourceAccessException) {
+ final String errorMessage = String.format(
+ "While authenticating user '%s': " + "Unable to access accessTokenUri '%s'.", username,
+ accessTokenUri);
+ logger.error(errorMessage + " Error message: {}.", e.getCause().getMessage());
+ throw new AuthenticationServiceException(errorMessage, e);
+ }
+ else {
+ throw new BadCredentialsException(String.format("Access denied for user '%s'.", username), e);
+ }
+
+ }
+
+ final BearerTokenAuthenticationToken authenticationRequest = new BearerTokenAuthenticationToken(accessTokenResponse.getAccessToken().getTokenValue());
+
+ Authentication newAuthentication = null;
+ try {
+ newAuthentication = this.authenticationProvider.authenticate(authenticationRequest);
+ SecurityContext context = SecurityContextHolder.createEmptyContext();
+ context.setAuthentication(newAuthentication);
+ SecurityContextHolder.setContext(context);
+ } catch (AuthenticationException failed) {
+ SecurityContextHolder.clearContext();
+ logger.warn("Authentication request for failed!", failed);
+ //this.authenticationFailureHandler.onAuthenticationFailure(request, response, failed);
+ }
+
+ return newAuthentication;
+ }
+
+ @Override
+ public boolean supports(Class<?> authentication) {
+ return authentication.equals(UsernamePasswordAuthenticationToken.class);
+ }
+}
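For reference, a minimal usage sketch of this provider (a fragment, not part of the patch; the client registration repository and opaque token introspector are assumed beans, and the "uaa" registration id is a placeholder):

    AuthenticationProvider provider = new ManualOAuthAuthenticationProvider(
            new DefaultPasswordTokenResponseClient(),  // token client for the password grant
            clientRegistrationRepository,              // assumed ClientRegistrationRepository bean
            opaqueTokenIntrospector,                   // assumed OpaqueTokenIntrospector bean
            "uaa");                                    // placeholder registration id
    Authentication result = provider.authenticate(
            new UsernamePasswordAuthenticationToken("user", "secret"));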
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/OAuthClientConfiguration.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/OAuthClientConfiguration.java
new file mode 100644
index 0000000000..4ef9ba1aa8
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/OAuthClientConfiguration.java
@@ -0,0 +1,201 @@
+/*
+ * Copyright 2024 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security;
+
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.boot.autoconfigure.security.oauth2.client.OAuth2ClientProperties;
+import org.springframework.boot.autoconfigure.security.oauth2.resource.OAuth2ResourceServerProperties;
+import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService;
+import org.springframework.cloud.common.security.support.AuthoritiesMapper;
+import org.springframework.cloud.common.security.support.CustomAuthoritiesOpaqueTokenIntrospector;
+import org.springframework.cloud.common.security.support.CustomOAuth2OidcUserService;
+import org.springframework.cloud.common.security.support.CustomPlainOAuth2UserService;
+import org.springframework.cloud.common.security.support.DefaultAuthoritiesMapper;
+import org.springframework.cloud.common.security.support.DefaultOAuth2TokenUtilsService;
+import org.springframework.cloud.common.security.support.ExternalOauth2ResourceAuthoritiesMapper;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.security.authentication.AuthenticationProvider;
+import org.springframework.security.authentication.ProviderManager;
+import org.springframework.security.oauth2.client.OAuth2AuthorizedClientManager;
+import org.springframework.security.oauth2.client.OAuth2AuthorizedClientService;
+import org.springframework.security.oauth2.client.endpoint.DefaultPasswordTokenResponseClient;
+import org.springframework.security.oauth2.client.endpoint.OAuth2AccessTokenResponseClient;
+import org.springframework.security.oauth2.client.endpoint.OAuth2PasswordGrantRequest;
+import org.springframework.security.oauth2.client.oidc.userinfo.OidcUserRequest;
+import org.springframework.security.oauth2.client.registration.ClientRegistrationRepository;
+import org.springframework.security.oauth2.client.userinfo.OAuth2UserRequest;
+import org.springframework.security.oauth2.client.userinfo.OAuth2UserService;
+import org.springframework.security.oauth2.client.web.reactive.function.client.ServletOAuth2AuthorizedClientExchangeFilterFunction;
+import org.springframework.security.oauth2.core.oidc.user.OidcUser;
+import org.springframework.security.oauth2.core.user.OAuth2User;
+import org.springframework.security.oauth2.server.resource.introspection.OpaqueTokenIntrospector;
+import org.springframework.util.StringUtils;
+import org.springframework.web.reactive.function.client.WebClient;
+
+@Configuration(proxyBeanMethods = false)
+public class OAuthClientConfiguration {
+
+ @Configuration(proxyBeanMethods = false)
+ protected static class OAuth2AccessTokenResponseClientConfig {
+ @Bean
+ OAuth2AccessTokenResponseClient<OAuth2PasswordGrantRequest> oAuth2PasswordTokenResponseClient() {
+ return new DefaultPasswordTokenResponseClient();
+ }
+ }
+
+ @Configuration(proxyBeanMethods = false)
+ @ConditionalOnProperty(prefix = "spring.security.oauth2.resourceserver.opaquetoken", value = "introspection-uri")
+ protected static class AuthenticationProviderConfig {
+
+ protected OpaqueTokenIntrospector opaqueTokenIntrospector;
+
+ @Autowired(required = false)
+ public void setOpaqueTokenIntrospector(OpaqueTokenIntrospector opaqueTokenIntrospector) {
+ this.opaqueTokenIntrospector = opaqueTokenIntrospector;
+ }
+
+ @Bean
+ protected AuthenticationProvider authenticationProvider(
+ OAuth2AccessTokenResponseClient<OAuth2PasswordGrantRequest> oAuth2PasswordTokenResponseClient,
+ ClientRegistrationRepository clientRegistrationRepository,
+ AuthorizationProperties authorizationProperties,
+ OAuth2ClientProperties oauth2ClientProperties) {
+ return new ManualOAuthAuthenticationProvider(
+ oAuth2PasswordTokenResponseClient,
+ clientRegistrationRepository,
+ this.opaqueTokenIntrospector,
+ calculateDefaultProviderId(authorizationProperties, oauth2ClientProperties));
+
+ }
+ }
+
+ @Configuration(proxyBeanMethods = false)
+ @ConditionalOnProperty(prefix = "spring.security.oauth2.resourceserver.opaquetoken", value = "introspection-uri")
+ protected static class ProviderManagerConfig {
+ private AuthenticationProvider authenticationProvider;
+
+ @Autowired(required = false)
+ protected void setAuthenticationProvider(AuthenticationProvider authenticationProvider) {
+ this.authenticationProvider = authenticationProvider;
+ }
+
+ @Bean
+ protected ProviderManager providerManager() {
+ List<AuthenticationProvider> providers = new ArrayList<>();
+ providers.add(authenticationProvider);
+ return new ProviderManager(providers);
+ }
+ }
+
+ @Configuration(proxyBeanMethods = false)
+ protected static class OAuth2TokenUtilsServiceConfig {
+ @Bean
+ protected OAuth2TokenUtilsService oauth2TokenUtilsService(OAuth2AuthorizedClientService oauth2AuthorizedClientService) {
+ return new DefaultOAuth2TokenUtilsService(oauth2AuthorizedClientService);
+ }
+ }
+
+ @Configuration(proxyBeanMethods = false)
+ protected static class AuthoritiesMapperConfig {
+
+ @Bean
+ protected AuthoritiesMapper authorityMapper(AuthorizationProperties authorizationProperties,
+ OAuth2ClientProperties oAuth2ClientProperties) {
+ AuthoritiesMapper authorityMapper;
+ if (!StringUtils.hasText(authorizationProperties.getExternalAuthoritiesUrl())) {
+ authorityMapper = new DefaultAuthoritiesMapper(
+ authorizationProperties.getProviderRoleMappings(),
+ calculateDefaultProviderId(authorizationProperties, oAuth2ClientProperties));
+ } else {
+ authorityMapper = new ExternalOauth2ResourceAuthoritiesMapper(
+ URI.create(authorizationProperties.getExternalAuthoritiesUrl()));
+ }
+ return authorityMapper;
+ }
+ }
+
+ @Configuration(proxyBeanMethods = false)
+ protected static class OidcUserServiceConfig {
+
+ @Bean
+ protected OAuth2UserService<OidcUserRequest, OidcUser> oidcUserService(AuthoritiesMapper authoritiesMapper) {
+ return new CustomOAuth2OidcUserService(authoritiesMapper);
+ }
+ }
+
+ @Configuration(proxyBeanMethods = false)
+ protected static class PlainOauth2UserServiceConfig {
+
+ @Bean
+ protected OAuth2UserService<OAuth2UserRequest, OAuth2User> plainOauth2UserService(
+ AuthoritiesMapper authoritiesMapper) {
+ return new CustomPlainOAuth2UserService(authoritiesMapper);
+ }
+ }
+
+ @Configuration(proxyBeanMethods = false)
+ @ConditionalOnProperty(prefix = "spring.security.oauth2.resourceserver.opaquetoken", value = "introspection-uri")
+ protected static class OpaqueTokenIntrospectorConfig {
+ @Bean
+ protected OpaqueTokenIntrospector opaqueTokenIntrospector(OAuth2ResourceServerProperties oAuth2ResourceServerProperties,
+ AuthoritiesMapper authoritiesMapper) {
+ return new CustomAuthoritiesOpaqueTokenIntrospector(
+ oAuth2ResourceServerProperties.getOpaquetoken().getIntrospectionUri(),
+ oAuth2ResourceServerProperties.getOpaquetoken().getClientId(),
+ oAuth2ResourceServerProperties.getOpaquetoken().getClientSecret(),
+ authoritiesMapper);
+ }
+ }
+
+ public static String calculateDefaultProviderId(AuthorizationProperties authorizationProperties, OAuth2ClientProperties oauth2ClientProperties) {
+ if (authorizationProperties.getDefaultProviderId() != null) {
+ return authorizationProperties.getDefaultProviderId();
+ }
+ else if (oauth2ClientProperties.getRegistration().size() == 1) {
+ return oauth2ClientProperties.getRegistration().entrySet().iterator().next()
+ .getKey();
+ }
+ else if (oauth2ClientProperties.getRegistration().size() > 1
+ && !StringUtils.hasText(authorizationProperties.getDefaultProviderId())) {
+ throw new IllegalStateException("defaultProviderId must be set if more than 1 Registration is provided.");
+ }
+ else {
+ throw new IllegalStateException("Unable to retrieve default provider id.");
+ }
+ }
+
+ @Configuration(proxyBeanMethods = false)
+ protected static class WebClientConfig {
+
+ @Bean
+ protected WebClient webClient(OAuth2AuthorizedClientManager authorizedClientManager) {
+ ServletOAuth2AuthorizedClientExchangeFilterFunction oauth2Client =
+ new ServletOAuth2AuthorizedClientExchangeFilterFunction(authorizedClientManager);
+ oauth2Client.setDefaultOAuth2AuthorizedClient(true);
+ return WebClient.builder()
+ .apply(oauth2Client.oauth2Configuration())
+ .build();
+ }
+ }
+
+}
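Behaviour sketch for calculateDefaultProviderId (a fragment; assumes AuthorizationProperties has a default constructor and no default provider id set, and "uaa" is a placeholder registration id):

    OAuth2ClientProperties clientProps = new OAuth2ClientProperties();
    clientProps.getRegistration().put("uaa", new OAuth2ClientProperties.Registration());
    AuthorizationProperties authProps = new AuthorizationProperties();
    String providerId = OAuthClientConfiguration.calculateDefaultProviderId(authProps, clientProps);
    // providerId == "uaa" because exactly one client registration is present;
    // with several registrations, the default provider id must be set explicitly.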
diff --git a/spring-cloud-dataflow-rest-resource/.jdk8 b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/OAuthSecurityConfiguration.java
similarity index 100%
rename from spring-cloud-dataflow-rest-resource/.jdk8
rename to spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/OAuthSecurityConfiguration.java
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/ProviderRoleMapping.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/ProviderRoleMapping.java
new file mode 100644
index 0000000000..fe679e6bc5
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/ProviderRoleMapping.java
@@ -0,0 +1,264 @@
+/*
+ * Copyright 2019-2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.springframework.cloud.common.security.support.CoreSecurityRoles;
+import org.springframework.util.Assert;
+import org.springframework.util.CollectionUtils;
+import org.springframework.util.StringUtils;
+
+/**
+ * Holds configuration for the authorization aspects of security.
+ *
+ * @author Gunnar Hillert
+ *
+ */
+public class ProviderRoleMapping {
+
+ private String oauthScopePrefix = "dataflow.";
+ private String rolePrefix = "ROLE_";
+ private String groupClaim = "roles";
+ private boolean mapOauthScopes = false;
+ private boolean parseOauthScopePathParts = true;
+ private boolean mapGroupClaims = false;
+ private Map<String, String> roleMappings = new HashMap<>(0);
+ private Map<String, String> groupMappings = new HashMap<>(0);
+ private String principalClaimName;
+
+ public ProviderRoleMapping() {
+ super();
+ }
+
+ public ProviderRoleMapping(boolean mapOauthScopes) {
+ this.mapOauthScopes = mapOauthScopes;
+ }
+
+ public ProviderRoleMapping(boolean mapOauthScopes, Map<String, String> roleMappings) {
+ Assert.notNull(roleMappings, "roleMappings must not be null.");
+ this.mapOauthScopes = mapOauthScopes;
+ this.roleMappings = roleMappings;
+ }
+
+ public boolean isParseOauthScopePathParts() {
+ return parseOauthScopePathParts;
+ }
+
+ /**
+ * Sets whether or not to treat OAuth scopes as URIs during the role mapping.
+ * When set to {@code true}, the OAuth scope is treated as a URI and its leading part is ignored (e.g. 'api://dataflow-server/dataflow.create' becomes 'dataflow.create').
+ * When set to {@code false}, the OAuth scope is used as-is. This is useful when the scope is not a URI but still contains '/' characters.
+ *
+ * @param parseOauthScopePathParts whether or not to treat OAuth scopes as URIs during the role mapping
+ */
+ public void setParseOauthScopePathParts(boolean parseOauthScopePathParts) {
+ this.parseOauthScopePathParts = parseOauthScopePathParts;
+ }
+
+ public boolean isMapOauthScopes() {
+ return mapOauthScopes;
+ }
+
+ /**
+ * If set to {@code true}, OAuth scopes will be mapped to the corresponding Data Flow roles.
+ * If set to {@code false} (the default), all roles are assigned to authenticated users.
+ *
+ * @param mapOauthScopes whether OAuth scopes should be mapped to roles; defaults to {@code false}
+ */
+ public void setMapOauthScopes(boolean mapOauthScopes) {
+ this.mapOauthScopes = mapOauthScopes;
+ }
+
+ public boolean isMapGroupClaims() {
+ return mapGroupClaims;
+ }
+
+ public void setMapGroupClaims(boolean mapGroupClaims) {
+ this.mapGroupClaims = mapGroupClaims;
+ }
+
+ /**
+ * When using OAuth2 with enabled {@link #setMapOauthScopes(boolean)}, you can optionally specify a custom
+ * mapping of OAuth scopes to role names as they exist in the Data Flow application. If not
+ * set, then the OAuth scopes themselves must match the role names:
+ *
+ * <ul>
+ *   <li>MANAGE = dataflow.manage</li>
+ *   <li>VIEW = dataflow.view</li>
+ *   <li>CREATE = dataflow.create</li>
+ * </ul>
+ *
+ * @return Optional (May be null). Returns a map of scope-to-role mappings.
+ */
+ public Map<String, String> getRoleMappings() {
+ return roleMappings;
+ }
+
+ public ProviderRoleMapping addRoleMapping(String oauthScope, String roleName) {
+ this.roleMappings.put(oauthScope, roleName);
+ return this;
+ }
+
+ public Map<String, String> getGroupMappings() {
+ return groupMappings;
+ }
+
+ public void setGroupMappings(Map<String, String> groupMappings) {
+ this.groupMappings = groupMappings;
+ }
+
+ public String getGroupClaim() {
+ return groupClaim;
+ }
+
+ public void setGroupClaim(String groupClaim) {
+ this.groupClaim = groupClaim;
+ }
+
+ public String getPrincipalClaimName() {
+ return principalClaimName;
+ }
+
+ public void setPrincipalClaimName(String principalClaimName) {
+ this.principalClaimName = principalClaimName;
+ }
+
+ public Map<CoreSecurityRoles, String> convertGroupMappingKeysToCoreSecurityRoles() {
+
+ final Map<CoreSecurityRoles, String> groupMappings = new HashMap<>(0);
+
+ if (CollectionUtils.isEmpty(this.groupMappings)) {
+ for (CoreSecurityRoles roleEnum : CoreSecurityRoles.values()) {
+ final String roleName = this.oauthScopePrefix + roleEnum.getKey();
+ groupMappings.put(roleEnum, roleName);
+ }
+ return groupMappings;
+ }
+
+ final List<CoreSecurityRoles> unmappedRoles = new ArrayList<>(0);
+
+ for (CoreSecurityRoles coreRole : CoreSecurityRoles.values()) {
+
+ final String coreSecurityRoleName;
+ if (this.rolePrefix.length() > 0 && !coreRole.getKey().startsWith(rolePrefix)) {
+ coreSecurityRoleName = rolePrefix + coreRole.getKey();
+ }
+ else {
+ coreSecurityRoleName = coreRole.getKey();
+ }
+
+ final String oauthScope = this.groupMappings.get(coreSecurityRoleName);
+
+ if (oauthScope == null) {
+ unmappedRoles.add(coreRole);
+ }
+ else {
+ groupMappings.put(coreRole, oauthScope);
+ }
+ }
+
+ if (!unmappedRoles.isEmpty()) {
+ throw new IllegalArgumentException(
+ String.format("The following %s %s not mapped: %s.",
+ unmappedRoles.size(),
+ unmappedRoles.size() > 1 ? "roles are" : "role is",
+ StringUtils.collectionToDelimitedString(unmappedRoles, ", ")));
+ }
+
+ return groupMappings;
+ }
+
+ /**
+ * @return Map containing the {@link CoreSecurityRoles} as key and the associated role name (String) as value.
+ */
+ public Map<CoreSecurityRoles, String> convertRoleMappingKeysToCoreSecurityRoles() {
+
+ final Map<CoreSecurityRoles, String> roleMappings = new HashMap<>(0);
+
+ if (CollectionUtils.isEmpty(this.roleMappings)) {
+ for (CoreSecurityRoles roleEnum : CoreSecurityRoles.values()) {
+ final String roleName = this.oauthScopePrefix + roleEnum.getKey();
+ roleMappings.put(roleEnum, roleName);
+ }
+ return roleMappings;
+ }
+
+ final List<CoreSecurityRoles> unmappedRoles = new ArrayList<>(0);
+
+ for (CoreSecurityRoles coreRole : CoreSecurityRoles.values()) {
+
+ final String coreSecurityRoleName;
+ if (this.rolePrefix.length() > 0 && !coreRole.getKey().startsWith(rolePrefix)) {
+ coreSecurityRoleName = rolePrefix + coreRole.getKey();
+ }
+ else {
+ coreSecurityRoleName = coreRole.getKey();
+ }
+
+ final String oauthScope = this.roleMappings.get(coreSecurityRoleName);
+
+ if (oauthScope == null) {
+ unmappedRoles.add(coreRole);
+ }
+ else {
+ roleMappings.put(coreRole, oauthScope);
+ }
+ }
+
+ if (!unmappedRoles.isEmpty()) {
+ throw new IllegalArgumentException(
+ String.format("The following %s %s not mapped: %s.",
+ unmappedRoles.size(),
+ unmappedRoles.size() > 1 ? "roles are" : "role is",
+ StringUtils.collectionToDelimitedString(unmappedRoles, ", ")));
+ }
+
+ return roleMappings;
+ }
+
+ /**
+ * Sets the prefix which should be added to the authority name (if it doesn't already
+ * exist).
+ *
+ * @param rolePrefix Must not be null
+ *
+ */
+ public void setRolePrefix(String rolePrefix) {
+ Assert.notNull(rolePrefix, "rolePrefix cannot be null");
+ this.rolePrefix = rolePrefix;
+ }
+
+ public String getOauthScopePrefix() {
+ return oauthScopePrefix;
+ }
+
+ /**
+ *
+ * @param oauthScopePrefix Must not be null
+ */
+ public void setOauthScopePrefix(String oauthScopePrefix) {
+ Assert.notNull(rolePrefix, "oauthScopePrefix cannot be null");
+ this.oauthScopePrefix = oauthScopePrefix;
+ }
+
+ public String getRolePrefix() {
+ return rolePrefix;
+ }
+}
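Behaviour sketch for the default mapping (a fragment; no explicit role mappings configured):

    ProviderRoleMapping mapping = new ProviderRoleMapping(true);
    Map<CoreSecurityRoles, String> defaults = mapping.convertRoleMappingKeysToCoreSecurityRoles();
    // defaults.get(CoreSecurityRoles.VIEW) -> "dataflow.VIEW",
    // which is later compared case-insensitively against the token's scopes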
diff --git a/spring-cloud-dataflow-completion/src/test/support/boot13/src/main/java/com/acme/boot13/AnotherEnumClass13.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/package-info.java
similarity index 83%
rename from spring-cloud-dataflow-completion/src/test/support/boot13/src/main/java/com/acme/boot13/AnotherEnumClass13.java
rename to spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/package-info.java
index 1260a504bf..458e8c5a6e 100644
--- a/spring-cloud-dataflow-completion/src/test/support/boot13/src/main/java/com/acme/boot13/AnotherEnumClass13.java
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/package-info.java
@@ -13,13 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-
-package com.acme.boot13;
-
/**
- * An enum used in configuration properties class.
+ * Contains security related configuration classes.
*/
-public enum AnotherEnumClass13 {
- low,
- high;
-}
+package org.springframework.cloud.common.security;
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/AccessTokenClearingLogoutSuccessHandler.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/AccessTokenClearingLogoutSuccessHandler.java
new file mode 100644
index 0000000000..cdf739a8e7
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/AccessTokenClearingLogoutSuccessHandler.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2019-2020 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+import java.io.IOException;
+
+import jakarta.servlet.ServletException;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService;
+import org.springframework.security.core.Authentication;
+import org.springframework.security.oauth2.client.OAuth2AuthorizedClient;
+import org.springframework.security.oauth2.client.OAuth2AuthorizedClientService;
+import org.springframework.security.oauth2.client.authentication.OAuth2AuthenticationToken;
+import org.springframework.security.web.authentication.logout.SimpleUrlLogoutSuccessHandler;
+import org.springframework.util.Assert;
+
+/**
+ * Customized {@link SimpleUrlLogoutSuccessHandler} that will remove the previously authenticated user's
+ * {@link OAuth2AuthorizedClient} from the underlying {@link OAuth2AuthorizedClientService}.
+ *
+ * @author Gunnar Hillert
+ * @since 1.3.0
+ */
+public class AccessTokenClearingLogoutSuccessHandler extends SimpleUrlLogoutSuccessHandler {
+
+ private static final Logger logger = LoggerFactory.getLogger(AccessTokenClearingLogoutSuccessHandler.class);
+
+ final OAuth2TokenUtilsService oauth2TokenUtilsService;
+
+ public AccessTokenClearingLogoutSuccessHandler(OAuth2TokenUtilsService oauth2TokenUtilsService) {
+ Assert.notNull(oauth2TokenUtilsService, "oauth2TokenUtilsService must not be null.");
+ this.oauth2TokenUtilsService = oauth2TokenUtilsService;
+ }
+
+ @Override
+ public void onLogoutSuccess(HttpServletRequest request, HttpServletResponse response,
+ Authentication authentication) throws IOException, ServletException {
+
+ if (authentication instanceof OAuth2AuthenticationToken) {
+ final OAuth2AuthenticationToken oauth2AuthenticationToken = (OAuth2AuthenticationToken) authentication;
+ final OAuth2AuthorizedClient oauth2AuthorizedClient = oauth2TokenUtilsService.getAuthorizedClient(oauth2AuthenticationToken);
+ oauth2TokenUtilsService.removeAuthorizedClient(oauth2AuthorizedClient);
+ logger.info("Removed OAuth2AuthorizedClient.");
+ }
+
+ super.handle(request, response, authentication);
+ }
+
+}
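A sketch of how the handler could be wired into the logout flow (a fragment; oauth2TokenUtilsService is an assumed bean, http the HttpSecurity under configuration, and "/" a placeholder target URL):

    AccessTokenClearingLogoutSuccessHandler handler =
            new AccessTokenClearingLogoutSuccessHandler(oauth2TokenUtilsService);
    handler.setDefaultTargetUrl("/");
    http.logout(logout -> logout.logoutSuccessHandler(handler));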
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/AuthoritiesMapper.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/AuthoritiesMapper.java
new file mode 100644
index 0000000000..70e8be71a3
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/AuthoritiesMapper.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2019-2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+
+import org.springframework.security.core.GrantedAuthority;
+
+/**
+ * Maps scopes and claims into authorities.
+ *
+ * @author Gunnar Hillert
+ * @author Janne Valkealahti
+ */
+public interface AuthoritiesMapper {
+
+ /**
+ * Map the provided scopes to authorities.
+ *
+ * @param providerId If null, then the default providerId is used
+ * @param scopes the scopes to map
+ * @param token some implementation may need to make additional requests
+ * @return the mapped authorities
+ */
+ Set<GrantedAuthority> mapScopesToAuthorities(String providerId, Set<String> scopes, String token);
+
+ /**
+ * Map the provided claims to authorities.
+ *
+ * @param providerId If null, then the default providerId is used
+ * @param claims the claims to map
+ * @return the mapped authorities
+ */
+ default Set<GrantedAuthority> mapClaimsToAuthorities(String providerId, List<String> claims) {
+ return Collections.emptySet();
+ }
+}
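Because the interface has a single abstract method, a trivial implementation can be expressed as a lambda; a sketch (fragment, imports omitted) that turns each scope directly into a granted authority:

    AuthoritiesMapper passthrough = (providerId, scopes, token) ->
            scopes.stream()
                  .map(scope -> (GrantedAuthority) new SimpleGrantedAuthority(scope))
                  .collect(Collectors.toSet());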
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CoreSecurityRoles.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CoreSecurityRoles.java
new file mode 100644
index 0000000000..c8a3a77206
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CoreSecurityRoles.java
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2017-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+import java.util.Arrays;
+
+import org.springframework.util.Assert;
+
+/**
+ * Defines the core security roles supported by Spring Cloud Security.
+ *
+ * @author Gunnar Hillert
+ */
+public enum CoreSecurityRoles {
+
+ CREATE("CREATE", "role for create operations"),
+ DEPLOY("DEPLOY", "role for deploy operations"),
+ DESTROY("DESTROY", "role for destroy operations"),
+ MANAGE("MANAGE", "role for the boot management endpoints"),
+ MODIFY("MODIFY", "role for modify operations"),
+ SCHEDULE("SCHEDULE", "role for scheduling operations"),
+ VIEW("VIEW", "view role");
+
+ private String key;
+
+ private String name;
+
+ CoreSecurityRoles(final String key, final String name) {
+ this.key = key;
+ this.name = name;
+ }
+
+ public static CoreSecurityRoles fromKey(String role) {
+
+ Assert.hasText(role, "Parameter role must not be null or empty.");
+
+ for (CoreSecurityRoles roleType : CoreSecurityRoles.values()) {
+ if (roleType.getKey().equals(role)) {
+ return roleType;
+ }
+ }
+
+ return null;
+ }
+
+ /**
+ * Helper method that returns all role keys as a string array.
+ *
+ * @return Never null
+ */
+ public static String[] getAllRolesAsStringArray() {
+ return Arrays.stream(CoreSecurityRoles.values()).map(CoreSecurityRoles::getKey)
+ .toArray(size -> new String[size]);
+ }
+
+ public String getKey() {
+ return key;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+}
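A short usage sketch (fragment):

    CoreSecurityRoles role = CoreSecurityRoles.fromKey("VIEW");         // -> CoreSecurityRoles.VIEW
    String[] allRoles = CoreSecurityRoles.getAllRolesAsStringArray();   // -> {"CREATE", "DEPLOY", ...}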
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomAuthoritiesOpaqueTokenIntrospector.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomAuthoritiesOpaqueTokenIntrospector.java
new file mode 100644
index 0000000000..dc5ce9aa56
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomAuthoritiesOpaqueTokenIntrospector.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2019-2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.security.core.GrantedAuthority;
+import org.springframework.security.oauth2.core.DefaultOAuth2AuthenticatedPrincipal;
+import org.springframework.security.oauth2.core.OAuth2AuthenticatedPrincipal;
+import org.springframework.security.oauth2.core.OAuth2TokenIntrospectionClaimNames;
+import org.springframework.security.oauth2.server.resource.introspection.NimbusOpaqueTokenIntrospector;
+import org.springframework.security.oauth2.server.resource.introspection.OpaqueTokenIntrospector;
+
+/**
+ * An {@link OpaqueTokenIntrospector} that delegates token introspection to a {@link NimbusOpaqueTokenIntrospector}
+ * and maps the returned scopes and claims to authorities via an {@link AuthoritiesMapper}.
+ *
+ * @author Gunnar Hillert
+ * @since 1.3.0
+ */
+public class CustomAuthoritiesOpaqueTokenIntrospector implements OpaqueTokenIntrospector {
+
+ private static final Logger logger = LoggerFactory.getLogger(CustomAuthoritiesOpaqueTokenIntrospector.class);
+ private final OpaqueTokenIntrospector delegate;
+ private DefaultPrincipalExtractor principalExtractor;
+ private AuthoritiesMapper authorityMapper;
+
+ public CustomAuthoritiesOpaqueTokenIntrospector(
+ String introspectionUri,
+ String clientId,
+ String clientSecret,
+ AuthoritiesMapper authorityMapper) {
+ this.delegate = new NimbusOpaqueTokenIntrospector(introspectionUri, clientId, clientSecret);
+ this.principalExtractor = new DefaultPrincipalExtractor();
+ this.authorityMapper = authorityMapper;
+ }
+
+ @Override
+ public OAuth2AuthenticatedPrincipal introspect(String token) {
+ logger.debug("Introspecting");
+ OAuth2AuthenticatedPrincipal principal = this.delegate.introspect(token);
+ Object principalName = principalExtractor.extractPrincipal(principal.getAttributes());
+ return new DefaultOAuth2AuthenticatedPrincipal(
+ principalName.toString(), principal.getAttributes(), extractAuthorities(principal, token));
+ }
+
+ private Collection<GrantedAuthority> extractAuthorities(OAuth2AuthenticatedPrincipal principal, String token) {
+ final List<String> scopes = principal.getAttribute(OAuth2TokenIntrospectionClaimNames.SCOPE);
+ final Set<String> scopesAsSet = new HashSet<>(scopes);
+ final Set<GrantedAuthority> authorities = this.authorityMapper.mapScopesToAuthorities(null, scopesAsSet, token);
+ final Set<GrantedAuthority> authorities2 = this.authorityMapper.mapClaimsToAuthorities(null, Arrays.asList("groups", "roles"));
+ authorities.addAll(authorities2);
+ return authorities;
+ }
+
+ public void setPrincipalExtractor(DefaultPrincipalExtractor principalExtractor) {
+ this.principalExtractor = principalExtractor;
+ }
+
+ public void setAuthorityMapper(AuthoritiesMapper authorityMapper) {
+ this.authorityMapper = authorityMapper;
+ }
+
+}
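A construction sketch (fragment; the endpoint, client id and secret are placeholders and the AuthoritiesMapper is an assumed bean):

    OpaqueTokenIntrospector introspector = new CustomAuthoritiesOpaqueTokenIntrospector(
            "https://auth.example.com/oauth/introspect",  // placeholder introspection URI
            "dataflow",                                    // placeholder client id
            "secret",                                      // placeholder client secret
            authoritiesMapper);                            // assumed AuthoritiesMapper bean
    OAuth2AuthenticatedPrincipal principal = introspector.introspect("opaque-token-value");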
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomOAuth2OidcUserService.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomOAuth2OidcUserService.java
new file mode 100644
index 0000000000..7ba93044f1
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomOAuth2OidcUserService.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2019-2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.security.core.GrantedAuthority;
+import org.springframework.security.oauth2.client.oidc.userinfo.OidcUserRequest;
+import org.springframework.security.oauth2.client.oidc.userinfo.OidcUserService;
+import org.springframework.security.oauth2.client.userinfo.OAuth2UserService;
+import org.springframework.security.oauth2.core.OAuth2AccessToken;
+import org.springframework.security.oauth2.core.OAuth2AuthenticationException;
+import org.springframework.security.oauth2.core.oidc.user.DefaultOidcUser;
+import org.springframework.security.oauth2.core.oidc.user.OidcUser;
+import org.springframework.util.StringUtils;
+
+/**
+ * An OIDC {@link OAuth2UserService} that delegates to the standard {@link OidcUserService} and maps
+ * OAuth scopes and group/role claims to authorities via an {@link AuthoritiesMapper}.
+ *
+ * @author Gunnar Hillert
+ * @author Janne Valkealahti
+ */
+public class CustomOAuth2OidcUserService implements OAuth2UserService<OidcUserRequest, OidcUser> {
+
+ private final static Logger log = LoggerFactory.getLogger(CustomOAuth2OidcUserService.class);
+ final OidcUserService delegate = new OidcUserService();
+ final AuthoritiesMapper authorityMapper;
+
+ public CustomOAuth2OidcUserService(AuthoritiesMapper authorityMapper) {
+ this.authorityMapper = authorityMapper;
+ }
+
+ @Override
+ public OidcUser loadUser(OidcUserRequest userRequest) throws OAuth2AuthenticationException {
+ log.debug("Load user");
+ final OidcUser oidcUser = delegate.loadUser(userRequest);
+ final OAuth2AccessToken accessToken = userRequest.getAccessToken();
+ final Set<GrantedAuthority> mappedAuthorities1 = this.authorityMapper.mapScopesToAuthorities(
+ userRequest.getClientRegistration().getRegistrationId(), accessToken.getScopes(),
+ accessToken.getTokenValue());
+
+ List<String> roleClaims = oidcUser.getClaimAsStringList("groups");
+ if (roleClaims == null) {
+ roleClaims = oidcUser.getClaimAsStringList("roles");
+ }
+ if (roleClaims == null) {
+ roleClaims = new ArrayList<>();
+ }
+ log.debug("roleClaims: {}", roleClaims);
+ Set<GrantedAuthority> mappedAuthorities2 = this.authorityMapper
+ .mapClaimsToAuthorities(userRequest.getClientRegistration().getRegistrationId(), roleClaims);
+
+ final String userNameAttributeName = userRequest.getClientRegistration()
+ .getProviderDetails().getUserInfoEndpoint().getUserNameAttributeName();
+
+ log.debug("AccessToken: {}", accessToken.getTokenValue());
+
+ HashSet<GrantedAuthority> mappedAuthorities = new HashSet<>(mappedAuthorities1);
+ mappedAuthorities.addAll(mappedAuthorities2);
+
+ final OidcUser oidcUserToReturn;
+
+ if (StringUtils.hasText(userNameAttributeName)) {
+ oidcUserToReturn = new DefaultOidcUser(mappedAuthorities, userRequest.getIdToken(), oidcUser.getUserInfo(),
+ userNameAttributeName);
+ } else {
+ oidcUserToReturn = new DefaultOidcUser(mappedAuthorities, userRequest.getIdToken(), oidcUser.getUserInfo());
+ }
+ return oidcUserToReturn;
+ }
+}
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomPlainOAuth2UserService.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomPlainOAuth2UserService.java
new file mode 100644
index 0000000000..249f6d6688
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/CustomPlainOAuth2UserService.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2019-2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+import java.util.Set;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.security.core.GrantedAuthority;
+import org.springframework.security.oauth2.client.userinfo.DefaultOAuth2UserService;
+import org.springframework.security.oauth2.client.userinfo.OAuth2UserRequest;
+import org.springframework.security.oauth2.client.userinfo.OAuth2UserService;
+import org.springframework.security.oauth2.core.OAuth2AccessToken;
+import org.springframework.security.oauth2.core.OAuth2AuthenticationException;
+import org.springframework.security.oauth2.core.user.DefaultOAuth2User;
+import org.springframework.security.oauth2.core.user.OAuth2User;
+
+/**
+ * An {@link OAuth2UserService} that delegates to the standard {@link DefaultOAuth2UserService} and maps
+ * OAuth scopes to authorities via an {@link AuthoritiesMapper}.
+ *
+ * @author Gunnar Hillert
+ * @author Janne Valkealahti
+ */
+public class CustomPlainOAuth2UserService implements OAuth2UserService<OAuth2UserRequest, OAuth2User> {
+
+ private final static Logger log = LoggerFactory.getLogger(CustomPlainOAuth2UserService.class);
+ final DefaultOAuth2UserService delegate = new DefaultOAuth2UserService();
+ final AuthoritiesMapper authorityMapper;
+
+ public CustomPlainOAuth2UserService(AuthoritiesMapper authorityMapper) {
+ this.authorityMapper = authorityMapper;
+ }
+
+ @Override
+ public OAuth2User loadUser(OAuth2UserRequest userRequest) throws OAuth2AuthenticationException {
+ log.debug("Load user");
+ final OAuth2User oauth2User = delegate.loadUser(userRequest);
+ final OAuth2AccessToken accessToken = userRequest.getAccessToken();
+ log.debug("AccessToken: {}", accessToken.getTokenValue());
+
+ final Set<GrantedAuthority> mappedAuthorities = this.authorityMapper.mapScopesToAuthorities(
+ userRequest.getClientRegistration().getRegistrationId(), accessToken.getScopes(),
+ accessToken.getTokenValue());
+ final String userNameAttributeName = userRequest.getClientRegistration()
+ .getProviderDetails().getUserInfoEndpoint().getUserNameAttributeName();
+ final OAuth2User oauth2UserToReturn = new DefaultOAuth2User(mappedAuthorities, oauth2User.getAttributes(),
+ userNameAttributeName);
+ return oauth2UserToReturn;
+ }
+}
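A sketch of how the two custom user services above could be registered with the oauth2Login DSL (fragment; http is the HttpSecurity being configured and both services are assumed beans):

    http.oauth2Login(login -> login.userInfoEndpoint(userInfo -> userInfo
            .userService(plainOauth2UserService)    // non-OIDC providers
            .oidcUserService(oidcUserService)));    // OIDC providers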
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultAuthoritiesMapper.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultAuthoritiesMapper.java
new file mode 100644
index 0000000000..b5e9dc82e4
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultAuthoritiesMapper.java
@@ -0,0 +1,233 @@
+/*
+ * Copyright 2019-2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.cloud.common.security.ProviderRoleMapping;
+import org.springframework.security.config.core.GrantedAuthorityDefaults;
+import org.springframework.security.core.GrantedAuthority;
+import org.springframework.security.core.authority.SimpleGrantedAuthority;
+import org.springframework.util.Assert;
+import org.springframework.util.StringUtils;
+
+/**
+ * Default {@link AuthoritiesMapper}.
+ *
+ * @author Gunnar Hillert
+ * @author Janne Valkealahti
+ */
+public class DefaultAuthoritiesMapper implements AuthoritiesMapper {
+
+ private static final Logger logger = LoggerFactory.getLogger(DefaultAuthoritiesMapper.class);
+ private final Map<String, ProviderRoleMapping> providerRoleMappings;
+ private final String defaultProviderId;
+
+ public DefaultAuthoritiesMapper(Map<String, ProviderRoleMapping> providerRoleMappings, String defaultProviderId) {
+ super();
+
+ Assert.notNull(providerRoleMappings, "providerRoleMappings must not be null.");
+ for (Entry<String, ProviderRoleMapping> providerRoleMappingToValidate : providerRoleMappings.entrySet()) {
+ providerRoleMappingToValidate.getValue().convertRoleMappingKeysToCoreSecurityRoles();
+ }
+
+ this.providerRoleMappings = providerRoleMappings;
+ this.defaultProviderId = defaultProviderId;
+ }
+
+ /**
+ * Convenience constructor that will create a {@link DefaultAuthoritiesMapper} with a
+ * single {@link ProviderRoleMapping}.
+ *
+ * @param providerId Create a ProviderRoleMapping with the specified providerId
+ * @param mapOAuthScopes whether OAuth scopes should be mapped to roles
+ * @param roleMappings Used to populate the ProviderRoleMapping
+ */
+ public DefaultAuthoritiesMapper(String providerId, boolean mapOAuthScopes, Map<String, String> roleMappings) {
+ Assert.hasText(providerId, "The providerId must not be null or empty.");
+ final ProviderRoleMapping providerRoleMapping = new ProviderRoleMapping(mapOAuthScopes, roleMappings);
+ this.providerRoleMappings = new HashMap<>(1);
+ this.providerRoleMappings.put(providerId, providerRoleMapping);
+ for (ProviderRoleMapping providerRoleMappingToValidate : providerRoleMappings.values()) {
+ providerRoleMappingToValidate.convertRoleMappingKeysToCoreSecurityRoles();
+ }
+ this.defaultProviderId = providerId;
+ }
+
+ /**
+ * Convenience constructor that will create a {@link DefaultAuthoritiesMapper} with a
+ * single {@link ProviderRoleMapping}.
+ *
+ * @param providerId The provider id for the ProviderRoleMapping
+ * @param mapOAuthScopes whether OAuth scopes should be mapped to roles
+ */
+ public DefaultAuthoritiesMapper(String providerId, boolean mapOAuthScopes) {
+ Assert.hasText(providerId, "The providerId must not be null or empty.");
+ final ProviderRoleMapping providerRoleMapping = new ProviderRoleMapping(mapOAuthScopes);
+ this.providerRoleMappings = new HashMap<>(1);
+ this.providerRoleMappings.put(providerId, providerRoleMapping);
+ for (ProviderRoleMapping providerRoleMappingToValidate : providerRoleMappings.values()) {
+ providerRoleMappingToValidate.convertRoleMappingKeysToCoreSecurityRoles();
+ }
+ this.defaultProviderId = providerId;
+ }
+
+ /**
+ * Convenience constructor that will create a {@link DefaultAuthoritiesMapper} with a
+ * single {@link ProviderRoleMapping}.
+ *
+ * @param providerId The provider id for the ProviderRoleMapping
+ * @param providerRoleMapping The role mappings to add to the {@link ProviderRoleMapping}
+ */
+ public DefaultAuthoritiesMapper(String providerId, ProviderRoleMapping providerRoleMapping) {
+ this.providerRoleMappings = new HashMap<>(1);
+ this.providerRoleMappings.put(providerId, providerRoleMapping);
+ for (ProviderRoleMapping providerRoleMappingToValidate : providerRoleMappings.values()) {
+ providerRoleMappingToValidate.convertRoleMappingKeysToCoreSecurityRoles();
+ }
+ this.defaultProviderId = providerId;
+ }
+
+ /**
+ * The returned {@link Set} of {@link GrantedAuthority}s contains all roles from
+ * {@link CoreSecurityRoles}. The roles are prefixed with the value specified in
+ * {@link GrantedAuthorityDefaults}.
+ *
+ * @param clientIdParam If null, the default provider id is used
+ * @param scopes Must not be null
+ * @param token Ignored in this implementation
+ */
+ @Override
+ public Set<GrantedAuthority> mapScopesToAuthorities(String clientIdParam, Set<String> scopes, String token) {
+ logger.debug("Mapping scopes to authorities");
+ final String clientId;
+ if (clientIdParam == null) {
+ clientId = this.defaultProviderId;
+ }
+ else {
+ clientId = clientIdParam;
+ }
+ Assert.notNull(scopes, "The scopes argument must not be null.");
+
+ final ProviderRoleMapping roleMapping = this.providerRoleMappings.get(clientId);
+
+ if (roleMapping == null) {
+ throw new IllegalArgumentException("No role mapping found for clientId " + clientId);
+ }
+
+ final List<String> rolesAsStrings = new ArrayList<>();
+
+ Set<GrantedAuthority> grantedAuthorities = new HashSet<>();
+
+ if (roleMapping.isMapOauthScopes()) {
+ if (!scopes.isEmpty()) {
+ for (Map.Entry<CoreSecurityRoles, String> roleMappingEntry : roleMapping.convertRoleMappingKeysToCoreSecurityRoles().entrySet()) {
+ final CoreSecurityRoles role = roleMappingEntry.getKey();
+ final String expectedOAuthScope = roleMappingEntry.getValue();
+ Set<String> scopeList = roleMapping.isParseOauthScopePathParts() ? pathParts(scopes) : scopes;
+ for (String scope : scopeList) {
+ if (scope.equalsIgnoreCase(expectedOAuthScope)) {
+ final SimpleGrantedAuthority oauthRoleAuthority = new SimpleGrantedAuthority(roleMapping.getRolePrefix() + role.getKey());
+ rolesAsStrings.add(oauthRoleAuthority.getAuthority());
+ grantedAuthorities.add(oauthRoleAuthority);
+ }
+ }
+ }
+ logger.info("Adding roles: {}.", StringUtils.collectionToCommaDelimitedString(rolesAsStrings));
+ }
+ }
+ else if (!roleMapping.isMapGroupClaims()) {
+ grantedAuthorities =
+ roleMapping.convertRoleMappingKeysToCoreSecurityRoles().entrySet().stream().map(mapEntry -> {
+ final CoreSecurityRoles role = mapEntry.getKey();
+ rolesAsStrings.add(role.getKey());
+ return new SimpleGrantedAuthority(roleMapping.getRolePrefix() + mapEntry.getKey());
+ }).collect(Collectors.toSet());
+ logger.info("Adding ALL roles: {}.", StringUtils.collectionToCommaDelimitedString(rolesAsStrings));
+ }
+ return grantedAuthorities;
+ }
+
+ @Override
+ public Set<GrantedAuthority> mapClaimsToAuthorities(String clientIdParam, List<String> claims) {
+ logger.debug("Mapping claims to authorities");
+ final String clientId;
+ if (clientIdParam == null) {
+ clientId = this.defaultProviderId;
+ }
+ else {
+ clientId = clientIdParam;
+ }
+
+ final ProviderRoleMapping groupMapping = this.providerRoleMappings.get(clientId);
+ if (groupMapping == null) {
+ throw new IllegalArgumentException("No role mapping found for clientId " + clientId);
+ }
+
+ final List<String> rolesAsStrings = new ArrayList<>();
+ final Set<GrantedAuthority> grantedAuthorities = new HashSet<>();
+
+ if (groupMapping.isMapGroupClaims()) {
+ if (!claims.isEmpty()) {
+ for (Map.Entry<CoreSecurityRoles, String> roleMappingEntry : groupMapping.convertGroupMappingKeysToCoreSecurityRoles().entrySet()) {
+ final CoreSecurityRoles role = roleMappingEntry.getKey();
+ final String expectedOAuthScope = roleMappingEntry.getValue();
+ logger.debug("Checking group mapping {} {}", role, expectedOAuthScope);
+ for (String claim : claims) {
+ logger.debug("Checking against claim {} {}", claim, expectedOAuthScope);
+ if (claim.equalsIgnoreCase(expectedOAuthScope)) {
+ final SimpleGrantedAuthority oauthRoleAuthority = new SimpleGrantedAuthority(groupMapping.getRolePrefix() + role.getKey());
+ rolesAsStrings.add(oauthRoleAuthority.getAuthority());
+ grantedAuthorities.add(oauthRoleAuthority);
+ logger.debug("Adding to granted authorities {}", oauthRoleAuthority);
+ }
+ }
+ }
+ logger.info("Adding groups: {}.", StringUtils.collectionToCommaDelimitedString(rolesAsStrings));
+ }
+ }
+
+ return grantedAuthorities;
+ }
+
+ private Set<String> pathParts(Set<String> scopes) {
+ // Strip away the leading part if the scope is a URI such as
+ // api://dataflow-server/dataflow.create, resulting in dataflow.create
+ return scopes.stream().map(scope -> {
+ try {
+ URI uri = URI.create(scope);
+ String path = uri.getPath();
+ if (StringUtils.hasText(path) && path.charAt(0) == '/') {
+ return path.substring(1);
+ }
+ } catch (Exception e) {
+ // not a parseable URI; fall through and return the scope as-is
+ }
+ return scope;
+ })
+ .collect(Collectors.toSet());
+ }
+}
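Behaviour sketch for the default mapping with scope mapping enabled (fragment; "uaa" is a placeholder provider id):

    DefaultAuthoritiesMapper mapper = new DefaultAuthoritiesMapper("uaa", true);
    Set<GrantedAuthority> authorities = mapper.mapScopesToAuthorities(
            null,                                                      // null -> default provider ("uaa")
            new HashSet<>(Arrays.asList("dataflow.view", "openid")),   // scopes from the token
            "ignored-token");                                          // token value is unused here
    // yields ROLE_VIEW; "openid" matches no role scope and is dropped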
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultOAuth2TokenUtilsService.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultOAuth2TokenUtilsService.java
new file mode 100644
index 0000000000..063c6b7917
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultOAuth2TokenUtilsService.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2019-2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService;
+import org.springframework.security.core.Authentication;
+import org.springframework.security.core.context.SecurityContextHolder;
+import org.springframework.security.oauth2.client.OAuth2AuthorizedClient;
+import org.springframework.security.oauth2.client.OAuth2AuthorizedClientService;
+import org.springframework.security.oauth2.client.authentication.OAuth2AuthenticationToken;
+import org.springframework.security.oauth2.core.AbstractOAuth2Token;
+import org.springframework.security.oauth2.server.resource.authentication.BearerTokenAuthentication;
+import org.springframework.security.oauth2.server.resource.authentication.JwtAuthenticationToken;
+import org.springframework.util.Assert;
+import org.springframework.util.StringUtils;
+
+/**
+ * Utility methods for retrieving access tokens.
+ *
+ * @author Gunnar Hillert
+ */
+public class DefaultOAuth2TokenUtilsService implements OAuth2TokenUtilsService {
+
+ private final OAuth2AuthorizedClientService oauth2AuthorizedClientService;
+
+ public DefaultOAuth2TokenUtilsService(OAuth2AuthorizedClientService oauth2AuthorizedClientService) {
+ Assert.notNull(oauth2AuthorizedClientService, "oauth2AuthorizedClientService must not be null.");
+ this.oauth2AuthorizedClientService = oauth2AuthorizedClientService;
+ }
+
+ /**
+ * Retrieves the access token from the {@link Authentication} implementation.
+ *
+ * @return May return null.
+ */
+ @Override
+ public String getAccessTokenOfAuthenticatedUser() {
+
+ final Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
+
+ if (authentication == null) {
+ throw new IllegalStateException("Cannot retrieve the authentication object from the SecurityContext. Are you authenticated?");
+ }
+
+ final String accessTokenOfAuthenticatedUser;
+
+ if (authentication instanceof BearerTokenAuthentication) {
+ accessTokenOfAuthenticatedUser = ((BearerTokenAuthentication) authentication).getToken().getTokenValue();
+ }
+ else if (authentication instanceof OAuth2AuthenticationToken) {
+ final OAuth2AuthenticationToken oauth2AuthenticationToken = (OAuth2AuthenticationToken) authentication;
+ final OAuth2AuthorizedClient oauth2AuthorizedClient = this.getAuthorizedClient(oauth2AuthenticationToken);
+ accessTokenOfAuthenticatedUser = oauth2AuthorizedClient.getAccessToken().getTokenValue();
+ }
+ else if (authentication instanceof JwtAuthenticationToken) {
+ AbstractOAuth2Token token = (AbstractOAuth2Token) authentication.getCredentials();
+ accessTokenOfAuthenticatedUser = token.getTokenValue();
+ }
+ else {
+ throw new IllegalStateException("Unsupported authentication object type " + authentication);
+ }
+
+ return accessTokenOfAuthenticatedUser;
+ }
+
+ @Override
+ public OAuth2AuthorizedClient getAuthorizedClient(OAuth2AuthenticationToken auth2AuthenticationToken) {
+
+ final String principalName = auth2AuthenticationToken.getName();
+ final String clientRegistrationId = auth2AuthenticationToken.getAuthorizedClientRegistrationId();
+
+ if (!StringUtils.hasText(principalName)) {
+ throw new IllegalStateException("The retrieved principalName must not be null or empty.");
+ }
+
+ if (!StringUtils.hasText(clientRegistrationId)) {
+ throw new IllegalStateException("The retrieved clientRegistrationId must not be null or empty.");
+ }
+
+ final OAuth2AuthorizedClient oauth2AuthorizedClient = this.oauth2AuthorizedClientService.loadAuthorizedClient(clientRegistrationId, principalName);
+
+ if (oauth2AuthorizedClient == null) {
+ throw new IllegalStateException(String.format(
+ "No oauth2AuthorizedClient returned for clientRegistrationId '%s' and principalName '%s'.",
+ clientRegistrationId, principalName));
+ }
+ return oauth2AuthorizedClient;
+ }
+
+ @Override
+ public void removeAuthorizedClient(OAuth2AuthorizedClient auth2AuthorizedClient) {
+ Assert.notNull(auth2AuthorizedClient, "The auth2AuthorizedClient must not be null.");
+ this.oauth2AuthorizedClientService.removeAuthorizedClient(
+ auth2AuthorizedClient.getClientRegistration().getRegistrationId(),
+ auth2AuthorizedClient.getPrincipalName());
+ }
+}
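A usage sketch (fragment; assumes an authenticated SecurityContext and an existing OAuth2AuthorizedClientService bean):

    OAuth2TokenUtilsService tokenUtils =
            new DefaultOAuth2TokenUtilsService(authorizedClientService);  // assumed bean
    String accessToken = tokenUtils.getAccessTokenOfAuthenticatedUser();   // current user's token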
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultPrincipalExtractor.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultPrincipalExtractor.java
new file mode 100644
index 0000000000..a8d5254993
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/DefaultPrincipalExtractor.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2018-2020 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+import java.util.Map;
+
+/**
+ * The default implementation of the {@link PrincipalExtractor} that extracts the username
+ * of the principal.
+ *
+ * @author Gunnar Hillert
+ *
+ */
+public class DefaultPrincipalExtractor implements PrincipalExtractor {
+
+ private static final String[] PRINCIPAL_KEYS = new String[] { "user_name", "user", "username",
+ "userid", "user_id", "login", "id", "name", "cid", "client_id" };
+
+ @Override
+ public Object extractPrincipal(Map<String, Object> map) {
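+ // Return the value of the first well-known principal attribute present in the map.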
+ for (String key : PRINCIPAL_KEYS) {
+ if (map.containsKey(key)) {
+ return map.get(key);
+ }
+ }
+ return null;
+ }
+}
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/ExternalOauth2ResourceAuthoritiesMapper.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/ExternalOauth2ResourceAuthoritiesMapper.java
new file mode 100644
index 0000000000..799d44b0c4
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/ExternalOauth2ResourceAuthoritiesMapper.java
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2018-2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+import java.net.URI;
+import java.util.HashSet;
+import java.util.Locale;
+import java.util.Set;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.ResponseEntity;
+import org.springframework.security.core.GrantedAuthority;
+import org.springframework.security.core.authority.SimpleGrantedAuthority;
+import org.springframework.security.oauth2.client.http.OAuth2ErrorResponseErrorHandler;
+import org.springframework.security.oauth2.core.OAuth2AccessToken;
+import org.springframework.util.Assert;
+import org.springframework.util.StringUtils;
+import org.springframework.web.client.RestOperations;
+import org.springframework.web.client.RestTemplate;
+
+/**
+ * {@link AuthoritiesMapper} that looks up
+ * {@link CoreSecurityRoles} from an external HTTP resource. Requests to the
+ * external HTTP resource are authenticated by forwarding the user's access
+ * token. The external resource's response body MUST be a JSON array
+ * containing strings with values corresponding to
+ * {@link CoreSecurityRoles#key} values. For example, a response containing
+ * {@code ["VIEW", "CREATE"]} would grant the user
+ * {@code ROLE_VIEW} and {@code ROLE_CREATE}.
+ *
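+ * <p>A minimal usage sketch; the endpoint URI, provider id and token value are
+ * illustrative assumptions, not fixed values:
+ * <pre>{@code
+ * AuthoritiesMapper mapper =
+ *     new ExternalOauth2ResourceAuthoritiesMapper(URI.create("https://example.com/roles"));
+ * Set<GrantedAuthority> authorities =
+ *     mapper.mapScopesToAuthorities("uaa", Collections.emptySet(), accessToken);
+ * }</pre>
+ *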
+ * @author Mike Heath
+ * @author Gunnar Hillert
+ */
+public class ExternalOauth2ResourceAuthoritiesMapper implements AuthoritiesMapper {
+
+ private static final Logger logger = LoggerFactory.getLogger(ExternalOauth2ResourceAuthoritiesMapper.class);
+
+ public static final GrantedAuthority CREATE = new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + CoreSecurityRoles.CREATE.getKey());
+ public static final GrantedAuthority DEPLOY = new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + CoreSecurityRoles.DEPLOY.getKey());
+ public static final GrantedAuthority DESTROY = new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + CoreSecurityRoles.DESTROY.getKey());
+ public static final GrantedAuthority MANAGE = new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + CoreSecurityRoles.MANAGE.getKey());
+ public static final GrantedAuthority MODIFY = new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + CoreSecurityRoles.MODIFY.getKey());
+ public static final GrantedAuthority SCHEDULE = new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + CoreSecurityRoles.SCHEDULE.getKey());
+ public static final GrantedAuthority VIEW = new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + CoreSecurityRoles.VIEW.getKey());
+
+ private final URI roleProviderUri;
+ private final RestOperations restOperations;
+
+ /**
+ *
+ * @param roleProviderUri an HTTP GET request is sent to this URI to fetch
+ * the user's security roles
+ */
+ public ExternalOauth2ResourceAuthoritiesMapper(
+ URI roleProviderUri) {
+ Assert.notNull(roleProviderUri, "The provided roleProviderUri must not be null.");
+ this.roleProviderUri = roleProviderUri;
+
+ final RestTemplate restTemplate = new RestTemplate();
+ restTemplate.setErrorHandler(new OAuth2ErrorResponseErrorHandler());
+ this.restOperations = restTemplate;
+ }
+
+
+ @Override
+ public Set<GrantedAuthority> mapScopesToAuthorities(String providerId, Set<String> scopes, String token) {
+ logger.debug("Getting permissions from {}", roleProviderUri);
+
+ final HttpHeaders headers = new HttpHeaders();
+ headers.add(HttpHeaders.AUTHORIZATION, OAuth2AccessToken.TokenType.BEARER.getValue() + " " + token);
+
+ final HttpEntity<Void> entity = new HttpEntity<>(null, headers);
+ final ResponseEntity<String[]> response = restOperations.exchange(roleProviderUri, HttpMethod.GET, entity, String[].class);
+
+ final Set<GrantedAuthority> authorities = new HashSet<>();
+ for (String permission : response.getBody()) {
+ if (!StringUtils.hasText(permission)) {
+ logger.warn("Received an empty permission from {}", roleProviderUri);
+ } else {
+ final CoreSecurityRoles securityRole = CoreSecurityRoles.fromKey(permission.toUpperCase(Locale.ROOT));
+ if (securityRole == null) {
+ logger.warn("Invalid role {} provided by {}", permission, roleProviderUri);
+ } else {
+ switch (securityRole) {
+ case CREATE:
+ authorities.add(CREATE);
+ break;
+ case DEPLOY:
+ authorities.add(DEPLOY);
+ break;
+ case DESTROY:
+ authorities.add(DESTROY);
+ break;
+ case MANAGE:
+ authorities.add(MANAGE);
+ break;
+ case MODIFY:
+ authorities.add(MODIFY);
+ break;
+ case SCHEDULE:
+ authorities.add(SCHEDULE);
+ break;
+ case VIEW:
+ authorities.add(VIEW);
+ break;
+ }
+ }
+ }
+ }
+ logger.info("Roles added for user: {}.", authorities);
+ return authorities;
+ }
+}
+
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/MappingJwtGrantedAuthoritiesConverter.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/MappingJwtGrantedAuthoritiesConverter.java
new file mode 100644
index 0000000000..e31c908e8a
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/MappingJwtGrantedAuthoritiesConverter.java
@@ -0,0 +1,201 @@
+/*
+ * Copyright 2020-2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.cloud.common.security.support;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.core.convert.converter.Converter;
+import org.springframework.security.core.GrantedAuthority;
+import org.springframework.security.core.authority.SimpleGrantedAuthority;
+import org.springframework.security.oauth2.jwt.Jwt;
+import org.springframework.util.Assert;
+import org.springframework.util.ObjectUtils;
+import org.springframework.util.StringUtils;
+
+/**
+ * Extracts the {@link GrantedAuthority}s from scope attributes typically found
+ * in a {@link Jwt}.
+ *
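+ * <p>A minimal usage sketch; the prefix, mapping and claim values are illustrative
+ * assumptions, and {@code jwt} is a previously obtained {@link Jwt}:
+ * <pre>{@code
+ * MappingJwtGrantedAuthoritiesConverter converter = new MappingJwtGrantedAuthoritiesConverter();
+ * converter.setAuthorityPrefix("ROLE_");
+ * converter.setAuthoritiesMapping(Map.of("MANAGE", "dataflow.manage"));
+ * // a token carrying the scope "dataflow.manage" yields the authority ROLE_MANAGE
+ * Collection<GrantedAuthority> authorities = converter.convert(jwt);
+ * }</pre>
+ *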
+ * @author Gunnar Hillert
+ * @author Janne Valkealahti
+ */
+public final class MappingJwtGrantedAuthoritiesConverter implements Converter<Jwt, Collection<GrantedAuthority>> {
+
+ private final static Logger log = LoggerFactory.getLogger(MappingJwtGrantedAuthoritiesConverter.class);
+ private static final String DEFAULT_AUTHORITY_PREFIX = "SCOPE_";
+
+ private static final Collection<String> WELL_KNOWN_SCOPES_CLAIM_NAMES =
+ Arrays.asList("scope", "scp");
+ private static final Collection<String> WELL_KNOWN_GROUPS_CLAIM_NAMES =
+ Arrays.asList("groups", "roles");
+
+ private String authorityPrefix = DEFAULT_AUTHORITY_PREFIX;
+
+ private String authoritiesClaimName;
+ private String groupAuthoritiesClaimName;
+
+ private Map<String, String> roleAuthoritiesMapping = new HashMap<>();
+ private Map<String, String> groupAuthoritiesMapping = new HashMap<>();
+
+ /**
+ * Extract {@link GrantedAuthority}s from the given {@link Jwt}.
+ *
+ * @param jwt The {@link Jwt} token
+ * @return The {@link GrantedAuthority authorities} read from the token scopes
+ */
+ @Override
+ public Collection<GrantedAuthority> convert(Jwt jwt) {
+ log.debug("JWT: {}", jwt.getTokenValue());
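+ // Map each raw claim value through the configured role/group mappings (if any),
+ // then apply the authority prefix to the resulting names.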
+ Set<GrantedAuthority> collect = getAuthorities(jwt).stream()
+ .flatMap(authority -> {
+ if (roleAuthoritiesMapping.isEmpty() && groupAuthoritiesMapping.isEmpty()) {
+ return Stream.of(authority);
+ }
+ Stream<String> s1 = roleAuthoritiesMapping.entrySet().stream()
+ .filter(entry -> entry.getValue().equals(authority))
+ .map(entry -> entry.getKey()).distinct();
+ Stream<String> s2 = groupAuthoritiesMapping.entrySet().stream()
+ .filter(entry -> entry.getValue().equals(authority))
+ .map(entry -> entry.getKey()).distinct();
+ return Stream.concat(s1, s2);
+ })
+ .distinct()
+ .map(authority -> new SimpleGrantedAuthority(this.authorityPrefix + authority))
+ .collect(Collectors.toSet());
+ log.debug("JWT granted: {}", collect);
+ return collect;
+ }
+
+ /**
+ * Sets the prefix to use for {@link GrantedAuthority authorities} mapped by this converter.
+ * Defaults to {@code "SCOPE_"}.
+ *
+ * @param authorityPrefix The authority prefix
+ */
+ public void setAuthorityPrefix(String authorityPrefix) {
+ Assert.notNull(authorityPrefix, "authorityPrefix cannot be null");
+ this.authorityPrefix = authorityPrefix;
+ }
+
+ /**
+ * Sets the name of the token claim to use for mapping {@link GrantedAuthority
+ * authorities} by this converter. Defaults to the well-known scope claims
+ * {@code "scope"} and {@code "scp"}.
+ *
+ * @param authoritiesClaimName The token claim name to map authorities
+ */
+ public void setAuthoritiesClaimName(String authoritiesClaimName) {
+ Assert.hasText(authoritiesClaimName, "authoritiesClaimName cannot be empty");
+ this.authoritiesClaimName = authoritiesClaimName;
+ }
+
+ /**
+ * Sets the mapping from authorities resolved from the JWT to granted authorities.
+ * Keys are the target authority names; values are the claim values they are mapped from.
+ *
+ * @param authoritiesMapping the authoritiesMapping to set
+ */
+ public void setAuthoritiesMapping(Map<String, String> authoritiesMapping) {
+ Assert.notNull(authoritiesMapping, "authoritiesMapping cannot be null");
+ this.roleAuthoritiesMapping = authoritiesMapping;
+ }
+
+ /**
+ * Sets the name of the token claim to use for mapping group {@link GrantedAuthority
+ * authorities} by this converter. Defaults to the well-known group claims
+ * {@code "groups"} and {@code "roles"}.
+ *
+ * @param groupAuthoritiesClaimName the token claim name to map group
+ * authorities
+ */
+ public void setGroupAuthoritiesClaimName(String groupAuthoritiesClaimName) {
+ this.groupAuthoritiesClaimName = groupAuthoritiesClaimName;
+ }
+
+ /**
+ * Sets the group mapping from authorities resolved from the JWT to granted
+ * authorities.
+ *
+ * @param groupAuthoritiesMapping the groupAuthoritiesMapping to set
+ */
+ public void setGroupAuthoritiesMapping(Map<String, String> groupAuthoritiesMapping) {
+ this.groupAuthoritiesMapping = groupAuthoritiesMapping;
+ }
+
+ private String getAuthoritiesClaimName(Jwt jwt) {
+ if (this.authoritiesClaimName != null) {
+ return this.authoritiesClaimName;
+ }
+ for (String claimName : WELL_KNOWN_SCOPES_CLAIM_NAMES) {
+ if (jwt.hasClaim(claimName)) {
+ return claimName;
+ }
+ }
+ return null;
+ }
+
+ private String getGroupAuthoritiesClaimName(Jwt jwt) {
+ if (this.groupAuthoritiesClaimName != null) {
+ return this.groupAuthoritiesClaimName;
+ }
+ for (String claimName : WELL_KNOWN_GROUPS_CLAIM_NAMES) {
+ if (jwt.hasClaim(claimName)) {
+ return claimName;
+ }
+ }
+ return null;
+ }
+
+ private Collection<String> getAuthorities(Jwt jwt) {
+ String scopeClaimName = getAuthoritiesClaimName(jwt);
+ String groupClaimName = getGroupAuthoritiesClaimName(jwt);
+
+ List<String> claimAsStringList1 = null;
+ List<String> claimAsStringList2 = null;
+
+ // Spring Security converts array-typed claims incorrectly, so skip claims that are raw arrays
+ if (scopeClaimName != null && !ObjectUtils.isArray(jwt.getClaim(scopeClaimName))) {
+ claimAsStringList1 = jwt.getClaimAsStringList(scopeClaimName);
+ }
+ if (groupClaimName != null && !ObjectUtils.isArray(jwt.getClaim(groupClaimName))) {
+ claimAsStringList2 = jwt.getClaimAsStringList(groupClaimName);
+ }
+
+ List<String> claimAsStringList = new ArrayList<>();
+ if (claimAsStringList1 != null) {
+ List<String> collect = claimAsStringList1.stream()
+ .flatMap(c -> Arrays.stream(c.split(" ")))
+ .filter(c -> StringUtils.hasText(c))
+ .collect(Collectors.toList());
+ claimAsStringList.addAll(collect);
+ }
+ if (claimAsStringList2 != null) {
+ claimAsStringList.addAll(claimAsStringList2);
+ }
+ return claimAsStringList;
+ }
+}
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/OnOAuth2SecurityDisabled.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/OnOAuth2SecurityDisabled.java
new file mode 100644
index 0000000000..c5ad6f25af
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/OnOAuth2SecurityDisabled.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2016-2018 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+import org.springframework.boot.autoconfigure.condition.NoneNestedConditions;
+import org.springframework.context.annotation.Condition;
+import org.springframework.context.annotation.Conditional;
+
+/**
+ * {@link Condition} that only matches when OAuth2 security is not configured,
+ * i.e. when no properties below {@code spring.security.oauth2} are set.
+ *
+ * @author Gunnar Hillert
+ * @since 1.1.0
+ */
+public class OnOAuth2SecurityDisabled extends NoneNestedConditions {
+
+ public OnOAuth2SecurityDisabled() {
+ super(ConfigurationPhase.REGISTER_BEAN);
+ }
+
+ @Conditional(OnOAuth2SecurityEnabled.class)
+ static class OAuthEnabled {
+ }
+}
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/OnOAuth2SecurityEnabled.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/OnOAuth2SecurityEnabled.java
new file mode 100644
index 0000000000..fbd0c656b3
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/OnOAuth2SecurityEnabled.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2016-2018 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+import java.util.Collections;
+import java.util.Map;
+
+import org.springframework.boot.autoconfigure.condition.ConditionOutcome;
+import org.springframework.boot.autoconfigure.condition.SpringBootCondition;
+import org.springframework.boot.context.properties.bind.Bindable;
+import org.springframework.boot.context.properties.bind.Binder;
+import org.springframework.context.annotation.Condition;
+import org.springframework.context.annotation.ConditionContext;
+import org.springframework.core.env.Environment;
+import org.springframework.core.type.AnnotatedTypeMetadata;
+
+/**
+ * {@link Condition} that only matches if at least one property below
+ * {@code spring.security.oauth2} is set.
+ *
+ * @author Gunnar Hillert
+ * @since 1.1.0
+ */
+public class OnOAuth2SecurityEnabled extends SpringBootCondition {
+
+ @Override
+ public ConditionOutcome getMatchOutcome(ConditionContext context, AnnotatedTypeMetadata metadata) {
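+ // OAuth2 is considered enabled as soon as any property below 'spring.security.oauth2' is set.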
+ Map<String, String> properties = getSubProperties(context.getEnvironment(), "spring.security.oauth2");
+ return new ConditionOutcome(!properties.isEmpty(), "OAuth2 Enabled");
+ }
+
+ public static Map<String, String> getSubProperties(Environment environment, String keyPrefix) {
+ return Binder.get(environment)
+ .bind(keyPrefix, Bindable.mapOf(String.class, String.class))
+ .orElseGet(Collections::emptyMap);
+ }
+}
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/PrincipalExtractor.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/PrincipalExtractor.java
new file mode 100644
index 0000000000..4fb9e18b45
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/PrincipalExtractor.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2018-2020 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+import java.util.Map;
+
+/**
+ * Strategy interface for extracting the principal from a map of user attributes or claims.
+ *
+ * @author Gunnar Hillert
+ * @since 1.3.0
+ */
+public interface PrincipalExtractor {
+
+ Object extractPrincipal(Map<String, Object> map);
+}
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/SecurityConfigUtils.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/SecurityConfigUtils.java
new file mode 100644
index 0000000000..efbaf67abf
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/SecurityConfigUtils.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2017-2024 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.slf4j.LoggerFactory;
+
+import org.springframework.cloud.common.security.AuthorizationProperties;
+import org.springframework.http.HttpMethod;
+import org.springframework.security.config.annotation.web.builders.HttpSecurity;
+import org.springframework.security.config.annotation.web.configurers.AuthorizeHttpRequestsConfigurer;
+import org.springframework.security.web.access.expression.WebExpressionAuthorizationManager;
+import org.springframework.util.Assert;
+import org.springframework.util.StringUtils;
+
+/**
+ * Utility methods for applying the rule-based ("simple") security configuration.
+ *
+ * @author Gunnar Hillert
+ */
+public class SecurityConfigUtils {
+
+ private static final org.slf4j.Logger logger = LoggerFactory.getLogger(SecurityConfigUtils.class);
+
+ public static final String ROLE_PREFIX = "ROLE_";
+
+ public static final Pattern AUTHORIZATION_RULE;
+
+ public static final String BASIC_AUTH_REALM_NAME = "Spring";
+
+ static {
+ String methodsRegex = StringUtils.arrayToDelimitedString(HttpMethod.values(), "|");
+ AUTHORIZATION_RULE = Pattern.compile("(" + methodsRegex + ")\\s+(.+)\\s+=>\\s+(.+)");
+ }
+
+ /**
+ * Read the configuration for "simple" (that is, not ACL based) security and apply it.
+ *
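+ * <p>Example rule (path and expression are illustrative):
+ * {@code GET /about => hasRole('ROLE_VIEW')}
+ *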
+ * @param auth The Configurer to apply the authorization rules to
+ * @param authorizationProperties Contains the rules to configure authorization
+ */
+ public static void configureSimpleSecurity(
+ AuthorizeHttpRequestsConfigurer.AuthorizationManagerRequestMatcherRegistry auth,
+ AuthorizationProperties authorizationProperties) {
+ for (String rule : authorizationProperties.getRules()) {
+ Matcher matcher = AUTHORIZATION_RULE.matcher(rule);
+ Assert.isTrue(matcher.matches(),
+ String.format("Unable to parse security rule [%s], expected format is 'HTTP_METHOD ANT_PATTERN => "
+ + "SECURITY_ATTRIBUTE(S)'", rule));
+
+ HttpMethod method = HttpMethod.valueOf(matcher.group(1).trim());
+ String urlPattern = matcher.group(2).trim();
+ String attribute = matcher.group(3).trim();
+
+ logger.info("Authorization '{}' | '{}' | '{}'", method, attribute, urlPattern);
+ auth.requestMatchers(method, urlPattern).access(new WebExpressionAuthorizationManager(attribute));
+ }
+
+ }
+}
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/SecurityStateBean.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/SecurityStateBean.java
new file mode 100644
index 0000000000..2641ce9f63
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/java/org/springframework/cloud/common/security/support/SecurityStateBean.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2017-2018 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+/**
+ * State-holder for computed security meta-information.
+ *
+ * @author Gunnar Hillert
+ */
+public class SecurityStateBean {
+
+ private boolean authenticationEnabled;
+
+ public SecurityStateBean() {
+ super();
+ }
+
+ public boolean isAuthenticationEnabled() {
+ return authenticationEnabled;
+ }
+
+ public void setAuthenticationEnabled(boolean authenticationEnabled) {
+ this.authenticationEnabled = authenticationEnabled;
+ }
+
+}
diff --git a/spring-cloud-dataflow-server-core/.jdk8 b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/resources/META-INF/spring.factories
similarity index 100%
rename from spring-cloud-dataflow-server-core/.jdk8
rename to spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/resources/META-INF/spring.factories
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports
new file mode 100644
index 0000000000..cc9b88b973
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports
@@ -0,0 +1 @@
+org.springframework.cloud.common.security.CommonSecurityAutoConfiguration
\ No newline at end of file
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/OnOAuth2SecurityDisabledTests.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/OnOAuth2SecurityDisabledTests.java
new file mode 100644
index 0000000000..f39a46fe35
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/OnOAuth2SecurityDisabledTests.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2018-2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security;
+
+import org.junit.jupiter.api.Test;
+
+import org.springframework.boot.test.util.TestPropertyValues;
+import org.springframework.cloud.common.security.support.OnOAuth2SecurityDisabled;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Conditional;
+import org.springframework.context.annotation.Configuration;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class OnOAuth2SecurityDisabledTests {
+
+ @Test
+ public void noPropertySet() throws Exception {
+ AnnotationConfigApplicationContext context = load(Config.class);
+ assertThat(context.containsBean("myBean")).isTrue();
+ context.close();
+ }
+
+ @Test
+ public void propertyClientIdSet() throws Exception {
+ AnnotationConfigApplicationContext context =
+ load(Config.class, "spring.security.oauth2.client.registration.uaa.client-id:12345");
+ assertThat(context.containsBean("myBean")).isFalse();
+ context.close();
+ }
+
+ private AnnotationConfigApplicationContext load(Class<?> config, String... env) {
+ AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
+ TestPropertyValues.of(env).applyTo(context);
+ context.register(config);
+ context.refresh();
+ return context;
+ }
+
+ @Configuration
+ @Conditional(OnOAuth2SecurityDisabled.class)
+ public static class Config {
+ @Bean
+ public String myBean() {
+ return "myBean";
+ }
+ }
+}
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/OnOAuth2SecurityEnabledTests.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/OnOAuth2SecurityEnabledTests.java
new file mode 100644
index 0000000000..4bcfe1789c
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/OnOAuth2SecurityEnabledTests.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2016-2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security;
+
+import org.junit.jupiter.api.Test;
+
+import org.springframework.boot.test.util.TestPropertyValues;
+import org.springframework.cloud.common.security.support.OnOAuth2SecurityEnabled;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Conditional;
+import org.springframework.context.annotation.Configuration;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
+/**
+ * @author Gunnar Hillert
+ */
+public class OnOAuth2SecurityEnabledTests {
+
+ @Test
+ public void noPropertySet() throws Exception {
+ AnnotationConfigApplicationContext context = load(Config.class);
+ assertThat(context.containsBean("myBean")).isFalse();
+ context.close();
+ }
+
+ @Test
+ public void propertySecurityOauth() throws Exception {
+ assertThatThrownBy(() -> {
+ load(Config.class, "spring.security.oauth2");
+ }).isInstanceOf(IllegalStateException.class);
+ }
+
+ @Test
+ public void propertyClientId() throws Exception {
+ AnnotationConfigApplicationContext context = load(Config.class,
+ "spring.security.oauth2.client.registration.uaa.client-id:12345");
+ assertThat(context.containsBean("myBean")).isTrue();
+ context.close();
+ }
+
+ @Test
+ public void clientIdOnlyWithNoValue() throws Exception {
+ AnnotationConfigApplicationContext context = load(Config.class,
+ "spring.security.oauth2.client.registration.uaa.client-id");
+ assertThat(context.containsBean("myBean")).isTrue();
+ context.close();
+ }
+
+ private AnnotationConfigApplicationContext load(Class<?> config, String... env) {
+ AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
+ TestPropertyValues.of(env).applyTo(context);
+ context.register(config);
+ context.refresh();
+ return context;
+ }
+
+ @Configuration
+ @Conditional(OnOAuth2SecurityEnabled.class)
+ public static class Config {
+ @Bean
+ public String myBean() {
+ return "myBean";
+ }
+ }
+}
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/DefaultAuthoritiesMapperTests.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/DefaultAuthoritiesMapperTests.java
new file mode 100644
index 0000000000..3332ae70f3
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/DefaultAuthoritiesMapperTests.java
@@ -0,0 +1,303 @@
+/*
+ * Copyright 2017-2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.junit.jupiter.api.Test;
+
+import org.springframework.cloud.common.security.ProviderRoleMapping;
+import org.springframework.security.core.GrantedAuthority;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
+/**
+ * @author Gunnar Hillert
+ */
+class DefaultAuthoritiesMapperTests {
+
+ @Test
+ void nullConstructor() throws Exception {
+ assertThatThrownBy(() -> {
+ new DefaultAuthoritiesMapper(null, "");
+ }).isInstanceOf(IllegalArgumentException.class).hasMessageContaining("providerRoleMappings must not be null.");
+ }
+
+ @Test
+ void mapScopesToAuthoritiesWithNullParameters() throws Exception {
+ DefaultAuthoritiesMapper authoritiesMapper = new DefaultAuthoritiesMapper(Collections.emptyMap(), "");
+
+ assertThatThrownBy(() -> {
+ authoritiesMapper.mapScopesToAuthorities(null, null, null);
+ }).isInstanceOf(IllegalArgumentException.class).hasMessageContaining("The scopes argument must not be null.");
+ assertThatThrownBy(() -> {
+ authoritiesMapper.mapScopesToAuthorities("myClientId", null, null);
+ }).isInstanceOf(IllegalArgumentException.class).hasMessageContaining("The scopes argument must not be null.");
+ }
+
+ @Test
+ void that7AuthoritiesAreReturned() throws Exception {
+ DefaultAuthoritiesMapper authoritiesMapper = new DefaultAuthoritiesMapper("uaa", false);
+ Set<GrantedAuthority> authorities = authoritiesMapper.mapScopesToAuthorities("uaa", Collections.emptySet(), null);
+
+ assertThat(authorities).hasSize(7);
+ assertThat(authorities)
+ .extracting(GrantedAuthority::getAuthority)
+ .containsExactlyInAnyOrder("ROLE_MANAGE", "ROLE_CREATE", "ROLE_VIEW", "ROLE_DEPLOY", "ROLE_MODIFY",
+ "ROLE_SCHEDULE", "ROLE_DESTROY");
+ }
+
+ @Test
+ void emptyMapConstructor() throws Exception {
+ Set<String> scopes = new HashSet<>();
+ scopes.add("dataflow.manage");
+ scopes.add("dataflow.view");
+ scopes.add("dataflow.create");
+
+ DefaultAuthoritiesMapper authoritiesMapper = new DefaultAuthoritiesMapper("uaa", true);
+ Collection<? extends GrantedAuthority> authorities = authoritiesMapper.mapScopesToAuthorities("uaa", scopes, null);
+
+ assertThat(authorities).hasSize(3);
+ assertThat(authorities)
+ .extracting(GrantedAuthority::getAuthority)
+ .containsExactlyInAnyOrder("ROLE_MANAGE", "ROLE_CREATE", "ROLE_VIEW");
+ }
+
+ @Test
+ void mapConstructorWithIncompleteRoleMappings() throws Exception {
+ ProviderRoleMapping roleMapping = new ProviderRoleMapping();
+ roleMapping.setMapOauthScopes(true);
+ roleMapping.addRoleMapping("ROLE_MANAGE", "foo-scope-in-oauth");
+ assertThatThrownBy(() -> {
+ new DefaultAuthoritiesMapper("uaa", roleMapping);
+ }).isInstanceOf(IllegalArgumentException.class).hasMessageContaining(
+ "The following 6 roles are not mapped: CREATE, DEPLOY, DESTROY, MODIFY, SCHEDULE, VIEW.");
+ }
+
+ @Test
+ void that3MappedAuthoritiesAreReturned() throws Exception {
+ Map<String, String> roleMappings = Map.of(
+ "ROLE_MANAGE", "dataflow_manage",
+ "ROLE_VIEW", "dataflow_view",
+ "ROLE_CREATE", "dataflow_create",
+ "ROLE_MODIFY", "dataflow_modify",
+ "ROLE_DEPLOY", "dataflow_deploy",
+ "ROLE_DESTROY", "dataflow_destroy",
+ "ROLE_SCHEDULE", "dataflow_schedule"
+ );
+
+ ProviderRoleMapping providerRoleMapping = new ProviderRoleMapping();
+ providerRoleMapping.setMapOauthScopes(true);
+ providerRoleMapping.getRoleMappings().putAll(roleMappings);
+
+ Set<String> roles = Set.of("dataflow_manage", "dataflow_view", "dataflow_deploy");
+
+ DefaultAuthoritiesMapper defaultAuthoritiesMapper = new DefaultAuthoritiesMapper("uaa", providerRoleMapping);
+ Collection<? extends GrantedAuthority> authorities = defaultAuthoritiesMapper.mapScopesToAuthorities("uaa",
+ roles, null);
+
+ assertThat(authorities).hasSize(3);
+ assertThat(authorities)
+ .extracting(GrantedAuthority::getAuthority)
+ .containsExactlyInAnyOrder("ROLE_DEPLOY", "ROLE_MANAGE", "ROLE_VIEW");
+ }
+
+ @Test
+ void that7MappedAuthoritiesAreReturned() throws Exception {
+ Map<String, String> roleMappings = Map.of(
+ "ROLE_MANAGE", "foo-manage",
+ "ROLE_VIEW", "bar-view",
+ "ROLE_CREATE", "blubba-create",
+ "ROLE_MODIFY", "foo-modify",
+ "ROLE_DEPLOY", "foo-deploy",
+ "ROLE_DESTROY", "foo-destroy",
+ "ROLE_SCHEDULE", "foo-schedule"
+ );
+
+ ProviderRoleMapping providerRoleMapping = new ProviderRoleMapping();
+ providerRoleMapping.setMapOauthScopes(true);
+ providerRoleMapping.getRoleMappings().putAll(roleMappings);
+
+ Set<String> scopes = Set.of(
+ "foo-manage",
+ "bar-view",
+ "blubba-create",
+ "foo-modify",
+ "foo-deploy",
+ "foo-destroy",
+ "foo-schedule"
+ );
+
+ DefaultAuthoritiesMapper defaultAuthoritiesMapper = new DefaultAuthoritiesMapper("uaa", providerRoleMapping);
+ Collection<? extends GrantedAuthority> authorities = defaultAuthoritiesMapper.mapScopesToAuthorities("uaa",
+ scopes, null);
+
+ assertThat(authorities).hasSize(7);
+ assertThat(authorities)
+ .extracting(GrantedAuthority::getAuthority)
+ .containsExactlyInAnyOrder("ROLE_CREATE", "ROLE_DEPLOY", "ROLE_DESTROY", "ROLE_MANAGE", "ROLE_MODIFY",
+ "ROLE_SCHEDULE", "ROLE_VIEW");
+ }
+
+ @Test
+ void that3MappedAuthoritiesAreReturnedForDefaultMapping() throws Exception {
+ ProviderRoleMapping providerRoleMapping = new ProviderRoleMapping();
+ providerRoleMapping.setMapOauthScopes(true);
+
+ Set<String> scopes = Set.of(
+ "dataflow.manage",
+ "dataflow.view",
+ "dataflow.create"
+ );
+
+ DefaultAuthoritiesMapper defaultAuthoritiesExtractor = new DefaultAuthoritiesMapper("uaa", providerRoleMapping);
+ Collection<? extends GrantedAuthority> authorities = defaultAuthoritiesExtractor.mapScopesToAuthorities("uaa",
+ scopes, null);
+
+ assertThat(authorities).hasSize(3);
+ assertThat(authorities)
+ .extracting(GrantedAuthority::getAuthority)
+ .containsExactlyInAnyOrder("ROLE_MANAGE", "ROLE_CREATE", "ROLE_VIEW");
+ }
+
+ @Test
+ void that7MappedAuthoritiesAreReturnedForDefaultMappingWithoutMappingScopes() throws Exception {
+ Set<String> scopes = Set.of(
+ "dataflow.manage",
+ "dataflow.view",
+ "dataflow.create"
+ );
+
+ DefaultAuthoritiesMapper defaultAuthoritiesExtractor = new DefaultAuthoritiesMapper("uaa", false);
+ Collection<? extends GrantedAuthority> authorities = defaultAuthoritiesExtractor.mapScopesToAuthorities("uaa",
+ scopes, null);
+
+ assertThat(authorities).hasSize(7);
+ assertThat(authorities)
+ .extracting(GrantedAuthority::getAuthority)
+ .containsExactlyInAnyOrder("ROLE_CREATE", "ROLE_DEPLOY", "ROLE_DESTROY", "ROLE_MANAGE", "ROLE_MODIFY",
+ "ROLE_SCHEDULE", "ROLE_VIEW");
+ }
+
+ @Test
+ void that2MappedAuthoritiesAreReturnedForDefaultMapping() throws Exception {
+ Set<String> scopes = Set.of(
+ "dataflow.view",
+ "dataflow.create"
+ );
+
+ DefaultAuthoritiesMapper defaultAuthoritiesExtractor = new DefaultAuthoritiesMapper("uaa", true);
+ Collection<? extends GrantedAuthority> authorities = defaultAuthoritiesExtractor.mapScopesToAuthorities("uaa",
+ scopes, null);
+
+ assertThat(authorities).hasSize(2);
+ assertThat(authorities.stream().map(authority -> authority.getAuthority()).collect(Collectors.toList()))
+ .containsExactlyInAnyOrder("ROLE_CREATE", "ROLE_VIEW");
+ }
+
+ @Test
+ void that7AuthoritiesAreReturnedAndOneOAuthScopeCoversMultipleServerRoles() throws Exception {
+ Map<String, String> roleMappings = Map.of(
+ "ROLE_MANAGE", "foo-manage",
+ "ROLE_VIEW", "foo-manage",
+ "ROLE_DEPLOY", "foo-manage",
+ "ROLE_DESTROY", "foo-manage",
+ "ROLE_MODIFY", "foo-manage",
+ "ROLE_SCHEDULE", "foo-manage",
+ "ROLE_CREATE", "blubba-create"
+ );
+
+ Set<String> scopes = Set.of("foo-manage", "blubba-create");
+
+ DefaultAuthoritiesMapper defaultAuthoritiesExtractor = new DefaultAuthoritiesMapper("uaa", true, roleMappings);
+ Collection<? extends GrantedAuthority> authorities = defaultAuthoritiesExtractor.mapScopesToAuthorities("uaa",
+ scopes, null);
+
+ assertThat(authorities).hasSize(7);
+ assertThat(authorities.stream().map(authority -> authority.getAuthority()).collect(Collectors.toList()))
+ .containsExactlyInAnyOrder("ROLE_CREATE", "ROLE_DEPLOY", "ROLE_DESTROY", "ROLE_MANAGE", "ROLE_MODIFY",
+ "ROLE_SCHEDULE", "ROLE_VIEW");
+ }
+
+ @Test
+ void thatUriStyleScopeRemovesLeadingPart() throws Exception {
+ Map<String, String> roleMappings = Map.of(
+ "ROLE_MANAGE", "foo-manage",
+ "ROLE_VIEW", "foo-manage",
+ "ROLE_DEPLOY", "foo-manage",
+ "ROLE_DESTROY", "foo-manage",
+ "ROLE_MODIFY", "foo-manage",
+ "ROLE_SCHEDULE", "foo-manage",
+ "ROLE_CREATE", "blubba-create"
+ );
+
+ Set<String> scopes = Set.of("api://foobar/foo-manage", "blubba-create");
+
+ DefaultAuthoritiesMapper defaultAuthoritiesExtractor = new DefaultAuthoritiesMapper("uaa", true, roleMappings);
+ Collection<? extends GrantedAuthority> authorities = defaultAuthoritiesExtractor.mapScopesToAuthorities("uaa",
+ scopes, null);
+
+ assertThat(authorities).hasSize(7);
+ assertThat(authorities)
+ .extracting(GrantedAuthority::getAuthority)
+ .containsExactlyInAnyOrder("ROLE_CREATE", "ROLE_DEPLOY", "ROLE_DESTROY", "ROLE_MANAGE", "ROLE_MODIFY",
+ "ROLE_SCHEDULE", "ROLE_VIEW");
+ }
+
+ @Test
+ void thatUriStyleScopeParsingCanBeDisabled() throws Exception {
+ Map<String, String> roleMappings = Map.of(
+ "ROLE_MANAGE", "/ROLE/2000803042",
+ "ROLE_VIEW", "/ROLE/2000803036",
+ "ROLE_DEPLOY", "/ROLE/2000803039",
+ "ROLE_DESTROY", "/ROLE/20008030340",
+ "ROLE_MODIFY", "/ROLE/2000803037",
+ "ROLE_SCHEDULE", "/ROLE/2000803038",
+ "ROLE_CREATE", "/ROLE/2000803041"
+ );
+
+ ProviderRoleMapping providerRoleMapping = new ProviderRoleMapping();
+ providerRoleMapping.setMapOauthScopes(true);
+ providerRoleMapping.setParseOauthScopePathParts(false);
+ providerRoleMapping.getRoleMappings().putAll(roleMappings);
+
+ Set<String> scopes = Set.of(
+ "/ROLE/2000803042",
+ "/ROLE/2000803036",
+ "/ROLE/2000803039",
+ "/ROLE/20008030340",
+ "/ROLE/2000803037",
+ "/ROLE/2000803038",
+ "/ROLE/2000803041"
+ );
+
+ DefaultAuthoritiesMapper defaultAuthoritiesMapper = new DefaultAuthoritiesMapper("uaa", providerRoleMapping);
+ Collection<? extends GrantedAuthority> authorities = defaultAuthoritiesMapper.mapScopesToAuthorities("uaa",
+ scopes, null);
+
+ assertThat(authorities).hasSize(7);
+ assertThat(authorities)
+ .extracting(GrantedAuthority::getAuthority)
+ .containsExactlyInAnyOrder("ROLE_CREATE", "ROLE_DEPLOY", "ROLE_DESTROY", "ROLE_MANAGE", "ROLE_MODIFY",
+ "ROLE_SCHEDULE", "ROLE_VIEW");
+ }
+}
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/ExternalOauth2ResourceAuthoritiesMapperTests.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/ExternalOauth2ResourceAuthoritiesMapperTests.java
new file mode 100644
index 0000000000..cdd9600a4a
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/ExternalOauth2ResourceAuthoritiesMapperTests.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2018-2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+import java.io.IOException;
+import java.net.URI;
+import java.util.HashSet;
+import java.util.Set;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import okhttp3.mockwebserver.MockResponse;
+import okhttp3.mockwebserver.MockWebServer;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.springframework.security.core.GrantedAuthority;
+import org.springframework.security.core.authority.SimpleGrantedAuthority;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+
+/**
+ * @author Mike Heath
+ * @author Gunnar Hillert
+ * @author Corneil du Plessis
+ */
+public class ExternalOauth2ResourceAuthoritiesMapperTests {
+
+ public MockWebServer mockBackEnd;
+
+
+ @Before
+ public void setUp() throws IOException {
+ mockBackEnd = new MockWebServer();
+ mockBackEnd.start();
+ }
+ @After
+ public void tearDown() throws IOException {
+ mockBackEnd.shutdown();
+ }
+
+
+ @Test
+ public void testExtractAuthorities() throws Exception {
+ assertAuthorities2(mockBackEnd.url("/authorities").uri(), "VIEW");
+ assertAuthorities2(mockBackEnd.url("/authorities").uri(), "VIEW", "CREATE", "MANAGE");
+ assertAuthorities2(mockBackEnd.url("/").uri(), "MANAGE");
+ assertAuthorities2(mockBackEnd.url("/").uri(), "DEPLOY", "DESTROY", "MODIFY", "SCHEDULE");
+ assertThat(mockBackEnd.getRequestCount()).isEqualTo(4);
+ }
+
+ private void assertAuthorities2(URI uri, String... roles) throws Exception {
+ ObjectMapper objectMapper = new ObjectMapper();
+ mockBackEnd.enqueue(new MockResponse()
+ .setBody(objectMapper.writeValueAsString(roles))
+ .addHeader("Content-Type", "application/json"));
+
+ final ExternalOauth2ResourceAuthoritiesMapper authoritiesExtractor =
+ new ExternalOauth2ResourceAuthoritiesMapper(uri);
+ final Set<GrantedAuthority> grantedAuthorities = authoritiesExtractor.mapScopesToAuthorities(null, new HashSet<>(), "1234567");
+ for (String role : roles) {
+ assertThat(grantedAuthorities).containsAnyOf(new SimpleGrantedAuthority(SecurityConfigUtils.ROLE_PREFIX + role));
+ }
+ assertThat(mockBackEnd.takeRequest().getHeaders().get("Authorization")).isEqualTo("Bearer 1234567");
+ }
+}
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/MappingJwtGrantedAuthoritiesConverterTests.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/MappingJwtGrantedAuthoritiesConverterTests.java
new file mode 100644
index 0000000000..ac5fb55274
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/MappingJwtGrantedAuthoritiesConverterTests.java
@@ -0,0 +1,271 @@
+/*
+ * Copyright 2020-2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.cloud.common.security.support;
+
+import java.time.Instant;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.junit.jupiter.api.Test;
+
+import org.springframework.security.core.GrantedAuthority;
+import org.springframework.security.core.authority.SimpleGrantedAuthority;
+import org.springframework.security.oauth2.jwt.Jwt;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+/**
+ * Tests for {@link MappingJwtGrantedAuthoritiesConverter}
+ *
+ */
+public class MappingJwtGrantedAuthoritiesConverterTests {
+
+ public static Jwt.Builder jwt() {
+ return Jwt.withTokenValue("token")
+ .header("alg", "none")
+ .audience(Arrays.asList("https://audience.example.org/"))
+ .expiresAt(Instant.MAX)
+ .issuedAt(Instant.MIN)
+ .issuer("https://issuer.example.org/")
+ .jti("jti")
+ .notBefore(Instant.MIN)
+ .subject("mock-test-subject");
+ }
+
+ public static Jwt user() {
+ return jwt()
+ .claim("sub", "mock-test-subject")
+ .build();
+ }
+
+ @Test
+ public void convertWhenTokenHasScopeAttributeThenTranslatedToAuthorities() {
+ Jwt jwt = jwt().claim("scope", "message:read message:write").build();
+
+ MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter();
+ Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt);
+
+ assertThat(authorities).containsExactlyInAnyOrder(
+ new SimpleGrantedAuthority("SCOPE_message:read"),
+ new SimpleGrantedAuthority("SCOPE_message:write"));
+ }
+
+ @Test
+ public void convertWithCustomAuthorityPrefixWhenTokenHasScopeAttributeThenTranslatedToAuthoritiesViaMapping() {
+ Jwt jwt = jwt().claim("scope", "message:read message:write").build();
+
+ MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter();
+ jwtGrantedAuthoritiesConverter.setAuthorityPrefix("ROLE_");
+ Map<String, String> authoritiesMapping = new HashMap<>();
+ authoritiesMapping.put("READ", "message:read");
+ authoritiesMapping.put("WRITE", "message:write");
+ jwtGrantedAuthoritiesConverter.setAuthoritiesMapping(authoritiesMapping);
+ Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt);
+
+ assertThat(authorities).containsExactly(
+ new SimpleGrantedAuthority("ROLE_READ"),
+ new SimpleGrantedAuthority("ROLE_WRITE"));
+ }
+
+ @Test
+ public void convertWithCustomAuthorityWhenTokenHasScopeAttributeThenTranslatedToAuthoritiesViaMapping() {
+ Jwt jwt = jwt().claim("scope", "message:read message:write").build();
+
+ MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter();
+ jwtGrantedAuthoritiesConverter.setAuthorityPrefix("");
+ Map<String, String> authoritiesMapping = new HashMap<>();
+ authoritiesMapping.put("ROLE_READ", "message:read");
+ authoritiesMapping.put("ROLE_WRITE", "message:write");
+ jwtGrantedAuthoritiesConverter.setAuthoritiesMapping(authoritiesMapping);
+ Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt);
+
+ assertThat(authorities).containsExactly(
+ new SimpleGrantedAuthority("ROLE_READ"),
+ new SimpleGrantedAuthority("ROLE_WRITE"));
+ }
+
+ @Test
+ public void convertWithCustomAuthorityPrefixWhenTokenHasScopeAttributeThenTranslatedToAuthorities() {
+ Jwt jwt = jwt().claim("scope", "message:read message:write").build();
+
+ MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter();
+ jwtGrantedAuthoritiesConverter.setAuthorityPrefix("ROLE_");
+ Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt);
+
+ assertThat(authorities).containsExactlyInAnyOrder(
+ new SimpleGrantedAuthority("ROLE_message:read"),
+ new SimpleGrantedAuthority("ROLE_message:write"));
+ }
+
+ @Test
+ public void convertWhenTokenHasEmptyScopeAttributeThenTranslatedToNoAuthorities() {
+ Jwt jwt = jwt().claim("scope", "").build();
+
+ MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter();
+ Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt);
+
+ assertThat(authorities).isEmpty();
+ }
+
+ @Test
+ public void convertWhenTokenHasScpAttributeThenTranslatedToAuthorities() {
+ Jwt jwt = jwt().claim("scp", Arrays.asList("message:read", "message:write")).build();
+
+ MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter();
+ Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt);
+
+ assertThat(authorities).containsExactlyInAnyOrder(
+ new SimpleGrantedAuthority("SCOPE_message:read"),
+ new SimpleGrantedAuthority("SCOPE_message:write"));
+ }
+
+ @Test
+ public void convertWithCustomAuthorityPrefixWhenTokenHasScpAttributeThenTranslatedToAuthorities() {
+ Jwt jwt = jwt().claim("scp", Arrays.asList("message:read", "message:write")).build();
+
+ MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter();
+ jwtGrantedAuthoritiesConverter.setAuthorityPrefix("ROLE_");
+ Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt);
+
+ assertThat(authorities).containsExactlyInAnyOrder(
+ new SimpleGrantedAuthority("ROLE_message:read"),
+ new SimpleGrantedAuthority("ROLE_message:write"));
+ }
+
+ @Test
+ public void convertWhenTokenHasEmptyScpAttributeThenTranslatedToNoAuthorities() {
+ Jwt jwt = jwt().claim("scp", Collections.emptyList()).build();
+
+ MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter();
+ Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt);
+
+ assertThat(authorities).isEmpty();
+ }
+
+ @Test
+ public void convertWhenTokenHasBothScopeAndScpThenScopeAttributeIsTranslatedToAuthorities() {
+ Jwt jwt = jwt()
+ .claim("scp", Arrays.asList("message:read", "message:write"))
+ .claim("scope", "missive:read missive:write")
+ .build();
+
+ MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter();
+ Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt);
+
+ assertThat(authorities).containsExactly(
+ new SimpleGrantedAuthority("SCOPE_missive:read"),
+ new SimpleGrantedAuthority("SCOPE_missive:write"));
+ }
+
+ @Test
+ public void convertWhenTokenHasEmptyScopeAndNonEmptyScpThenScopeAttributeIsTranslatedToNoAuthorities() {
+ Jwt jwt = jwt()
+ .claim("scp", Arrays.asList("message:read", "message:write"))
+ .claim("scope", "")
+ .build();
+
+ MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter();
+ Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt);
+
+ assertThat(authorities).isEmpty();
+ }
+
+ @Test
+ public void convertWhenTokenHasEmptyScopeAndEmptyScpAttributeThenTranslatesToNoAuthorities() {
+ Jwt jwt = jwt()
+ .claim("scp", Collections.emptyList())
+ .claim("scope", Collections.emptyList())
+ .build();
+
+ MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter();
+ Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt);
+
+ assertThat(authorities).isEmpty();
+ }
+
+ @Test
+ public void convertWhenTokenHasNoScopeAndNoScpAttributeThenTranslatesToNoAuthorities() {
+ Jwt jwt = jwt().claim("xxx", Arrays.asList("message:read", "message:write")).build();
+
+ MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter();
+ Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt);
+
+ assertThat(authorities).isEmpty();
+ }
+
+ @Test
+ public void convertWhenTokenHasUnsupportedTypeForScopeThenTranslatesToNoAuthorities() {
+ Jwt jwt = jwt().claim("scope", new String[] {"message:read", "message:write"}).build();
+
+ MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter();
+ Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt);
+
+ assertThat(authorities).isEmpty();
+ }
+
+ @Test
+ public void convertWhenTokenHasCustomClaimNameThenCustomClaimNameAttributeIsTranslatedToAuthorities() {
+ Jwt jwt = jwt()
+ .claim("xxx", Arrays.asList("message:read", "message:write"))
+ .claim("scope", "missive:read missive:write")
+ .build();
+
+ MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter();
+ jwtGrantedAuthoritiesConverter.setAuthoritiesClaimName("xxx");
+ Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt);
+
+ assertThat(authorities).containsExactlyInAnyOrder(
+ new SimpleGrantedAuthority("SCOPE_message:read"),
+ new SimpleGrantedAuthority("SCOPE_message:write"));
+ }
+
+ @Test
+ public void convertWhenTokenHasEmptyCustomClaimNameThenCustomClaimNameAttributeIsTranslatedToNoAuthorities() {
+ Jwt jwt = jwt()
+ .claim("roles", Collections.emptyList())
+ .claim("scope", "missive:read missive:write")
+ .build();
+
+ MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter();
+ jwtGrantedAuthoritiesConverter.setAuthoritiesClaimName("roles");
+ Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt);
+
+ assertThat(authorities).isEmpty();
+ }
+
+ @Test
+ public void convertWhenTokenHasNoCustomClaimNameThenCustomClaimNameAttributeIsTranslatedToNoAuthorities() {
+ Jwt jwt = jwt().claim("scope", "missive:read missive:write").build();
+
+ MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter();
+ jwtGrantedAuthoritiesConverter.setAuthoritiesClaimName("roles");
+ Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt);
+
+ assertThat(authorities).isEmpty();
+ }
+
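+ // The "groups" claim is mapped to authorities as well, using the default "SCOPE_" prefix.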
+ @Test
+ public void convertWhenTokenHasGroupClaims() {
+ Jwt jwt = jwt().claim("groups", Arrays.asList("role1")).build();
+ MappingJwtGrantedAuthoritiesConverter jwtGrantedAuthoritiesConverter = new MappingJwtGrantedAuthoritiesConverter();
+ Collection<GrantedAuthority> authorities = jwtGrantedAuthoritiesConverter.convert(jwt);
+ assertThat(authorities).containsExactlyInAnyOrder(new SimpleGrantedAuthority("SCOPE_role1"));
+ }
+}
diff --git a/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/OAuth2TokenUtilsServiceTests.java b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/OAuth2TokenUtilsServiceTests.java
new file mode 100644
index 0000000000..d0aa68a8c2
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-common-security-config-web/src/test/java/org/springframework/cloud/common/security/support/OAuth2TokenUtilsServiceTests.java
@@ -0,0 +1,154 @@
+/*
+ * Copyright 2019-2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security.support;
+
+import java.time.Instant;
+
+import org.junit.jupiter.api.Test;
+
+import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService;
+import org.springframework.security.core.Authentication;
+import org.springframework.security.core.context.SecurityContextHolder;
+import org.springframework.security.oauth2.client.OAuth2AuthorizedClient;
+import org.springframework.security.oauth2.client.OAuth2AuthorizedClientService;
+import org.springframework.security.oauth2.client.authentication.OAuth2AuthenticationToken;
+import org.springframework.security.oauth2.client.registration.ClientRegistration;
+import org.springframework.security.oauth2.core.AuthorizationGrantType;
+import org.springframework.security.oauth2.core.OAuth2AccessToken;
+import org.springframework.security.oauth2.core.OAuth2AccessToken.TokenType;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+/**
+ * Tests for {@link DefaultOAuth2TokenUtilsService}.
+ *
+ * @author Gunnar Hillert
+ */
+public class OAuth2TokenUtilsServiceTests {
+
+ @Test
+ public void testGetAccessTokenOfAuthenticatedUserWithNoAuthentication() {
+ SecurityContextHolder.getContext().setAuthentication(null);
+
+ final OAuth2AuthorizedClientService oauth2AuthorizedClientService = mock(OAuth2AuthorizedClientService.class);
+ OAuth2TokenUtilsService oAuth2TokenUtilsService = new DefaultOAuth2TokenUtilsService(oauth2AuthorizedClientService);
+
+ assertThatThrownBy(() -> {
+ oAuth2TokenUtilsService.getAccessTokenOfAuthenticatedUser();
+ }).isInstanceOf(IllegalStateException.class).hasMessageContaining(
+ "Cannot retrieve the authentication object from the SecurityContext. Are you authenticated?");
+ }
+
+ @Test
+ public void testGetAccessTokenOfAuthenticatedUserWithWrongAuthentication() {
+ final Authentication authentication = mock(Authentication.class);
+ SecurityContextHolder.getContext().setAuthentication(authentication);
+
+ final OAuth2AuthorizedClientService oauth2AuthorizedClientService = mock(OAuth2AuthorizedClientService.class);
+ OAuth2TokenUtilsService oAuth2TokenUtilsService = new DefaultOAuth2TokenUtilsService(oauth2AuthorizedClientService);
+
+ assertThatThrownBy(() -> {
+ oAuth2TokenUtilsService.getAccessTokenOfAuthenticatedUser();
+ }).isInstanceOf(IllegalStateException.class).hasMessageContaining("Unsupported authentication object type");
+ SecurityContextHolder.getContext().setAuthentication(null);
+ }
+
+ @Test
+ public void testGetAccessTokenOfAuthenticatedUserWithEmptyPrincipalName() {
+ final OAuth2AuthenticationToken authentication = mock(OAuth2AuthenticationToken.class);
+ when(authentication.getName()).thenReturn("");
+ when(authentication.getAuthorizedClientRegistrationId()).thenReturn("uaa");
+ SecurityContextHolder.getContext().setAuthentication(authentication);
+
+ final OAuth2AuthorizedClientService oauth2AuthorizedClientService = mock(OAuth2AuthorizedClientService.class);
+ OAuth2TokenUtilsService oAuth2TokenUtilsService = new DefaultOAuth2TokenUtilsService(oauth2AuthorizedClientService);
+
+ assertThatThrownBy(() -> {
+ oAuth2TokenUtilsService.getAccessTokenOfAuthenticatedUser();
+ }).isInstanceOf(IllegalStateException.class)
+ .hasMessageContaining("The retrieved principalName must not be null or empty.");
+ SecurityContextHolder.getContext().setAuthentication(null);
+ }
+
+ @Test
+ public void testGetAccessTokenOfAuthenticatedUserWithEmptyClientRegistrationId() {
+ final OAuth2AuthenticationToken authentication = mock(OAuth2AuthenticationToken.class);
+ when(authentication.getName()).thenReturn("FOO");
+ when(authentication.getAuthorizedClientRegistrationId()).thenReturn("");
+ SecurityContextHolder.getContext().setAuthentication(authentication);
+
+ final OAuth2AuthorizedClientService oauth2AuthorizedClientService = mock(OAuth2AuthorizedClientService.class);
+ OAuth2TokenUtilsService oAuth2TokenUtilsService = new DefaultOAuth2TokenUtilsService(oauth2AuthorizedClientService);
+
+ assertThatThrownBy(() -> {
+ oAuth2TokenUtilsService.getAccessTokenOfAuthenticatedUser();
+ }).isInstanceOf(IllegalStateException.class)
+ .hasMessageContaining("The retrieved clientRegistrationId must not be null or empty.");
+ SecurityContextHolder.getContext().setAuthentication(null);
+ }
+
+ @Test
+ public void testGetAccessTokenOfAuthenticatedUserWithWrongClientRegistrationId() {
+ final OAuth2AuthenticationToken authentication = mock(OAuth2AuthenticationToken.class);
+ when(authentication.getName()).thenReturn("my-username");
+ when(authentication.getAuthorizedClientRegistrationId()).thenReturn("CID");
+ SecurityContextHolder.getContext().setAuthentication(authentication);
+
+ final OAuth2AuthorizedClientService oauth2AuthorizedClientService = mock(OAuth2AuthorizedClientService.class);
+ when(oauth2AuthorizedClientService.loadAuthorizedClient("uaa", "my-username")).thenReturn(getOAuth2AuthorizedClient());
+ final OAuth2TokenUtilsService oauth2TokenUtilsService = new DefaultOAuth2TokenUtilsService(oauth2AuthorizedClientService);
+
+ assertThatThrownBy(() -> {
+ oauth2TokenUtilsService.getAccessTokenOfAuthenticatedUser();
+ }).isInstanceOf(IllegalStateException.class).hasMessageContaining(
+ "No oauth2AuthorizedClient returned for clientRegistrationId 'CID' and principalName 'my-username'.");
+ SecurityContextHolder.getContext().setAuthentication(null);
+ }
+
+ @Test
+ public void testGetAccessTokenOfAuthenticatedUserWithAuthentication() {
+ final OAuth2AuthenticationToken authentication = mock(OAuth2AuthenticationToken.class);
+ when(authentication.getName()).thenReturn("my-username");
+ when(authentication.getAuthorizedClientRegistrationId()).thenReturn("uaa");
+ SecurityContextHolder.getContext().setAuthentication(authentication);
+
+ final OAuth2AuthorizedClientService oauth2AuthorizedClientService = mock(OAuth2AuthorizedClientService.class);
+ when(oauth2AuthorizedClientService.loadAuthorizedClient("uaa", "my-username")).thenReturn(getOAuth2AuthorizedClient());
+ final OAuth2TokenUtilsService oauth2TokenUtilsService = new DefaultOAuth2TokenUtilsService(oauth2AuthorizedClientService);
+
+ assertThat(oauth2TokenUtilsService.getAccessTokenOfAuthenticatedUser()).isEqualTo("foo-bar-123-token");
+ SecurityContextHolder.getContext().setAuthentication(null);
+ }
+
+ private OAuth2AuthorizedClient getOAuth2AuthorizedClient() {
+ final ClientRegistration clientRegistration = ClientRegistration
+ .withRegistrationId("uaa")
+ .clientId("clientId")
+ .clientSecret("clientSecret")
+ .redirectUri("blubba")
+ .authorizationUri("blubba")
+ .tokenUri("blubba")
+ .authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
+ .build();
+ final OAuth2AccessToken accessToken = new OAuth2AccessToken(TokenType.BEARER, "foo-bar-123-token", Instant.now(), Instant.now().plusMillis(100000));
+ final OAuth2AuthorizedClient authorizedClient = new OAuth2AuthorizedClient(clientRegistration, "my-username", accessToken);
+ return authorizedClient;
+ }
+
+}
diff --git a/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/pom.xml b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/pom.xml
new file mode 100644
index 0000000000..b0148eeee2
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/pom.xml
@@ -0,0 +1,49 @@
+
+
+ 4.0.0
+
+ org.springframework.cloud
+ spring-cloud-common-security-config
+ 3.0.0-SNAPSHOT
+
+ spring-cloud-starter-common-security-config-web
+ spring-cloud-starter-common-security-config-web
+ Spring Cloud Starter Common Security Config Web
+ pom
+
+ true
+
+
+
+ org.springframework.cloud
+ spring-cloud-common-security-config-web
+ ${project.version}
+
+
+ org.springframework.boot
+ spring-boot-starter-web
+ test
+
+
+ org.springframework.boot
+ spring-boot-starter-test
+ test
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-jar-plugin
+ 3.3.0
+
+
+
+ test-jar
+
+
+
+
+
+
+
diff --git a/spring-cloud-dataflow-completion/src/test/support/common/src/main/java/com/acme/common/SomeEnum.java b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/SpringCloudCommonSecurityApplicationTests.java
similarity index 61%
rename from spring-cloud-dataflow-completion/src/test/support/common/src/main/java/com/acme/common/SomeEnum.java
rename to spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/SpringCloudCommonSecurityApplicationTests.java
index 71c096cc1d..df2f761d8e 100644
--- a/spring-cloud-dataflow-completion/src/test/support/common/src/main/java/com/acme/common/SomeEnum.java
+++ b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/SpringCloudCommonSecurityApplicationTests.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2016 the original author or authors.
+ * Copyright 2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,17 +13,22 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+package org.springframework.cloud.common.security;
-package com.acme.common;
+import org.junit.jupiter.api.Test;
+
+import org.springframework.boot.test.context.SpringBootTest;
/**
- * An enum class used in {@link ConfigProperties}. Useful to test, because this class has
- * to be accessible to the ClassLoader used to retrieve metadata.
+ * Tests that the application context starts up with the common security configuration applied.
*
- * @author Eric Bottard
+ * @author Corneil du Plessis
*/
-public enum SomeEnum {
- one,
- two,
- three;
+@SpringBootTest
+class SpringCloudCommonSecurityApplicationTests {
+
+ @Test
+ void contextLoads() {
+ }
+
}
diff --git a/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/SpringCloudCommonSecurityTestApplication.java b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/SpringCloudCommonSecurityTestApplication.java
new file mode 100644
index 0000000000..08c8855d75
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/SpringCloudCommonSecurityTestApplication.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2022 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security;
+
+import java.security.Principal;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.actuate.autoconfigure.metrics.MetricsAutoConfiguration;
+import org.springframework.boot.actuate.autoconfigure.security.servlet.ManagementWebSecurityAutoConfiguration;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration;
+import org.springframework.boot.autoconfigure.security.servlet.UserDetailsServiceAutoConfiguration;
+import org.springframework.boot.autoconfigure.session.SessionAutoConfiguration;
+import org.springframework.context.annotation.Import;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+/**
+ * Minimal application used to verify the common security configuration at startup.
+ *
+ * @author Corneil du Plessis
+ */
+@SpringBootApplication(exclude = {
+ MetricsAutoConfiguration.class,
+ ManagementWebSecurityAutoConfiguration.class,
+ SecurityAutoConfiguration.class,
+ UserDetailsServiceAutoConfiguration.class,
+ SessionAutoConfiguration.class
+})
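+// Boot's default security auto-configurations are excluded above so that the imported
+// CommonSecurityAutoConfiguration drives the security setup for this test application.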
+
+@Import({CommonSecurityAutoConfiguration.class, TestOAuthSecurityConfiguration.class})
+public class SpringCloudCommonSecurityTestApplication {
+
+ public static void main(String[] args) {
+ SpringApplication.run(SpringCloudCommonSecurityTestApplication.class, args);
+ }
+
+ @RestController
+ public static class SimpleController {
+ @GetMapping("/user")
+ public String getUser(Principal principal) {
+ return principal.getName();
+ }
+ }
+}
diff --git a/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/TestOAuthSecurityConfiguration.java b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/TestOAuthSecurityConfiguration.java
new file mode 100644
index 0000000000..0b1b2ea2e8
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/java/org/springframework/cloud/common/security/TestOAuthSecurityConfiguration.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2022 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.common.security;
+
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.cloud.common.security.support.OnOAuth2SecurityEnabled;
+import org.springframework.cloud.common.security.support.SecurityStateBean;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Conditional;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Import;
+
+/**
+ * Mimics the security configuration used by the Data Flow and Skipper servers.
+ *
+ * @author Corneil du Plessis
+ */
+@Configuration(proxyBeanMethods = false)
+@Conditional(OnOAuth2SecurityEnabled.class)
+@Import(TestOAuthSecurityConfiguration.SecurityStateBeanConfig.class)
+public class TestOAuthSecurityConfiguration extends OAuthSecurityConfiguration {
+
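+ // Provides the SecurityStateBean and AuthorizationProperties beans that the Data Flow / Skipper servers would otherwise contribute.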
+ @Configuration(proxyBeanMethods = false)
+ public static class SecurityStateBeanConfig {
+ @Bean
+ public SecurityStateBean securityStateBean() {
+ return new SecurityStateBean();
+ }
+
+ @Bean
+ @ConfigurationProperties(prefix = "spring.cloud.common.security.test.authorization")
+ public AuthorizationProperties authorizationProperties() {
+ return new AuthorizationProperties();
+ }
+ }
+}
diff --git a/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/resources/application.yml b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/resources/application.yml
new file mode 100644
index 0000000000..e5de703119
--- /dev/null
+++ b/spring-cloud-common-security-config/spring-cloud-starter-common-security-config-web/src/test/resources/application.yml
@@ -0,0 +1,40 @@
+logging:
+# file:
+# name: sccsc-test.log
+ level:
+ org.springframework: DEBUG
+spring:
+ security:
+ oauth2:
+ client:
+ registration:
+ uaa:
+ redirect-uri: '{baseUrl}/login/oauth2/code/{registrationId}'
+ authorization-grant-type: authorization_code
+ client-id: myclient
+ client-secret: mysecret
+ access-token-uri: http://127.0.0.1:8888/oauth/token
+ user-authorization-uri: http://127.0.0.1:8888/oauth/authorize
+ provider:
+ uaa:
+ authorization-uri: http://127.0.0.1:8888/oauth/authorize
+ user-info-uri: http://127.0.0.1:8888/me
+ token-uri: http://127.0.0.1:8888/oauth/token
+ resourceserver:
+ opaquetoken:
+ introspection-uri: http://127.0.0.1:8888/oauth/check_token
+ client-id: myclient
+ client-secret: mysecret
+ cloud:
+ common:
+ security:
+ test:
+ authorization:
+ check-token-access: isAuthenticated()
+ authorization:
+ enabled: true
+ permit-all-paths: "/user,./assets/**,/dashboard/logout-success-oauth.html"
+ authenticated-paths: "/user"
+ rules:
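+ # Each rule has the form: <HTTP method> <path pattern> => <access expression>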
+ # User
+ - GET /user => hasRole('ROLE_VIEW')
diff --git a/spring-cloud-dataflow-audit/pom.xml b/spring-cloud-dataflow-audit/pom.xml
index 1a4f7ddff6..2adea4114f 100644
--- a/spring-cloud-dataflow-audit/pom.xml
+++ b/spring-cloud-dataflow-audit/pom.xml
@@ -1,21 +1,36 @@
-
+4.0.0spring-cloud-dataflow-parentorg.springframework.cloud
- 2.8.0-SNAPSHOT
+ 3.0.0-SNAPSHOT
+ ../spring-cloud-dataflow-parentspring-cloud-dataflow-audit
+ spring-cloud-dataflow-audit
+ Spring Cloud Data Flow Audit
+
jar
+
+ true
+ 3.4.1
+
+
+ jakarta.persistence
+ jakarta.persistence-api
+ org.springframework.cloudspring-cloud-dataflow-core
+ ${project.version}org.springframework.cloudspring-cloud-dataflow-rest-resource
+ ${project.version}org.springframework.boot
@@ -23,4 +38,45 @@
test
+
+
+
+ org.apache.maven.plugins
+ maven-compiler-plugin
+ 3.11.0
+
+ true
+ ${java.version}
+
+
+
+ org.apache.maven.plugins
+ maven-javadoc-plugin
+ ${maven-javadoc-plugin.version}
+
+
+ javadoc
+
+ jar
+
+ package
+
+
+
+
+ org.apache.maven.plugins
+ maven-source-plugin
+ 3.3.0
+
+
+ source
+
+ jar
+
+ package
+
+
+
+
+
diff --git a/spring-cloud-dataflow-audit/src/main/java/org/springframework/cloud/dataflow/audit/repository/jpa/AuditRecordRepositoryImpl.java b/spring-cloud-dataflow-audit/src/main/java/org/springframework/cloud/dataflow/audit/repository/jpa/AuditRecordRepositoryImpl.java
index 70f4d2ea12..350b2b15dc 100644
--- a/spring-cloud-dataflow-audit/src/main/java/org/springframework/cloud/dataflow/audit/repository/jpa/AuditRecordRepositoryImpl.java
+++ b/spring-cloud-dataflow-audit/src/main/java/org/springframework/cloud/dataflow/audit/repository/jpa/AuditRecordRepositoryImpl.java
@@ -19,14 +19,15 @@
import java.util.ArrayList;
import java.util.List;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
-import javax.persistence.criteria.CriteriaBuilder;
-import javax.persistence.criteria.CriteriaQuery;
-import javax.persistence.criteria.Path;
-import javax.persistence.criteria.Predicate;
-import javax.persistence.criteria.Root;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
+import jakarta.persistence.criteria.CriteriaBuilder;
+import jakarta.persistence.criteria.CriteriaQuery;
+import jakarta.persistence.criteria.Path;
+import jakarta.persistence.criteria.Predicate;
+import jakarta.persistence.criteria.Root;
+import org.hibernate.query.sqm.tree.select.SqmSelectStatement;
import org.springframework.cloud.dataflow.audit.repository.AuditRecordRepositoryCustom;
import org.springframework.cloud.dataflow.core.AuditActionType;
@@ -121,14 +122,7 @@ else if (fromDate != null && toDate != null) {
final List resultList = typedQuery.getResultList();
- final CriteriaQuery countQuery = cb.createQuery(Long.class);
- countQuery.select(cb.count(countQuery.from(AuditRecord.class)));
-
- if (!finalQueryPredicates.isEmpty()) {
- countQuery.where(finalQueryPredicates.toArray(new Predicate[0]));
- }
-
- final Long totalCount = entityManager.createQuery(countQuery)
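+ // Let Hibernate derive the count query from the same criteria query (SqmSelectStatement.createCountQuery()) instead of building a separate count query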
+ final Long totalCount = (Long) entityManager.createQuery(((SqmSelectStatement<?>) select).createCountQuery())
.getSingleResult();
return new PageImpl<>(resultList, pageable, totalCount);
diff --git a/spring-cloud-dataflow-audit/src/main/java/org/springframework/cloud/dataflow/audit/service/DefaultAuditRecordService.java b/spring-cloud-dataflow-audit/src/main/java/org/springframework/cloud/dataflow/audit/service/DefaultAuditRecordService.java
index 7eff50695e..84eccdc2df 100644
--- a/spring-cloud-dataflow-audit/src/main/java/org/springframework/cloud/dataflow/audit/service/DefaultAuditRecordService.java
+++ b/spring-cloud-dataflow-audit/src/main/java/org/springframework/cloud/dataflow/audit/service/DefaultAuditRecordService.java
@@ -16,6 +16,7 @@
package org.springframework.cloud.dataflow.audit.service;
import java.time.Instant;
+import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
@@ -26,6 +27,7 @@
import org.slf4j.LoggerFactory;
import org.springframework.cloud.dataflow.audit.repository.AuditRecordRepository;
+import org.springframework.cloud.dataflow.core.ArgumentSanitizer;
import org.springframework.cloud.dataflow.core.AuditActionType;
import org.springframework.cloud.dataflow.core.AuditOperationType;
import org.springframework.cloud.dataflow.core.AuditRecord;
@@ -38,74 +40,107 @@
*
* @author Gunnar Hillert
* @author Daniel Serleg
+ * @author Corneil du Plessis
*/
public class DefaultAuditRecordService implements AuditRecordService {
- private static final Logger logger = LoggerFactory.getLogger(DefaultAuditRecordService.class);
-
- private final AuditRecordRepository auditRecordRepository;
-
- private final ObjectMapper objectMapper;
-
- public DefaultAuditRecordService(AuditRecordRepository auditRecordRepository) {
- Assert.notNull(auditRecordRepository, "auditRecordRepository must not be null.");
- this.auditRecordRepository = auditRecordRepository;
- this.objectMapper = new ObjectMapper();
- this.objectMapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
- }
-
- public DefaultAuditRecordService(AuditRecordRepository auditRecordRepository, ObjectMapper objectMapper) {
- Assert.notNull(auditRecordRepository, "auditRecordRepository must not be null.");
- Assert.notNull(objectMapper, "objectMapper must not be null.");
- this.auditRecordRepository = auditRecordRepository;
- this.objectMapper = objectMapper;
- }
-
- @Override
- public AuditRecord populateAndSaveAuditRecord(AuditOperationType auditOperationType,
- AuditActionType auditActionType,
- String correlationId, String data, String platformName) {
- Assert.notNull(auditActionType, "auditActionType must not be null.");
- Assert.notNull(auditOperationType, "auditOperationType must not be null.");
-
- final AuditRecord auditRecord = new AuditRecord();
- auditRecord.setAuditAction(auditActionType);
- auditRecord.setAuditOperation(auditOperationType);
- auditRecord.setCorrelationId(correlationId);
- auditRecord.setAuditData(data);
- auditRecord.setPlatformName(platformName);
- return this.auditRecordRepository.save(auditRecord);
- }
-
- @Override
- public AuditRecord populateAndSaveAuditRecordUsingMapData(AuditOperationType auditOperationType,
- AuditActionType auditActionType,
- String correlationId, Map data, String platformName) {
- String dataAsString;
- try {
- dataAsString = objectMapper.writeValueAsString(data);
- }
- catch (JsonProcessingException e) {
- logger.error("Error serializing audit record data. Data = " + data);
- dataAsString = "Error serializing audit record data. Data = " + data;
- }
- return this.populateAndSaveAuditRecord(auditOperationType, auditActionType, correlationId, dataAsString, platformName);
- }
-
- @Override
- public Page findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(
- Pageable pageable,
- AuditActionType[] actions,
- AuditOperationType[] operations,
- Instant fromDate,
- Instant toDate) {
- return this.auditRecordRepository.findByActionTypeAndOperationTypeAndDate(operations, actions, fromDate, toDate,
- pageable);
- }
-
- @Override
- public Optional findById(Long id) {
- return this.auditRecordRepository.findById(id);
- }
+ private static final Logger logger = LoggerFactory.getLogger(DefaultAuditRecordService.class);
+
+ private final AuditRecordRepository auditRecordRepository;
+
+ private final ObjectMapper objectMapper;
+
+ private final ArgumentSanitizer sanitizer;
+
+ public DefaultAuditRecordService(AuditRecordRepository auditRecordRepository) {
+
+ this(auditRecordRepository, null);
+ }
+
+ public DefaultAuditRecordService(AuditRecordRepository auditRecordRepository, ObjectMapper objectMapper) {
+
+ Assert.notNull(auditRecordRepository, "auditRecordRepository must not be null.");
+ this.auditRecordRepository = auditRecordRepository;
+ if (objectMapper == null) {
+ objectMapper = new ObjectMapper();
+ objectMapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
+ }
+ this.objectMapper = objectMapper;
+ this.sanitizer = new ArgumentSanitizer();
+ }
+
+ @Override
+ public AuditRecord populateAndSaveAuditRecord(AuditOperationType auditOperationType,
+ AuditActionType auditActionType,
+ String correlationId, String data, String platformName) {
+
+ Assert.notNull(auditActionType, "auditActionType must not be null.");
+ Assert.notNull(auditOperationType, "auditOperationType must not be null.");
+
+ final AuditRecord auditRecord = new AuditRecord();
+ auditRecord.setAuditAction(auditActionType);
+ auditRecord.setAuditOperation(auditOperationType);
+ auditRecord.setCorrelationId(correlationId);
+ auditRecord.setAuditData(data);
+ auditRecord.setPlatformName(platformName);
+ return this.auditRecordRepository.save(auditRecord);
+ }
+
+ @Override
+ public AuditRecord populateAndSaveAuditRecordUsingMapData(
+ AuditOperationType auditOperationType,
+ AuditActionType auditActionType,
+ String correlationId, Map<String, Object> data,
+ String platformName
+ ) {
+
+ String dataAsString;
+ try {
+ Map<String, Object> sanitizedData = sanitizeMap(data);
+ dataAsString = objectMapper.writeValueAsString(sanitizedData);
+ } catch (JsonProcessingException e) {
+ logger.error("Error serializing audit record data. Data = " + data);
+ dataAsString = "Error serializing audit record data. Data = " + data;
+ }
+ return this.populateAndSaveAuditRecord(auditOperationType, auditActionType, correlationId, dataAsString, platformName);
+ }
+
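+ // Returns a copy of the map with values passed through ArgumentSanitizer so that sensitive entries
+ // (for example passwords) are masked; nested maps are sanitized recursively.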
+ private Map<String, Object> sanitizeMap(Map<String, Object> data) {
+
+ final Map<String, Object> result = new HashMap<>();
+ data.forEach((k, v) -> result.put(k, sanitize(k, v)));
+ return result;
+ }
+
+ private Object sanitize(String key, Object value) {
+
+ if (value instanceof String) {
+ return sanitizer.sanitize(key, (String) value);
+ } else if (value instanceof Map) {
+ Map<String, Object> input = (Map<String, Object>) value;
+ return sanitizeMap(input);
+ } else {
+ return value;
+ }
+ }
+
+
+ @Override
+ public Page<AuditRecord> findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(
+ Pageable pageable,
+ AuditActionType[] actions,
+ AuditOperationType[] operations,
+ Instant fromDate,
+ Instant toDate) {
+
+ return this.auditRecordRepository.findByActionTypeAndOperationTypeAndDate(operations, actions, fromDate, toDate,
+ pageable);
+ }
+
+ @Override
+ public Optional<AuditRecord> findById(Long id) {
+
+ return this.auditRecordRepository.findById(id);
+ }
}
diff --git a/spring-cloud-dataflow-audit/src/test/java/org/springframework/cloud/dataflow/server/audit/service/DefaultAuditRecordServiceTests.java b/spring-cloud-dataflow-audit/src/test/java/org/springframework/cloud/dataflow/server/audit/service/DefaultAuditRecordServiceTests.java
index 5b12a0bf03..2d084d881e 100644
--- a/spring-cloud-dataflow-audit/src/test/java/org/springframework/cloud/dataflow/server/audit/service/DefaultAuditRecordServiceTests.java
+++ b/spring-cloud-dataflow-audit/src/test/java/org/springframework/cloud/dataflow/server/audit/service/DefaultAuditRecordServiceTests.java
@@ -15,13 +15,15 @@
*/
package org.springframework.cloud.dataflow.server.audit.service;
+import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import org.springframework.cloud.dataflow.audit.repository.AuditRecordRepository;
@@ -32,8 +34,8 @@
import org.springframework.cloud.dataflow.core.AuditRecord;
import org.springframework.data.domain.PageRequest;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.isNull;
@@ -45,218 +47,255 @@
/**
* @author Gunnar Hillert
+ * @author Corneil du Plessis
*/
-public class DefaultAuditRecordServiceTests {
-
- private AuditRecordRepository auditRecordRepository;
-
- @Before
- public void setupMock() {
- this.auditRecordRepository = mock(AuditRecordRepository.class);
- }
-
- @Test
- public void testInitializationWithNullParameters() {
- try {
- new DefaultAuditRecordService(null);
- }
- catch (IllegalArgumentException e) {
- assertEquals("auditRecordRepository must not be null.", e.getMessage());
- return;
- }
- fail("Expected an Exception to be thrown.");
- }
-
- @Test
- public void testPopulateAndSaveAuditRecord() {
- final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository);
- auditRecordService.populateAndSaveAuditRecord(AuditOperationType.SCHEDULE, AuditActionType.CREATE, "1234",
- "my data", "test-platform");
-
- final ArgumentCaptor argument = ArgumentCaptor.forClass(AuditRecord.class);
- verify(this.auditRecordRepository, times(1)).save(argument.capture());
- verifyNoMoreInteractions(this.auditRecordRepository);
-
- AuditRecord auditRecord = argument.getValue();
-
- assertEquals(AuditActionType.CREATE, auditRecord.getAuditAction());
- assertEquals(AuditOperationType.SCHEDULE, auditRecord.getAuditOperation());
- assertEquals("1234", auditRecord.getCorrelationId());
- assertEquals("my data", auditRecord.getAuditData());
- assertEquals("test-platform", auditRecord.getPlatformName());
- }
-
- @Test
- public void testPopulateAndSaveAuditRecordWithNullAuditActionType() {
- final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository);
-
- try {
- auditRecordService.populateAndSaveAuditRecord(AuditOperationType.SCHEDULE, null, "1234", "my audit data", "test-platform");
- }
- catch (IllegalArgumentException e) {
- assertEquals("auditActionType must not be null.", e.getMessage());
- return;
- }
- fail("Expected an Exception to be thrown.");
- }
-
- @Test
- public void testPopulateAndSaveAuditRecordWithNullAuditOperationType() {
- final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository);
-
- try {
- auditRecordService.populateAndSaveAuditRecord(null, AuditActionType.CREATE, "1234", "my audit data", "test-platform");
- }
- catch (IllegalArgumentException e) {
- assertEquals("auditOperationType must not be null.", e.getMessage());
- return;
- }
- fail("Expected an Exception to be thrown.");
- }
-
- @Test
- public void testPopulateAndSaveAuditRecordWithMapData() {
- final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository);
-
- final Map mapAuditData = new HashMap<>(2);
- mapAuditData.put("foo1", "bar1");
- mapAuditData.put("foofoo", "barbar");
-
- auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, AuditActionType.CREATE,
- "1234", mapAuditData, "test-platform");
-
- final ArgumentCaptor argument = ArgumentCaptor.forClass(AuditRecord.class);
- verify(this.auditRecordRepository, times(1)).save(argument.capture());
- verifyNoMoreInteractions(this.auditRecordRepository);
-
- final AuditRecord auditRecord = argument.getValue();
-
- assertEquals(AuditActionType.CREATE, auditRecord.getAuditAction());
- assertEquals(AuditOperationType.SCHEDULE, auditRecord.getAuditOperation());
- assertEquals("1234", auditRecord.getCorrelationId());
- assertEquals("{\"foofoo\":\"barbar\",\"foo1\":\"bar1\"}", auditRecord.getAuditData());
- assertEquals("test-platform", auditRecord.getPlatformName());
- }
-
- @Test
- public void testPopulateAndSaveAuditRecordUsingMapDataWithNullAuditActionType() {
- final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository);
-
- final Map mapAuditData = new HashMap<>(2);
- mapAuditData.put("foo", "bar");
-
- try {
- auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, null, "1234",
- mapAuditData, null);
- }
- catch (IllegalArgumentException e) {
- assertEquals("auditActionType must not be null.", e.getMessage());
- return;
- }
- fail("Expected an Exception to be thrown.");
- }
-
- @Test
- public void testPopulateAndSaveAuditRecordUsingMapDataWithNullAuditOperationType() {
- final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository);
-
- final Map mapAuditData = new HashMap<>(2);
- mapAuditData.put("foo", "bar");
-
- try {
- auditRecordService.populateAndSaveAuditRecordUsingMapData(null, AuditActionType.CREATE, "1234",
- mapAuditData, null);
- }
- catch (IllegalArgumentException e) {
- assertEquals("auditOperationType must not be null.", e.getMessage());
- return;
- }
- fail("Expected an Exception to be thrown.");
- }
-
- @Test
- public void testPopulateAndSaveAuditRecordUsingMapDataThrowingJsonProcessingException()
- throws JsonProcessingException {
- final ObjectMapper objectMapper = mock(ObjectMapper.class);
- when(objectMapper.writeValueAsString(any(Object.class))).thenThrow(new JsonProcessingException("Error") {
- private static final long serialVersionUID = 1L;
- });
-
- final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository,
- objectMapper);
-
- final Map mapAuditData = new HashMap<>(2);
- mapAuditData.put("foo", "bar");
-
- auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, AuditActionType.CREATE,
- "1234", mapAuditData, "test-platform");
-
- final ArgumentCaptor argument = ArgumentCaptor.forClass(AuditRecord.class);
- verify(this.auditRecordRepository, times(1)).save(argument.capture());
- verifyNoMoreInteractions(this.auditRecordRepository);
-
- AuditRecord auditRecord = argument.getValue();
-
- assertEquals(AuditActionType.CREATE, auditRecord.getAuditAction());
- assertEquals(AuditOperationType.SCHEDULE, auditRecord.getAuditOperation());
- assertEquals("1234", auditRecord.getCorrelationId());
- assertEquals("Error serializing audit record data. Data = {foo=bar}", auditRecord.getAuditData());
- assertEquals("test-platform", auditRecord.getPlatformName());
- }
-
- @Test
- public void testFindAuditRecordByAuditOperationTypeAndAuditActionType() {
- AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository);
-
- AuditActionType[] auditActionTypes = { AuditActionType.CREATE };
- AuditOperationType[] auditOperationTypes = { AuditOperationType.STREAM };
- PageRequest pageRequest = PageRequest.of(0, 1);
- auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, auditActionTypes,
- auditOperationTypes, null, null);
-
- verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(eq(auditOperationTypes),
- eq(auditActionTypes), isNull(), isNull(), eq(pageRequest));
- verifyNoMoreInteractions(this.auditRecordRepository);
- }
-
- @Test
- public void testFindAuditRecordByAuditOperationTypeAndAuditActionTypeWithNullAuditActionType() {
- AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository);
-
- AuditOperationType[] auditOperationTypes = { AuditOperationType.STREAM };
- PageRequest pageRequest = PageRequest.of(0, 1);
- auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, null,
- auditOperationTypes, null, null);
-
- verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(eq(auditOperationTypes),
- isNull(), isNull(), isNull(), eq(pageRequest));
- verifyNoMoreInteractions(this.auditRecordRepository);
- }
-
- @Test
- public void testFindAuditRecordByAuditOperationTypeAndAuditActionTypeWithNullOperationType() {
- AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository);
-
- AuditActionType[] auditActionTypes = { AuditActionType.CREATE };
- PageRequest pageRequest = PageRequest.of(0, 1);
- auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, auditActionTypes,
- null, null, null);
-
- verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(isNull(),
- eq(auditActionTypes), isNull(), isNull(), eq(pageRequest));
- verifyNoMoreInteractions(this.auditRecordRepository);
- }
-
- @Test
- public void testFindAuditRecordByAuditOperationTypeAndAuditActionTypeWithNullActionAndOperationType() {
- AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository);
-
- PageRequest pageRequest = PageRequest.of(0, 1);
- auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, null, null, null,
- null);
-
- verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(isNull(), isNull(),
- isNull(), isNull(), eq(pageRequest));
- verifyNoMoreInteractions(this.auditRecordRepository);
- }
+class DefaultAuditRecordServiceTests {
+
+ private AuditRecordRepository auditRecordRepository;
+
+ @BeforeEach
+ void setupMock() {
+ this.auditRecordRepository = mock(AuditRecordRepository.class);
+ }
+
+ @Test
+ void initializationWithNullParameters() {
+ try {
+ new DefaultAuditRecordService(null);
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage()).isEqualTo("auditRecordRepository must not be null.");
+ return;
+ }
+ fail("Expected an Exception to be thrown.");
+ }
+
+ @Test
+ void populateAndSaveAuditRecord() {
+ final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository);
+ auditRecordService.populateAndSaveAuditRecord(AuditOperationType.SCHEDULE, AuditActionType.CREATE, "1234",
+ "my data", "test-platform");
+
+ final ArgumentCaptor<AuditRecord> argument = ArgumentCaptor.forClass(AuditRecord.class);
+ verify(this.auditRecordRepository, times(1)).save(argument.capture());
+ verifyNoMoreInteractions(this.auditRecordRepository);
+
+ AuditRecord auditRecord = argument.getValue();
+
+ assertThat(auditRecord.getAuditAction()).isEqualTo(AuditActionType.CREATE);
+ assertThat(auditRecord.getAuditOperation()).isEqualTo(AuditOperationType.SCHEDULE);
+ assertThat(auditRecord.getCorrelationId()).isEqualTo("1234");
+ assertThat(auditRecord.getAuditData()).isEqualTo("my data");
+ assertThat(auditRecord.getPlatformName()).isEqualTo("test-platform");
+ }
+
+ @Test
+ void populateAndSaveAuditRecordWithNullAuditActionType() {
+ final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository);
+
+ try {
+ auditRecordService.populateAndSaveAuditRecord(AuditOperationType.SCHEDULE, null, "1234", "my audit data", "test-platform");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage()).isEqualTo("auditActionType must not be null.");
+ return;
+ }
+ fail("Expected an Exception to be thrown.");
+ }
+
+ @Test
+ void populateAndSaveAuditRecordWithNullAuditOperationType() {
+ final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository);
+
+ try {
+ auditRecordService.populateAndSaveAuditRecord(null, AuditActionType.CREATE, "1234", "my audit data", "test-platform");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage()).isEqualTo("auditOperationType must not be null.");
+ return;
+ }
+ fail("Expected an Exception to be thrown.");
+ }
+
+ @Test
+ void populateAndSaveAuditRecordWithMapData() throws JsonProcessingException {
+ final ObjectMapper mapper = new ObjectMapper();
+ final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository, mapper);
+
+ final Map<String, Object> mapAuditData = new HashMap<>(2);
+ mapAuditData.put("foo1", "bar1");
+ mapAuditData.put("foofoo", "barbar");
+
+ auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, AuditActionType.CREATE,
+ "1234", mapAuditData, "test-platform");
+
+ final ArgumentCaptor<AuditRecord> argument = ArgumentCaptor.forClass(AuditRecord.class);
+ verify(this.auditRecordRepository, times(1)).save(argument.capture());
+ verifyNoMoreInteractions(this.auditRecordRepository);
+
+ final AuditRecord auditRecord = argument.getValue();
+
+ assertThat(auditRecord.getAuditAction()).isEqualTo(AuditActionType.CREATE);
+ assertThat(auditRecord.getAuditOperation()).isEqualTo(AuditOperationType.SCHEDULE);
+ assertThat(auditRecord.getCorrelationId()).isEqualTo("1234");
+ assertThat(mapper.readTree(auditRecord.getAuditData())).isEqualTo(mapper.convertValue(mapAuditData, JsonNode.class));
+ assertThat(auditRecord.getPlatformName()).isEqualTo("test-platform");
+ }
+
+ @Test
+ void populateAndSaveAuditRecordUsingMapDataWithNullAuditActionType() {
+ final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository);
+
+ final Map<String, Object> mapAuditData = new HashMap<>(2);
+ mapAuditData.put("foo", "bar");
+
+ try {
+ auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, null, "1234",
+ mapAuditData, null);
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage()).isEqualTo("auditActionType must not be null.");
+ return;
+ }
+ fail("Expected an Exception to be thrown.");
+ }
+
+ @Test
+ void populateAndSaveAuditRecordUsingMapDataWithNullAuditOperationType() {
+ final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository);
+
+ final Map<String, Object> mapAuditData = new HashMap<>(2);
+ mapAuditData.put("foo", "bar");
+
+ try {
+ auditRecordService.populateAndSaveAuditRecordUsingMapData(null, AuditActionType.CREATE, "1234",
+ mapAuditData, null);
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage()).isEqualTo("auditOperationType must not be null.");
+ return;
+ }
+ fail("Expected an Exception to be thrown.");
+ }
+
+ @Test
+ void populateAndSaveAuditRecordUsingMapDataThrowingJsonProcessingException()
+ throws JsonProcessingException {
+ final ObjectMapper objectMapper = mock(ObjectMapper.class);
+ when(objectMapper.writeValueAsString(any(Object.class))).thenThrow(new JsonProcessingException("Error") {
+ private static final long serialVersionUID = 1L;
+ });
+
+ final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository,
+ objectMapper);
+
+ final Map<String, Object> mapAuditData = new HashMap<>(2);
+ mapAuditData.put("foo", "bar");
+
+ auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, AuditActionType.CREATE,
+ "1234", mapAuditData, "test-platform");
+
+ final ArgumentCaptor<AuditRecord> argument = ArgumentCaptor.forClass(AuditRecord.class);
+ verify(this.auditRecordRepository, times(1)).save(argument.capture());
+ verifyNoMoreInteractions(this.auditRecordRepository);
+
+ AuditRecord auditRecord = argument.getValue();
+
+ assertThat(auditRecord.getAuditAction()).isEqualTo(AuditActionType.CREATE);
+ assertThat(auditRecord.getAuditOperation()).isEqualTo(AuditOperationType.SCHEDULE);
+ assertThat(auditRecord.getCorrelationId()).isEqualTo("1234");
+ assertThat(auditRecord.getPlatformName()).isEqualTo("test-platform");
+ assertThat(auditRecord.getAuditData()).isEqualTo("Error serializing audit record data. Data = {foo=bar}");
+ }
+
+ @Test
+ void populateAndSaveAuditRecordUsingSensitiveMapData() {
+ final ObjectMapper objectMapper = new ObjectMapper();
+ final AuditRecordService auditRecordService = new DefaultAuditRecordService(this.auditRecordRepository, objectMapper);
+
+ final Map<String, Object> mapAuditData = new HashMap<>(2);
+ mapAuditData.put("foo", "bar");
+ mapAuditData.put("spring.cloud.config.password", "12345");
+ final Map<String, Object> child = new HashMap<>();
+ child.put("password", "54321");
+ child.put("bar1", "foo2");
+ mapAuditData.put("spring.child", child);
+ mapAuditData.put("spring.empty", Collections.emptyMap());
+
+ auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.SCHEDULE, AuditActionType.CREATE,
+ "1234", mapAuditData, "test-platform");
+
+ final ArgumentCaptor<AuditRecord> argument = ArgumentCaptor.forClass(AuditRecord.class);
+ verify(this.auditRecordRepository, times(1)).save(argument.capture());
+ verifyNoMoreInteractions(this.auditRecordRepository);
+
+ AuditRecord auditRecord = argument.getValue();
+
+ assertThat(auditRecord.getAuditAction()).isEqualTo(AuditActionType.CREATE);
+ assertThat(auditRecord.getAuditOperation()).isEqualTo(AuditOperationType.SCHEDULE);
+ assertThat(auditRecord.getCorrelationId()).isEqualTo("1234");
+
+ assertThat(auditRecord.getPlatformName()).isEqualTo("test-platform");
+ System.out.println("auditData=" + auditRecord.getAuditData());
+ assertThat(auditRecord.getAuditData()).contains("\"******\"");
+ assertThat(auditRecord.getAuditData()).contains("\"bar\"");
+ assertThat(auditRecord.getAuditData()).contains("\"foo\"");
+ assertThat(auditRecord.getAuditData()).contains("\"spring.cloud.config.password\"");
+ assertThat(auditRecord.getAuditData()).contains("\"password\"");
+ assertThat(auditRecord.getAuditData()).doesNotContain("54321");
+ assertThat(auditRecord.getAuditData()).doesNotContain("12345");
+ }
+
+ @Test
+ void findAuditRecordByAuditOperationTypeAndAuditActionType() {
+ AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository);
+
+ AuditActionType[] auditActionTypes = {AuditActionType.CREATE};
+ AuditOperationType[] auditOperationTypes = {AuditOperationType.STREAM};
+ PageRequest pageRequest = PageRequest.of(0, 1);
+ auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, auditActionTypes,
+ auditOperationTypes, null, null);
+
+ verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(eq(auditOperationTypes),
+ eq(auditActionTypes), isNull(), isNull(), eq(pageRequest));
+ verifyNoMoreInteractions(this.auditRecordRepository);
+ }
+
+ @Test
+ void findAuditRecordByAuditOperationTypeAndAuditActionTypeWithNullAuditActionType() {
+ AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository);
+
+ AuditOperationType[] auditOperationTypes = {AuditOperationType.STREAM};
+ PageRequest pageRequest = PageRequest.of(0, 1);
+ auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, null,
+ auditOperationTypes, null, null);
+
+ verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(eq(auditOperationTypes),
+ isNull(), isNull(), isNull(), eq(pageRequest));
+ verifyNoMoreInteractions(this.auditRecordRepository);
+ }
+
+ @Test
+ void findAuditRecordByAuditOperationTypeAndAuditActionTypeWithNullOperationType() {
+ AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository);
+
+ AuditActionType[] auditActionTypes = {AuditActionType.CREATE};
+ PageRequest pageRequest = PageRequest.of(0, 1);
+ auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, auditActionTypes,
+ null, null, null);
+
+ verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(isNull(),
+ eq(auditActionTypes), isNull(), isNull(), eq(pageRequest));
+ verifyNoMoreInteractions(this.auditRecordRepository);
+ }
+
+ @Test
+ void findAuditRecordByAuditOperationTypeAndAuditActionTypeWithNullActionAndOperationType() {
+ AuditRecordService auditRecordService = new DefaultAuditRecordService(auditRecordRepository);
+
+ PageRequest pageRequest = PageRequest.of(0, 1);
+ auditRecordService.findAuditRecordByAuditOperationTypeAndAuditActionTypeAndDate(pageRequest, null, null, null,
+ null);
+
+ verify(this.auditRecordRepository, times(1)).findByActionTypeAndOperationTypeAndDate(isNull(), isNull(),
+ isNull(), isNull(), eq(pageRequest));
+ verifyNoMoreInteractions(this.auditRecordRepository);
+ }
}
diff --git a/spring-cloud-dataflow-autoconfigure/pom.xml b/spring-cloud-dataflow-autoconfigure/pom.xml
index 6d84340829..4ae4ac0b74 100644
--- a/spring-cloud-dataflow-autoconfigure/pom.xml
+++ b/spring-cloud-dataflow-autoconfigure/pom.xml
@@ -1,15 +1,21 @@
-
+4.0.0org.springframework.cloudspring-cloud-dataflow-parent
- 2.8.0-SNAPSHOT
+ 3.0.0-SNAPSHOT
+ ../spring-cloud-dataflow-parentspring-cloud-dataflow-autoconfigurejarspring-cloud-dataflow-autoconfigureData Flow Autoconfig
+
+ true
+ 3.4.1
+ org.springframework.boot
@@ -18,10 +24,12 @@
org.springframework.cloudspring-cloud-dataflow-server-core
+ ${project.version}org.springframework.cloudspring-cloud-dataflow-platform-kubernetes
+ ${project.version}io.fabric8
@@ -30,6 +38,7 @@
org.springframework.cloudspring-cloud-dataflow-platform-cloudfoundry
+ ${project.version}org.springframework.cloud
@@ -52,6 +61,11 @@
spring-boot-starter-testtest
+
+ com.h2database
+ h2
+ test
+
@@ -60,6 +74,7 @@
trueMETA-INF/spring.factories
+ META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports
@@ -67,7 +82,46 @@
org.apache.maven.pluginsmaven-resources-plugin
+ 3.3.1
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+ 3.1.2
+
+ 1
+ 1
+
+
+
+ org.apache.maven.plugins
+ maven-javadoc-plugin
+ ${maven-javadoc-plugin.version}
+
+
+ javadoc
+
+ jar
+
+ package
+
+
+
+
+ org.apache.maven.plugins
+ maven-source-plugin
+ 3.3.0
+
+
+ source
+
+ jar
+
+ package
+
+
+
diff --git a/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/StreamDefinitionServiceAutoConfiguration.java b/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/StreamDefinitionServiceAutoConfiguration.java
index 362d5ffc89..ebd54aa978 100644
--- a/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/StreamDefinitionServiceAutoConfiguration.java
+++ b/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/StreamDefinitionServiceAutoConfiguration.java
@@ -16,6 +16,7 @@
package org.springframework.cloud.dataflow.autoconfigure;
+import org.springframework.boot.autoconfigure.AutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.cloud.dataflow.core.DefaultStreamDefinitionService;
import org.springframework.cloud.dataflow.core.StreamDefinitionService;
@@ -26,6 +27,7 @@
*
* @author Ilayaperumal Gopinathan
*/
+@AutoConfiguration
public class StreamDefinitionServiceAutoConfiguration {
diff --git a/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/local/LocalDataFlowServerAutoConfiguration.java b/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/local/LocalDataFlowServerAutoConfiguration.java
index 6bc1134e82..cdbe191c3c 100644
--- a/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/local/LocalDataFlowServerAutoConfiguration.java
+++ b/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/local/LocalDataFlowServerAutoConfiguration.java
@@ -19,6 +19,7 @@
import java.util.HashMap;
import java.util.Map;
+import org.springframework.boot.autoconfigure.AutoConfiguration;
import org.springframework.boot.autoconfigure.AutoConfigureBefore;
import org.springframework.cloud.dataflow.server.config.DataFlowControllerAutoConfiguration;
import org.springframework.cloud.deployer.resource.docker.DockerResourceLoader;
@@ -26,14 +27,14 @@
import org.springframework.cloud.deployer.resource.maven.MavenResourceLoader;
import org.springframework.cloud.deployer.resource.support.DelegatingResourceLoader;
import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ResourceLoader;
+
/**
* Auto-configuration for local dataflow server.
*
* @author Janne Valkealahti
*/
-@Configuration
+@AutoConfiguration
@AutoConfigureBefore(DataFlowControllerAutoConfiguration.class)
public class LocalDataFlowServerAutoConfiguration {
diff --git a/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/local/LocalSchedulerAutoConfiguration.java b/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/local/LocalSchedulerAutoConfiguration.java
index 2a461d9493..112e4b9460 100644
--- a/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/local/LocalSchedulerAutoConfiguration.java
+++ b/spring-cloud-dataflow-autoconfigure/src/main/java/org/springframework/cloud/dataflow/autoconfigure/local/LocalSchedulerAutoConfiguration.java
@@ -18,6 +18,7 @@
import java.util.Collections;
import java.util.List;
+import org.springframework.boot.autoconfigure.AutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.cloud.dataflow.server.config.OnLocalPlatform;
import org.springframework.cloud.dataflow.server.config.features.SchedulerConfiguration;
@@ -26,13 +27,12 @@
import org.springframework.cloud.deployer.spi.scheduler.Scheduler;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Conditional;
-import org.springframework.context.annotation.Configuration;
/**
* @author Mark Pollack
*/
-@Configuration
-@Conditional({ OnLocalPlatform.class, SchedulerConfiguration.SchedulerConfigurationPropertyChecker.class })
+@AutoConfiguration
+@Conditional({OnLocalPlatform.class, SchedulerConfiguration.SchedulerConfigurationPropertyChecker.class})
public class LocalSchedulerAutoConfiguration {
@Bean
diff --git a/spring-cloud-dataflow-autoconfigure/src/main/resources/META-INF/spring.factories b/spring-cloud-dataflow-autoconfigure/src/main/resources/META-INF/spring.factories
index eb58ce1aee..66237ea2d7 100644
--- a/spring-cloud-dataflow-autoconfigure/src/main/resources/META-INF/spring.factories
+++ b/spring-cloud-dataflow-autoconfigure/src/main/resources/META-INF/spring.factories
@@ -1,7 +1,3 @@
-org.springframework.boot.autoconfigure.EnableAutoConfiguration=\
- org.springframework.cloud.dataflow.autoconfigure.local.LocalDataFlowServerAutoConfiguration, \
- org.springframework.cloud.dataflow.autoconfigure.local.LocalSchedulerAutoConfiguration, \
- org.springframework.cloud.dataflow.autoconfigure.StreamDefinitionServiceAutoConfiguration
org.springframework.context.ApplicationListener=\
org.springframework.cloud.dataflow.autoconfigure.local.ProfileApplicationListener
diff --git a/spring-cloud-dataflow-autoconfigure/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports b/spring-cloud-dataflow-autoconfigure/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports
new file mode 100644
index 0000000000..c5d9f32d79
--- /dev/null
+++ b/spring-cloud-dataflow-autoconfigure/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports
@@ -0,0 +1,3 @@
+org.springframework.cloud.dataflow.autoconfigure.StreamDefinitionServiceAutoConfiguration
+org.springframework.cloud.dataflow.autoconfigure.local.LocalDataFlowServerAutoConfiguration
+org.springframework.cloud.dataflow.autoconfigure.local.LocalSchedulerAutoConfiguration
\ No newline at end of file
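The three auto-configurations previously listed under the EnableAutoConfiguration key in spring.factories are now declared here, one fully qualified class name per line. A hedged sketch of how such a registration can be exercised with Spring Boot's ApplicationContextRunner; the assertion that a StreamDefinitionService bean is contributed is an assumption based on the imports shown earlier in this patch, not on code included here:

import org.junit.jupiter.api.Test;
import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.cloud.dataflow.autoconfigure.StreamDefinitionServiceAutoConfiguration;
import org.springframework.cloud.dataflow.core.StreamDefinitionService;

import static org.assertj.core.api.Assertions.assertThat;

// Sketch only: runs the auto-configuration in isolation and checks the bean it
// is assumed to contribute.
class StreamDefinitionServiceAutoConfigurationSketchTests {

	private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
			.withConfiguration(AutoConfigurations.of(StreamDefinitionServiceAutoConfiguration.class));

	@Test
	void contributesStreamDefinitionService() {
		contextRunner.run(context -> assertThat(context).hasSingleBean(StreamDefinitionService.class));
	}
}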
diff --git a/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/AbstractSchedulerPerPlatformTest.java b/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/AbstractSchedulerPerPlatformTest.java
index 8b2a4decef..63f830647a 100644
--- a/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/AbstractSchedulerPerPlatformTest.java
+++ b/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/AbstractSchedulerPerPlatformTest.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2018 the original author or authors.
+ * Copyright 2018-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -18,8 +18,6 @@
import io.pivotal.reactor.scheduler.ReactorSchedulerClient;
import org.cloudfoundry.operations.CloudFoundryOperations;
-import org.junit.runner.RunWith;
-import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.actuate.autoconfigure.security.servlet.ManagementWebSecurityAutoConfiguration;
@@ -30,21 +28,27 @@
import org.springframework.boot.cloud.CloudPlatform;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
+import org.springframework.cloud.dataflow.registry.service.AppRegistryService;
+import org.springframework.cloud.dataflow.server.task.DataflowTaskExecutionQueryDao;
+import org.springframework.cloud.dataflow.server.task.TaskDefinitionReader;
+import org.springframework.cloud.dataflow.server.task.TaskDeploymentReader;
import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryConnectionProperties;
import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryDeployerAutoConfiguration;
+import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryDeploymentProperties;
import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryTaskLauncher;
-import org.springframework.cloud.deployer.spi.scheduler.cloudfoundry.CloudFoundrySchedulerProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+
+import static org.mockito.Mockito.mock;
/**
* @author Christian Tzolov
+ * @author Corneil du Plessis
*/
-@RunWith(SpringJUnit4ClassRunner.class)
+
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT,
classes = AbstractSchedulerPerPlatformTest.AutoConfigurationApplication.class)
@DirtiesContext
@@ -54,39 +58,58 @@ public abstract class AbstractSchedulerPerPlatformTest {
protected ApplicationContext context;
@Configuration
- @EnableAutoConfiguration(exclude = { LocalDataFlowServerAutoConfiguration.class,
+ @EnableAutoConfiguration(exclude = {LocalDataFlowServerAutoConfiguration.class,
CloudFoundryDeployerAutoConfiguration.class, SecurityAutoConfiguration.class,
- SecurityFilterAutoConfiguration.class, ManagementWebSecurityAutoConfiguration.class })
+ SecurityFilterAutoConfiguration.class, ManagementWebSecurityAutoConfiguration.class})
public static class AutoConfigurationApplication {
+ @Bean
+ public AppRegistryService appRegistryService() {
+ return mock(AppRegistryService.class);
+ }
+
+ @Bean
+ public TaskDefinitionReader taskDefinitionReader() {
+ return mock(TaskDefinitionReader.class);
+ }
+
+ @Bean
+ public TaskDeploymentReader taskDeploymentReader() {
+ return mock(TaskDeploymentReader.class);
+ }
+
+ @Bean
+ DataflowTaskExecutionQueryDao dataflowTaskExecutionQueryDao() {
+ return mock(DataflowTaskExecutionQueryDao.class);
+ }
@Configuration
@ConditionalOnCloudPlatform(CloudPlatform.CLOUD_FOUNDRY)
public static class CloudFoundryMockConfig {
@MockBean
- protected CloudFoundrySchedulerProperties cloudFoundrySchedulerProperties;
+ protected CloudFoundryDeploymentProperties cloudFoundryDeploymentProperties;
@Bean
@Primary
public ReactorSchedulerClient reactorSchedulerClient() {
- return Mockito.mock(ReactorSchedulerClient.class);
+ return mock(ReactorSchedulerClient.class);
}
@Bean
@Primary
public CloudFoundryOperations cloudFoundryOperations() {
- return Mockito.mock(CloudFoundryOperations.class);
+ return mock(CloudFoundryOperations.class);
}
@Bean
@Primary
public CloudFoundryConnectionProperties cloudFoundryConnectionProperties() {
- return Mockito.mock(CloudFoundryConnectionProperties.class);
+ return mock(CloudFoundryConnectionProperties.class);
}
@Bean
@Primary
public CloudFoundryTaskLauncher CloudFoundryTaskLauncher() {
- return Mockito.mock(CloudFoundryTaskLauncher.class);
+ return mock(CloudFoundryTaskLauncher.class);
}
}
}
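The test configuration above now registers its collaborators (AppRegistryService, TaskDefinitionReader, TaskDeploymentReader, DataflowTaskExecutionQueryDao) as plain Mockito mocks exposed through @Bean methods, with mock() statically imported. A minimal sketch of that pattern with a hypothetical RepositoryClient collaborator:

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import static org.mockito.Mockito.mock;

// Pattern sketch: register a Mockito mock as an ordinary bean so the
// application context can start without the real infrastructure behind it.
@Configuration
class TestCollaboratorConfig {

	@Bean
	RepositoryClient repositoryClient() {
		return mock(RepositoryClient.class);
	}
}

interface RepositoryClient {
	String fetch(String id);
}

Unlike @MockBean, a hand-registered mock bean is not reset between tests, which keeps it a simple wiring stand-in when no per-test stubbing is needed.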
diff --git a/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/ProfileApplicationListenerTest.java b/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/ProfileApplicationListenerTest.java
index 31eb81ea97..6587ae8f62 100644
--- a/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/ProfileApplicationListenerTest.java
+++ b/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/ProfileApplicationListenerTest.java
@@ -16,11 +16,11 @@
package org.springframework.cloud.dataflow.autoconfigure.local;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
-import org.mockito.junit.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.boot.context.event.ApplicationEnvironmentPreparedEvent;
import org.springframework.cloud.dataflow.server.config.cloudfoundry.CloudFoundryCloudProfileProvider;
@@ -35,9 +35,10 @@
* {@link ProfileApplicationListener} test cases
*
* @author Chris Schaefer
+ * @author Corneil du Plessis
*/
-@RunWith(MockitoJUnitRunner.class)
-public class ProfileApplicationListenerTest {
+@ExtendWith(MockitoExtension.class)
+class ProfileApplicationListenerTest {
private MockEnvironment environment;
@@ -46,21 +47,21 @@ public class ProfileApplicationListenerTest {
private ProfileApplicationListener profileApplicationListener;
- @Before
- public void before() {
+ @BeforeEach
+ void before() {
environment = new MockEnvironment();
when(event.getEnvironment()).thenReturn(environment);
profileApplicationListener = new ProfileApplicationListener();
}
@Test
- public void shouldEnableLocalProfile() {
+ void shouldEnableLocalProfile() {
profileApplicationListener.onApplicationEvent(event);
assertThat(environment.getActiveProfiles()).contains("local");
}
@Test
- public void shouldNotEnableLocalProfileRunningOnKubernetes() {
+ void shouldNotEnableLocalProfileRunningOnKubernetes() {
environment.setProperty("kubernetes_service_host", "true");
profileApplicationListener.onApplicationEvent(event);
assertThat(environment.getActiveProfiles()).doesNotContain("local");
@@ -68,7 +69,7 @@ public void shouldNotEnableLocalProfileRunningOnKubernetes() {
}
@Test
- public void shouldNotEnableLocalProfileRunningOnCloudFoundry() {
+ void shouldNotEnableLocalProfileRunningOnCloudFoundry() {
environment.setProperty("VCAP_APPLICATION", "true");
profileApplicationListener.onApplicationEvent(event);
assertThat(environment.getActiveProfiles()).doesNotContain("local");
@@ -76,7 +77,7 @@ public void shouldNotEnableLocalProfileRunningOnCloudFoundry() {
}
@Test
- public void testAddedSpringCloudKubernetesConfigEnabledIsFalse() {
+ void addedSpringCloudKubernetesConfigEnabledIsFalse() {
profileApplicationListener.onApplicationEvent(event);
PropertySource> propertySource = environment.getPropertySources().get("skipperProfileApplicationListener");
assertThat(propertySource.containsProperty("spring.cloud.kubernetes.enabled")).isTrue();
@@ -84,7 +85,7 @@ public void testAddedSpringCloudKubernetesConfigEnabledIsFalse() {
}
@Test
- public void backOffIfCloudProfileAlreadySet() {
+ void backOffIfCloudProfileAlreadySet() {
// kubernetes profile set by user
environment.setActiveProfiles("kubernetes");
// environment says we are on cloud foundry, the profile is 'cloud'
@@ -96,7 +97,7 @@ public void backOffIfCloudProfileAlreadySet() {
}
@Test
- public void doNotSetLocalIfKubernetesProfileIsSet() {
+ void doNotSetLocalIfKubernetesProfileIsSet() {
// kubernetes profile set by user
environment.setActiveProfiles("kubernetes");
profileApplicationListener.onApplicationEvent(event);
@@ -106,7 +107,7 @@ public void doNotSetLocalIfKubernetesProfileIsSet() {
}
@Test
- public void disableProfileApplicationListener() {
+ void disableProfileApplicationListener() {
try {
System.setProperty(ProfileApplicationListener.IGNORE_PROFILEAPPLICATIONLISTENER_PROPERTY_NAME, "true");
environment.setProperty("VCAP_APPLICATION", "true");
diff --git a/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/SchedulerPerPlatformTest.java b/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/SchedulerPerPlatformTest.java
index 03b38f1ea5..b2ae94a741 100644
--- a/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/SchedulerPerPlatformTest.java
+++ b/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/SchedulerPerPlatformTest.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2018 the original author or authors.
+ * Copyright 2018-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,80 +16,82 @@
package org.springframework.cloud.dataflow.autoconfigure.local;
-import org.junit.Test;
-import org.junit.experimental.runners.Enclosed;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.boot.cloud.CloudPlatform;
import org.springframework.cloud.deployer.spi.kubernetes.KubernetesSchedulerProperties;
import org.springframework.cloud.deployer.spi.scheduler.Scheduler;
-import org.springframework.cloud.deployer.spi.scheduler.cloudfoundry.CloudFoundrySchedulerProperties;
import org.springframework.test.context.TestPropertySource;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType;
/**
* @author Christian Tzolov
+ * @author Corneil du Plessis
*/
-@RunWith(Enclosed.class)
public class SchedulerPerPlatformTest {
+ @Nested
@TestPropertySource(properties = { "spring.cloud.dataflow.features.schedules-enabled=false" })
- public static class AllSchedulerDisabledTests extends AbstractSchedulerPerPlatformTest {
+ class AllSchedulerDisabledTests extends AbstractSchedulerPerPlatformTest {
- @Test(expected = NoSuchBeanDefinitionException.class)
- public void testLocalSchedulerEnabled() {
- assertFalse(context.getEnvironment().containsProperty("kubernetes_service_host"));
- assertFalse(CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment()));
- context.getBean(Scheduler.class);
+ @Test
+ void localSchedulerEnabled() {
+ assertThat(context.getEnvironment().containsProperty("kubernetes_service_host")).isFalse();
+ assertThat(CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment())).isFalse();
+ assertThatExceptionOfType(NoSuchBeanDefinitionException.class).isThrownBy(() -> {
+ context.getBean(Scheduler.class);
+ });
}
}
+ @Nested
@TestPropertySource(properties = { "spring.cloud.dataflow.features.schedules-enabled=true" })
- public static class LocalSchedulerTests extends AbstractSchedulerPerPlatformTest {
+ class LocalSchedulerTests extends AbstractSchedulerPerPlatformTest {
@Test
- public void testLocalSchedulerEnabled() {
- assertFalse("K8s should be disabled", context.getEnvironment().containsProperty("kubernetes_service_host"));
- assertFalse("CF should be disabled", CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment()));
+ void localSchedulerEnabled() {
+ assertThat(context.getEnvironment().containsProperty("kubernetes_service_host")).as("K8s should be disabled").isFalse();
+ assertThat(CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment())).as("CF should be disabled").isFalse();
Scheduler scheduler = context.getBean(Scheduler.class);
- assertNotNull(scheduler);
- assertTrue(scheduler.getClass().getName().contains("LocalSchedulerAutoConfiguration"));
+ assertThat(scheduler).isNotNull();
+ assertThat(scheduler.getClass().getName()).contains("LocalSchedulerAutoConfiguration");
}
}
+ @Nested
@TestPropertySource(properties = { "spring.cloud.dataflow.features.schedules-enabled=true",
- "kubernetes_service_host=dummy" })
- public static class KubernetesSchedulerActivatedTests extends AbstractSchedulerPerPlatformTest {
+ "kubernetes_service_host=dummy", "spring.cloud.kubernetes.client.namespace=default" })
+ class KubernetesSchedulerActivatedTests extends AbstractSchedulerPerPlatformTest {
@Test
- public void testKubernetesSchedulerEnabled() {
- assertTrue("K8s should be enabled", context.getEnvironment().containsProperty("kubernetes_service_host"));
- assertFalse("CF should be disabled", CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment()));
+ void kubernetesSchedulerEnabled() {
+ assertThat(context.getEnvironment().containsProperty("kubernetes_service_host")).as("K8s should be enabled").isTrue();
+ assertThat(CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment())).as("CF should be disabled").isFalse();
KubernetesSchedulerProperties props = context.getBean(KubernetesSchedulerProperties.class);
- assertNotNull(props);
+ assertThat(props).isNotNull();
}
}
+ @Nested
@TestPropertySource(properties = { "spring.cloud.dataflow.features.schedules-enabled=true",
"VCAP_APPLICATION=\"{\"instance_id\":\"123\"}\"" })
- public static class CloudFoundrySchedulerActivatedTests extends AbstractSchedulerPerPlatformTest {
+ class CloudFoundrySchedulerActivatedTests extends AbstractSchedulerPerPlatformTest {
@Test
- public void testCloudFoundryScheudlerEnabled() {
- assertFalse("K8s should be disabled", context.getEnvironment().containsProperty("kubernetes_service_host"));
- assertTrue("CF should be enabled", CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment()));
+ void cloudFoundrySchedulerEnabled() {
+ assertThat(context.getEnvironment()
+ .containsProperty("kubernetes_service_host")).as("K8s should be disabled").isFalse();
+ assertThat(CloudPlatform.CLOUD_FOUNDRY.isActive(context.getEnvironment())).as("CF should be enabled").isTrue();
- CloudFoundrySchedulerProperties props = context.getBean(CloudFoundrySchedulerProperties.class);
- assertNotNull(props);
}
}
}
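Two Jupiter idioms replace the JUnit 4 constructs in the test above: @Nested inner classes stand in for the Enclosed runner, and @Test(expected = ...) becomes an explicit AssertJ assertThatExceptionOfType(...).isThrownBy(...). A small self-contained sketch:

import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;

// Sketch of the JUnit 5 idioms adopted above: @Nested groups related cases,
// and expected exceptions are asserted explicitly.
class CalculatorTest {

	@Nested
	class Division {

		@Test
		void dividesEvenly() {
			assertThat(divide(10, 2)).isEqualTo(5);
		}

		@Test
		void rejectsDivisionByZero() {
			assertThatExceptionOfType(ArithmeticException.class)
					.isThrownBy(() -> divide(1, 0));
		}
	}

	private static int divide(int a, int b) {
		return a / b;
	}
}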
diff --git a/spring-cloud-dataflow-build/README.md b/spring-cloud-dataflow-build/README.md
new file mode 100644
index 0000000000..7c459aa9a3
--- /dev/null
+++ b/spring-cloud-dataflow-build/README.md
@@ -0,0 +1 @@
+# spring-cloud-dataflow-build
diff --git a/spring-cloud-dataflow-build/pom.xml b/spring-cloud-dataflow-build/pom.xml
new file mode 100644
index 0000000000..90a5df4fc2
--- /dev/null
+++ b/spring-cloud-dataflow-build/pom.xml
@@ -0,0 +1,739 @@
+
+
+ 4.0.0
+ org.springframework.cloud
+ spring-cloud-dataflow-build
+ 3.0.0-SNAPSHOT
+ pom
+ Spring Cloud Dataflow Build
+ Spring Cloud Dataflow Build, managing plugins and dependencies
+
+ spring-cloud-dataflow-build-dependencies
+ spring-cloud-dataflow-dependencies-parent
+ spring-cloud-dataflow-build-tools
+
+ https://spring.io/projects/spring-cloud-dataflow
+
+ 17
+ @
+ UTF-8
+ UTF-8
+ ${basedir}
+ ${project.artifactId}
+
+ 3.3.7
+
+ 3.0.0-SNAPSHOT
+ ${project.build.directory}/build-docs
+ ${project.build.directory}/refdocs/
+ 0.1.3.RELEASE
+ 2.3.7
+ 2.2.9
+ ${project.version}
+ deploy
+ ${project.version}
+
+ jacoco
+ reuseReports
+ ${project.basedir}/../target/jacoco.exec
+ java
+
+ 3.13.0
+ 3.5.0
+ 2.10
+ 2.5.7
+
+ ${maven-checkstyle-plugin.version}
+ 8.29
+ 0.0.9
+ 3.5.0
+ 3.4.1
+ 3.0.1
+ 3.3.0
+ 3.1.0
+ 3.1.2
+ 1.6.0
+ 3.3.1
+ 4.9.9
+ 3.1.1
+ 3.0.0-M2
+ 1.6
+ 3.1.1
+ 2.2.4
+ 3.0.0
+ 2.2.1
+ 1.20
+ 0.0.7
+ 3.5.0
+ false
+ true
+ true
+ true
+ main
+
+ https://raw.githubusercontent.com/spring-cloud/spring-cloud-dataflow-build/${spring-cloud-build-checkstyle.branch}/spring-cloud-dataflow-build-tools/src/checkstyle/checkstyle-suppressions.xml
+
+
+ https://raw.githubusercontent.com/spring-cloud/spring-cloud-dataflow-build/${spring-cloud-build-checkstyle.branch}/spring-cloud-dataflow-build-tools/src/checkstyle/nohttp-checkstyle.xml
+
+
+ ${project.basedir}/../src/checkstyle/checkstyle-suppressions.xml
+
+ 0.0.2.RELEASE
+ true
+ 3.2.10
+ 1.8.1
+ ${project.basedir}/src/main/asciidoc/_configprops.adoc
+ .*
+ generate-resources
+ generate-resources
+
+ slow,docker
+
+
+
+
+ org.springframework.cloud
+ spring-cloud-dataflow-build-dependencies
+ 3.0.0-SNAPSHOT
+ pom
+ import
+
+
+
+
+
+
+
+ Pivotal Software, Inc.
+ https://www.spring.io
+
+
+
+ Apache License, Version 2.0
+ https://www.apache.org/licenses/LICENSE-2.0
+
+
+
+ https://github.com/spring-cloud/spring-cloud-dataflow-build
+ scm:git:git://github.com/spring-cloud/spring-cloud-dataflow-build.git
+
+
+ scm:git:ssh://git@github.com/spring-cloud/spring-cloud-dataflow-build.git
+
+ HEAD
+
+
+
+ scdf-team
+ Data Flow Team
+ https://github.com/spring-cloud/spring-cloud-dataflow/graphs/contributors
+
+
+
+
+
+
+ ${basedir}/src/main/resources
+ true
+
+ **/application*.yml
+ **/application*.properties
+
+
+
+ ${basedir}/src/main/resources
+
+ **/application*.yml
+ **/application*.properties
+
+
+
+
+
+ ${basedir}/src/test/resources
+ true
+
+ **/application*.yml
+ **/application*.properties
+
+
+
+ ${basedir}/src/test/resources
+
+ **/application*.yml
+ **/application*.properties
+
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-eclipse-plugin
+ ${maven-eclipse-plugin.version}
+
+
+ org.apache.maven.plugins
+ maven-compiler-plugin
+ ${maven-compiler-plugin.version}
+
+
+ org.apache.maven.plugins
+ maven-failsafe-plugin
+ ${maven-failsafe-plugin.version}
+
+ --add-opens java.base/java.util=ALL-UNNAMED
+ ${groups}
+ ${excludedGroups}
+
+
+
+
+ integration-test
+ verify
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-jar-plugin
+ ${maven-jar-plugin.version}
+
+
+
+ ${start-class}
+ true
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-antrun-plugin
+ ${maven-antrun-plugin.version}
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+ ${maven-surefire-plugin.version}
+
+ 1
+ 1
+
+ **/*Tests.java
+ **/*Test.java
+
+
+ **/Abstract*.java
+
+ ${groups}
+ ${excludedGroups}
+
+
+
+ org.codehaus.mojo
+ exec-maven-plugin
+ ${exec-maven-plugin.version}
+
+ ${start-class}
+
+
+
+ org.apache.maven.plugins
+ maven-resources-plugin
+ ${maven-resources-plugin.version}
+
+
+ ${resource.delimiter}
+
+ false
+
+
+
+ io.spring.javaformat
+ spring-javaformat-maven-plugin
+ ${spring-javaformat.version}
+
+
+ validate
+
+ ${disable.checks}
+
+
+ apply
+ validate
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-checkstyle-plugin
+ ${maven-checkstyle-plugin.version}
+
+
+ com.puppycrawl.tools
+ checkstyle
+ ${puppycrawl-tools-checkstyle.version}
+
+
+ io.spring.javaformat
+ spring-javaformat-checkstyle
+ ${spring-javaformat-checkstyle.version}
+
+
+ org.springframework.cloud
+ spring-cloud-dataflow-build-tools
+ ${project.version}
+
+
+ io.spring.nohttp
+ nohttp-checkstyle
+ ${nohttp-checkstyle.version}
+
+
+
+
+ checkstyle-validation
+ validate
+ true
+
+ ${disable.checks}
+ checkstyle.xml
+ checkstyle-header.txt
+
+ checkstyle.build.directory=${project.build.directory}
+ checkstyle.suppressions.file=${checkstyle.suppressions.file}
+ checkstyle.additional.suppressions.file=${checkstyle.additional.suppressions.file}
+
+ true
+
+
+ ${maven-checkstyle-plugin.includeTestSourceDirectory}
+
+ ${maven-checkstyle-plugin.failsOnError}
+
+
+ ${maven-checkstyle-plugin.failOnViolation}
+
+
+
+ check
+
+
+
+ no-http-checkstyle-validation
+ validate
+ true
+
+ ${disable.nohttp.checks}
+ ${checkstyle.nohttp.file}
+ **/*
+ **/.idea/**/*,**/.git/**/*,**/target/**/*,**/*.log
+ ./
+
+
+ check
+
+
+
+
+
+ io.github.git-commit-id
+ git-commit-id-maven-plugin
+ ${git-commit-id-plugin.version}
+
+
+
+ revision
+
+
+
+
+ true
+
+ ${project.build.outputDirectory}/git.properties
+
+ full
+
+
+
+
+ org.springframework.boot
+ spring-boot-maven-plugin
+ ${spring-boot.version}
+
+
+
+ repackage
+
+
+
+
+ CLASSIC
+ ${start-class}
+
+
+
+ org.apache.maven.plugins
+ maven-enforcer-plugin
+ ${maven-enforcer-plugin.version}
+
+
+ enforce-versions
+
+ enforce
+
+
+
+
+ false
+
+
+
+
+
+
+
+ org.eclipse.m2e
+ lifecycle-mapping
+ 1.0.0
+
+
+
+
+
+
+ org.apache.maven.plugins
+
+
+ maven-checkstyle-plugin
+
+
+ [2.17,)
+
+
+ check
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-javadoc-plugin
+ ${maven-javadoc-plugin.version}
+
+
+ javadoc
+
+ jar
+
+ package
+
+
+
+
+ org.apache.maven.plugins
+ maven-source-plugin
+ 3.3.0
+
+
+ source
+
+ jar
+
+ package
+
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-report-plugin
+ ${maven-surefire-report-plugin.version}
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-compiler-plugin
+
+ true
+ ${java.version}
+
+
+
+ org.apache.maven.plugins
+ maven-javadoc-plugin
+ ${maven-javadoc-plugin.version}
+
+
+ javadoc
+
+ jar
+
+ package
+
+
+
+
+ false
+ true
+
+
+
+ org.apache.maven.plugins
+ maven-source-plugin
+ ${maven-source-plugin.version}
+
+
+ attach-sources
+
+ jar
+
+ package
+
+
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-report-plugin
+ ${maven-surefire-report-plugin.version}
+
+
+ org.apache.maven.plugins
+ maven-javadoc-plugin
+ ${maven-javadoc-plugin.version}
+
+
+ non-aggregate
+
+
+
+ javadoc
+
+
+
+ aggregate
+
+
+
+ aggregate
+
+
+
+
+
+
+
+
+ spring
+
+
+ maven-central
+ Maven Central
+ https://repo.maven.apache.org/maven2
+
+ false
+
+
+
+ spring-snapshots
+ Spring Snapshots
+ https://repo.spring.io/snapshot
+
+ true
+
+
+
+ spring-milestones
+ Spring Milestones
+ https://repo.spring.io/milestone
+
+ true
+
+
+
+
+
+ maven-central
+ Maven Central
+ https://repo.maven.apache.org/maven2
+
+ false
+
+
+
+ spring-snapshots
+ Spring Snapshots
+ https://repo.spring.io/snapshot
+
+ true
+
+
+
+ spring-milestones
+ Spring Milestones
+ https://repo.spring.io/milestone
+
+ false
+
+
+
+
+
+ milestone
+
+
+ repo.spring.io
+ Spring Milestone Repository
+ https://repo.spring.io/libs-milestone-local
+
+
+
+
+ java8
+
+ [1.8,)
+
+
+
+
+ org.apache.maven.plugins
+ maven-javadoc-plugin
+ ${maven-javadoc-plugin.version}
+
+ none
+
+
+
+
+
+
+ sonar
+
+ false
+
+
+
+
+ org.jacoco
+ jacoco-maven-plugin
+ 0.8.12
+
+ ${sonar.jacoco.reportPath}
+ true
+
+
+
+ agent
+
+ prepare-agent
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+ ${maven-surefire-plugin.version}
+
+ --add-opens java.base/java.util=ALL-UNNAMED
+ 1
+ 1
+
+
+ listener
+ org.sonar.java.jacoco.JUnitListener
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ license
+
+ true
+
+
+
+
+ org.codehaus.mojo
+ license-maven-plugin
+ ${license-maven-plugin.version}
+
+
+ aggregate-licenses
+
+ license:aggregate-add-third-party
+
+
+
+
+
+
+
+
+ fast
+
+ true
+
+
+
+ failsafe
+
+
+
+ org.apache.maven.plugins
+ maven-failsafe-plugin
+ ${maven-failsafe-plugin.version}
+
+
+
+
+
+ checkstyle
+
+
+
+ org.apache.maven.plugins
+ maven-checkstyle-plugin
+
+
+
+
+
+
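Much of this new build parent's XML markup did not survive extraction, but the surefire and failsafe configuration visible above passes --add-opens java.base/java.util=ALL-UNNAMED to the forked test JVMs. On JDK 17 that flag is what permits deep reflection into java.util types; a small standalone illustration (not part of the build) of the failure it prevents:

import java.lang.reflect.Field;
import java.lang.reflect.InaccessibleObjectException;
import java.util.ArrayList;
import java.util.List;

// Run without --add-opens java.base/java.util=ALL-UNNAMED on JDK 17 and the
// setAccessible call below throws InaccessibleObjectException; with the flag,
// the backing array is readable.
public class AddOpensDemo {

	public static void main(String[] args) throws Exception {
		List<String> list = new ArrayList<>(List.of("a", "b"));
		Field elementData = ArrayList.class.getDeclaredField("elementData");
		try {
			elementData.setAccessible(true);
			Object[] backingArray = (Object[]) elementData.get(list);
			System.out.println("backing array length: " + backingArray.length);
		}
		catch (InaccessibleObjectException ex) {
			System.out.println("blocked by the module system: " + ex.getMessage());
		}
	}
}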
diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-dependencies/pom.xml b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-dependencies/pom.xml
new file mode 100644
index 0000000000..c871cfb30b
--- /dev/null
+++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-dependencies/pom.xml
@@ -0,0 +1,195 @@
+
+
+ 4.0.0
+ org.springframework.cloud
+ spring-cloud-dataflow-build-dependencies
+ 3.0.0-SNAPSHOT
+ Spring Cloud Dataflow Build Dependencies
+ pom
+ Spring Cloud Dataflow Build Dependencies: an internal BOM for use with Spring
+ Cloud Dataflow projects. Use as a BOM or by inheriting from the spring-cloud-dataflow-build.
+
+
+ org.springframework.boot
+ spring-boot-dependencies
+
+ 3.3.5
+
+
+
+ 17
+ UTF-8
+ 2023.0.3
+ 3.2.5
+ 2.16.1
+ 1.12.0
+ 1.26.2
+
+ 9.39.3
+ 2.0.0-M4
+ 2.3.0
+ 3.5.4
+ 5.12.4
+ 4.13.1
+ 32.1.3-jre
+
+
+
+
+
+ com.nimbusds
+ nimbus-jose-jwt
+ ${nimbus-jose-jwt.version}
+
+
+ io.fabric8
+ kubernetes-client-bom
+ ${kubernetes-fabric8-client.version}
+ pom
+ import
+
+
+ org.springframework.cloud
+ spring-cloud-dependencies
+ ${spring-cloud.version}
+ pom
+ import
+
+
+ org.springframework.shell
+ spring-shell-dependencies
+ ${spring-shell.version}
+ pom
+ import
+
+
+ com.google.guava
+ guava
+ ${guava.version}
+
+
+ org.apache.commons
+ commons-compress
+ ${commons-compress.version}
+
+
+ commons-io
+ commons-io
+ ${commons-io.version}
+
+
+ org.apache.commons
+ commons-text
+ ${commons-text.version}
+
+
+ io.micrometer.prometheus
+ prometheus-rsocket-spring
+ ${prometheus-rsocket.version}
+
+
+ io.micrometer.prometheus
+ prometheus-rsocket-client
+ ${prometheus-rsocket.version}
+
+
+ io.pivotal.cfenv
+ java-cfenv
+ ${java-cfenv.version}
+
+
+ io.pivotal.cfenv
+ java-cfenv-boot
+ ${java-cfenv.version}
+
+
+ io.pivotal.cfenv
+ java-cfenv-boot-pivotal-scs
+ ${java-cfenv.version}
+
+
+ io.pivotal.cfenv
+ java-cfenv-boot-pivotal-sso
+ ${java-cfenv.version}
+
+
+ io.pivotal.cfenv
+ java-cfenv-jdbc
+ ${java-cfenv.version}
+
+
+ io.pivotal.spring.cloud
+ spring-cloud-services-starter-config-client
+ ${spring-cloud-services-starter-config-client.version}
+
+
+ junit
+ junit
+ ${junit.version}
+
+
+
+
+
+ spring
+
+
+ maven-central
+ Maven Central
+ https://repo.maven.apache.org/maven2
+
+ false
+
+
+
+ spring-snapshots
+ Spring Snapshots
+ https://repo.spring.io/snapshot
+
+ true
+
+
+
+ spring-milestones
+ Spring Milestones
+ https://repo.spring.io/milestone
+
+ true
+
+
+
+
+
+ maven-central
+ Maven Central
+ https://repo.maven.apache.org/maven2
+
+ false
+
+
+
+ spring-snapshots
+ Spring Snapshots
+ https://repo.spring.io/snapshot
+
+ true
+
+
+
+ spring-milestones
+ Spring Milestones
+ https://repo.spring.io/milestone
+
+ false
+
+
+
+
+
+
diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/pom.xml b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/pom.xml
new file mode 100644
index 0000000000..786abc0b73
--- /dev/null
+++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/pom.xml
@@ -0,0 +1,32 @@
+
+
+ 4.0.0
+ spring-cloud-dataflow-build-tools
+ spring-cloud-dataflow-build-tools
+ jar
+ Spring Cloud Dataflow Build Tools
+
+ org.springframework.cloud
+ spring-cloud-dataflow-build
+ 3.0.0-SNAPSHOT
+
+
+
+ com.puppycrawl.tools
+ checkstyle
+ ${puppycrawl-tools-checkstyle.version}
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-compiler-plugin
+ 3.13.0
+
+ true
+
+
+
+
+
diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/checkstyle-suppressions-empty.xml b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/checkstyle-suppressions-empty.xml
new file mode 100644
index 0000000000..6cb6ad2669
--- /dev/null
+++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/checkstyle-suppressions-empty.xml
@@ -0,0 +1,6 @@
+ [XML content not recoverable from this extraction: empty Checkstyle suppressions document, 6 lines]
\ No newline at end of file
diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/checkstyle-suppressions.xml b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/checkstyle-suppressions.xml
new file mode 100644
index 0000000000..f5f6705862
--- /dev/null
+++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/checkstyle-suppressions.xml
@@ -0,0 +1,15 @@
+ [XML content not recoverable from this extraction: Checkstyle suppression entries, 15 lines]
\ No newline at end of file
diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/nohttp-checkstyle.xml b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/nohttp-checkstyle.xml
new file mode 100644
index 0000000000..4e21a0bdf5
--- /dev/null
+++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/checkstyle/nohttp-checkstyle.xml
@@ -0,0 +1,29 @@
+ [XML content not recoverable from this extraction: nohttp Checkstyle configuration, 29 lines]
\ No newline at end of file
diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/main/resources/checkstyle-header.txt b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/main/resources/checkstyle-header.txt
new file mode 100644
index 0000000000..ff707f0f9e
--- /dev/null
+++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/main/resources/checkstyle-header.txt
@@ -0,0 +1,15 @@
+^\Q/*\E$
+^\Q * Copyright \E(20\d\d\-)?20\d\d\Q the original author or authors.\E$
+^\Q *\E$
+^\Q * Licensed under the Apache License, Version 2.0 (the "License");\E$
+^\Q * you may not use this file except in compliance with the License.\E$
+^\Q * You may obtain a copy of the License at\E$
+^\Q *\E$
+^\Q * https://www.apache.org/licenses/LICENSE-2.0\E$
+^\Q *\E$
+^\Q * Unless required by applicable law or agreed to in writing, software\E$
+^\Q * distributed under the License is distributed on an "AS IS" BASIS,\E$
+^\Q * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\E$
+^\Q * See the License for the specific language governing permissions and\E$
+^\Q * limitations under the License.\E$
+^\Q */\E$
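Each line of checkstyle-header.txt is a regular expression: literal text is wrapped in \Q...\E and the copyright line accepts either a single year or a start-end range via (20\d\d-)?20\d\d. A quick standalone check of that year pattern:

import java.util.regex.Pattern;

// Small check of the copyright-line pattern from checkstyle-header.txt:
// a single year or a "start-end" range in the 2000s is accepted.
public class HeaderPatternCheck {

	private static final Pattern COPYRIGHT_LINE = Pattern.compile(
			"^\\Q * Copyright \\E(20\\d\\d\\-)?20\\d\\d\\Q the original author or authors.\\E$");

	public static void main(String[] args) {
		System.out.println(COPYRIGHT_LINE.matcher(" * Copyright 2018-2021 the original author or authors.").matches()); // true
		System.out.println(COPYRIGHT_LINE.matcher(" * Copyright 2021 the original author or authors.").matches());      // true
		System.out.println(COPYRIGHT_LINE.matcher(" * Copyright 1999 the original author or authors.").matches());      // false
	}
}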
diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/main/resources/checkstyle.xml b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/main/resources/checkstyle.xml
new file mode 100644
index 0000000000..ff46fb9e86
--- /dev/null
+++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-tools/src/main/resources/checkstyle.xml
@@ -0,0 +1,203 @@
+ [XML content not recoverable from this extraction: Checkstyle module definitions, 203 lines]
diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-dependencies-parent/pom.xml b/spring-cloud-dataflow-build/spring-cloud-dataflow-dependencies-parent/pom.xml
new file mode 100644
index 0000000000..29bac750a8
--- /dev/null
+++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-dependencies-parent/pom.xml
@@ -0,0 +1,130 @@
+
+
+ 4.0.0
+ org.springframework.cloud
+ 3.0.0-SNAPSHOT
+ spring-cloud-dataflow-dependencies-parent
+ pom
+ Spring Cloud Dataflow Dependencies Parent
+ Spring Cloud Data Flow Build Dependencies
+ https://projects.spring.io/spring-cloud/
+
+ Pivotal Software, Inc.
+ https://www.spring.io
+
+
+
+ Apache License, Version 2.0
+ https://www.apache.org/licenses/LICENSE-2.0
+
+
+
+ https://github.com/spring-cloud/spring-cloud-dataflow-build
+ scm:git:git://github.com/spring-cloud/spring-cloud-dataflow-build.git
+
+
+ scm:git:ssh://git@github.com/spring-cloud/spring-cloud-dataflow-build.git
+
+ HEAD
+
+
+
+ scdf-team
+ Data Flow Team
+ https://github.com/spring-cloud/spring-cloud-dataflow/graphs/contributors
+
+
+
+ 17
+ UTF-8
+
+ 3.0.0-SNAPSHOT
+
+
+
+ spring
+
+
+ maven-central
+ Maven Central
+ https://repo.maven.apache.org/maven2
+
+ false
+
+
+
+ spring-snapshots
+ Spring Snapshots
+ https://repo.spring.io/snapshot
+
+ true
+
+
+
+ spring-milestones
+ Spring Milestones
+ https://repo.spring.io/milestone
+
+ true
+
+
+
+
+
+ maven-central
+ Maven Central
+ https://repo.maven.apache.org/maven2
+
+ false
+
+
+
+ spring-snapshots
+ Spring Snapshots
+ https://repo.spring.io/snapshot
+
+ true
+
+
+
+ spring-milestones
+ Spring Milestones
+ https://repo.spring.io/milestone
+
+ false
+
+
+
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-enforcer-plugin
+ 1.4.1
+
+
+ enforce-versions
+
+ enforce
+
+
+
+
+ false
+
+
+
+
+
+
+
+
+
diff --git a/spring-cloud-dataflow-classic-docs/pom.xml b/spring-cloud-dataflow-classic-docs/pom.xml
index bcb8389960..2e63126b78 100644
--- a/spring-cloud-dataflow-classic-docs/pom.xml
+++ b/spring-cloud-dataflow-classic-docs/pom.xml
@@ -4,7 +4,8 @@
org.springframework.cloudspring-cloud-dataflow-parent
- 2.8.0-SNAPSHOT
+ 3.0.0-SNAPSHOT
+ ../spring-cloud-dataflow-parentspring-cloud-dataflow-classic-docsSpring Cloud Data Flow Docs for Classic mode
@@ -13,38 +14,42 @@
org.springframework.cloudspring-cloud-dataflow-configuration-metadata
+ ${project.version}org.springframework.cloudspring-cloud-dataflow-core
+ ${project.version}org.springframework.cloudspring-cloud-dataflow-registry
+ ${project.version}org.springframework.cloudspring-cloud-dataflow-rest-resource
+ ${project.version}org.springframework.cloudspring-cloud-dataflow-server-core
+ ${project.version}org.springframework.cloudspring-cloud-dataflow-rest-client
-
-
- org.springframework.cloud
- spring-cloud-dataflow-shell-core
+ ${project.version}org.springframework.cloudspring-cloud-dataflow-completion
+ ${project.version}org.springframework.cloudspring-cloud-starter-dataflow-server
+ ${project.version}test-jartest
@@ -73,20 +78,46 @@
spring-boot-starter-testtest
-
+
+ com.h2database
+ h2
+ test
+
+
+ org.awaitility
+ awaitility
+ test
+
+
org.apache.maven.plugins
- maven-surefire-plugin
-
- false
-
- **/*Documentation.java
- **/*Tests.java
-
-
+ maven-jar-plugin
+ 3.3.0
+
+
+ docs
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+
+ 1
+ 1
+ true
+ false
+
+ **/*Documentation.java
+
+
+
+
+
+
+
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AboutDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AboutDocumentation.java
index 4fb34a5ec9..cf65bf77a2 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AboutDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AboutDocumentation.java
@@ -16,7 +16,7 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import org.springframework.http.MediaType;
import org.springframework.restdocs.payload.JsonFieldType;
@@ -29,12 +29,18 @@
/**
* @author Gunnar Hillert
* @author Ilayaperumal Gopinathan
+ * @author Chris Bono
+ * @author Corneil du Plessis
*/
-public class AboutDocumentation extends BaseDocumentation {
+@SuppressWarnings("NewClassNamingConvention")
+class AboutDocumentation extends BaseDocumentation {
@Test
- public void getMetaInformation() throws Exception {
- this.mockMvc.perform(get("/about").accept(MediaType.APPLICATION_JSON)).andExpect(status().isOk())
+ void getMetaInformation() throws Exception {
+ this.mockMvc.perform(
+ get("/about")
+ .accept(MediaType.APPLICATION_JSON))
+ .andExpect(status().isOk())
.andDo(this.documentationHandler.document(responseFields(
fieldWithPath("_links.self.href").description("Link to the runtime environment resource"),
@@ -156,8 +162,36 @@ public void getMetaInformation() throws Exception {
fieldWithPath("monitoringDashboardInfo.source").type(JsonFieldType.STRING).description(
"Unique DataFlow identifier within the monitoring system."),
fieldWithPath("monitoringDashboardInfo.refreshInterval").type(JsonFieldType.NUMBER).description(
- "Provides the time interval (in seconds) for updating the monitoring dashboards.")
+ "Provides the time interval (in seconds) for updating the monitoring dashboards."),
+ fieldWithPath("gitAndBuildInfo").type(JsonFieldType.OBJECT).description(
+ "Provides the git and build info for the Dataflow server"),
+ fieldWithPath("gitAndBuildInfo.git").type(JsonFieldType.OBJECT).description(
+ "Provides the git details for the Dataflow server"),
+ fieldWithPath("gitAndBuildInfo.git.branch").type(JsonFieldType.STRING).description(
+ "Provides the git branch for the Dataflow server"),
+ fieldWithPath("gitAndBuildInfo.git.commit").type(JsonFieldType.OBJECT).description(
+ "Provides the git commit info for the Dataflow server"),
+ fieldWithPath("gitAndBuildInfo.git.commit.id").type(JsonFieldType.OBJECT).description(
+ "Provides the git commit id for the Dataflow server"),
+ fieldWithPath("gitAndBuildInfo.git.commit.id.abbrev").type(JsonFieldType.STRING).description(
+ "Provides the short git commit id for the Dataflow server"),
+ fieldWithPath("gitAndBuildInfo.git.commit.id.full").type(JsonFieldType.STRING).description(
+ "Provides the full git commit id for the Dataflow server"),
+ fieldWithPath("gitAndBuildInfo.git.commit.time").type(JsonFieldType.STRING).description(
+ "Provides the git commit time for the Dataflow server"),
+ fieldWithPath("gitAndBuildInfo.build").type(JsonFieldType.OBJECT).description(
+ "Provides the build details for the Dataflow server"),
+ fieldWithPath("gitAndBuildInfo.build.artifact").type(JsonFieldType.STRING).description(
+ "Provides the build artifact for the Dataflow server"),
+ fieldWithPath("gitAndBuildInfo.build.name").type(JsonFieldType.STRING).description(
+ "Provides the build name for the Dataflow server"),
+ fieldWithPath("gitAndBuildInfo.build.time").type(JsonFieldType.STRING).description(
+ "Provides the build time for the Dataflow server"),
+ fieldWithPath("gitAndBuildInfo.build.version").type(JsonFieldType.STRING).description(
+ "Provides the build version for the Dataflow server"),
+ fieldWithPath("gitAndBuildInfo.build.group").type(JsonFieldType.STRING).description(
+ "Provides the build group for the Dataflow server")
)));
}
}
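The about endpoint documentation now covers a gitAndBuildInfo section with git branch/commit details and build artifact/name/time/version/group fields. A hedged sketch of how such values are typically sourced in a Boot application, from the GitProperties and BuildProperties beans backed by git.properties and build-info.properties; this is an illustration only, the server-side implementation is not part of this patch:

import java.util.LinkedHashMap;
import java.util.Map;

import org.springframework.boot.info.BuildProperties;
import org.springframework.boot.info.GitProperties;

// Sketch (not the actual server code): Boot exposes git.properties and
// build-info.properties, when present, as GitProperties and BuildProperties
// beans, which could back the fields documented above.
public class GitAndBuildInfoSketch {

	public Map<String, Object> gitAndBuildInfo(GitProperties git, BuildProperties build) {
		Map<String, Object> info = new LinkedHashMap<>();
		info.put("git.branch", git.getBranch());
		info.put("git.commit.id.abbrev", git.getShortCommitId());
		info.put("git.commit.id.full", git.getCommitId());
		info.put("git.commit.time", git.getCommitTime());
		info.put("build.artifact", build.getArtifact());
		info.put("build.name", build.getName());
		info.put("build.time", build.getTime());
		info.put("build.version", build.getVersion());
		info.put("build.group", build.getGroup());
		return info;
	}
}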
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/ApiDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/ApiDocumentation.java
index b3f875e032..17628b7566 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/ApiDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/ApiDocumentation.java
@@ -16,9 +16,8 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
-import javax.servlet.RequestDispatcher;
-
-import org.junit.Test;
+import jakarta.servlet.RequestDispatcher;
+import org.junit.jupiter.api.Test;
import org.springframework.cloud.dataflow.rest.Version;
import org.springframework.restdocs.payload.JsonFieldType;
@@ -32,7 +31,6 @@
import static org.springframework.restdocs.payload.PayloadDocumentation.fieldWithPath;
import static org.springframework.restdocs.payload.PayloadDocumentation.responseFields;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@@ -40,18 +38,20 @@
* @author Gunnar Hillert
* @author Christian Tzolov
* @author Ilayaperumal Gopinathan
+ * @author Corneil du Plessis
*/
-public class ApiDocumentation extends BaseDocumentation {
+@SuppressWarnings("NewClassNamingConvention")
+class ApiDocumentation extends BaseDocumentation {
@Test
- public void headers() throws Exception {
+ void headers() throws Exception {
this.mockMvc.perform(get("/")).andExpect(status().isOk())
.andDo(this.documentationHandler.document(responseHeaders(headerWithName("Content-Type")
.description("The Content-Type of the payload, e.g. " + "`application/hal+json`"))));
}
@Test
- public void errors() throws Exception {
+ void errors() throws Exception {
this.mockMvc
.perform(get("/error").requestAttr(RequestDispatcher.ERROR_STATUS_CODE, 400)
.requestAttr(RequestDispatcher.ERROR_REQUEST_URI, "/apps").requestAttr(
@@ -71,8 +71,10 @@ public void errors() throws Exception {
}
@Test
- public void index() throws Exception {
- this.mockMvc.perform(get("/")).andDo(print()).andExpect(status().isOk()).andDo(this.documentationHandler.document(links(
+ void index() throws Exception {
+ this.mockMvc.perform(get("/"))
+ .andExpect(status().isOk())
+ .andDo(this.documentationHandler.document(links(
linkWithRel("about").description(
"Access meta information, including enabled " + "features, security info, version information"),
@@ -102,12 +104,18 @@ public void index() throws Exception {
linkWithRel("runtime/apps/{appId}").description("Exposes the runtime status for a specific app"),
linkWithRel("runtime/apps/{appId}/instances").description("Provides the status for app instances"),
linkWithRel("runtime/apps/{appId}/instances/{instanceId}").description("Provides the status for specific app instance"),
+ linkWithRel("runtime/apps/{appId}/instances/{instanceId}/actuator").description("EXPERIMENTAL: Allows invoking Actuator endpoint on specific app instance"),
+ linkWithRel("runtime/apps/{appId}/instances/{instanceId}/post").description("EXPERIMENTAL: Allows POST on http sink"),
linkWithRel("tasks/definitions").description("Provides the task definition resource"),
linkWithRel("tasks/definitions/definition").description("Provides details for a specific task definition"),
linkWithRel("tasks/validation").description("Provides the validation for a task definition"),
- linkWithRel("tasks/executions").description("Returns Task executions and allows launching of tasks"),
+ linkWithRel("tasks/executions").description("Returns Task executions"),
+ linkWithRel("tasks/executions/launch").description("Provides for launching a Task execution"),
+ linkWithRel("tasks/executions/external").description("Returns Task execution by external id"),
linkWithRel("tasks/executions/current").description("Provides the current count of running tasks"),
+ linkWithRel("tasks/thinexecutions").description("Returns thin Task executions"),
+ linkWithRel("tasks/thinexecutions/name").description("Returns thin Task executions for a given task name"),
linkWithRel("tasks/info/executions").description("Provides the task executions info"),
linkWithRel("tasks/schedules").description("Provides schedule information of tasks"),
linkWithRel("tasks/schedules/instances").description("Provides schedule information of a specific task "),
@@ -141,6 +149,7 @@ public void index() throws Exception {
fieldWithPath("['" + Version.REVISION_KEY + "']").description("Incremented each time a change is implemented in this REST API"),
fieldWithPath("_links.audit-records.href").description("Link to the audit records"),
fieldWithPath("_links.dashboard.href").description("Link to the dashboard"),
+
fieldWithPath("_links.streams/definitions.href").description("Link to the streams/definitions"),
fieldWithPath("_links.streams/definitions/definition.href").description("Link to the streams/definitions/definition"),
fieldWithPath("_links.streams/definitions/definition.templated").type(JsonFieldType.BOOLEAN).optional().description("Link streams/definitions/definition is templated"),
@@ -152,6 +161,11 @@ public void index() throws Exception {
fieldWithPath("_links.runtime/apps/{appId}/instances.templated").type(JsonFieldType.BOOLEAN).optional().description("Link runtime/apps/{appId}/instances is templated"),
fieldWithPath("_links.runtime/apps/{appId}/instances/{instanceId}.href").description("Link to the runtime/apps/{appId}/instances/{instanceId}"),
fieldWithPath("_links.runtime/apps/{appId}/instances/{instanceId}.templated").type(JsonFieldType.BOOLEAN).optional().description("Link runtime/apps/{appId}/instances/{instanceId} is templated"),
+ fieldWithPath("_links.runtime/apps/{appId}/instances/{instanceId}/post.href").description("Link to the runtime/apps/{appId}/instances/{instanceId}/post"),
+ fieldWithPath("_links.runtime/apps/{appId}/instances/{instanceId}/post.templated").type(JsonFieldType.BOOLEAN).optional().description("Link runtime/apps/{appId}/instances/{instanceId}/post is templated"),
+
+ fieldWithPath("_links.runtime/apps/{appId}/instances/{instanceId}/actuator[].href").description("Link to the runtime/apps/{appId}/instances/{instanceId}/actuator"),
+ fieldWithPath("_links.runtime/apps/{appId}/instances/{instanceId}/actuator[].templated").type(JsonFieldType.BOOLEAN).optional().description("Link runtime/apps/{appId}/instances/{instanceId}/actuator is templated"),
fieldWithPath("_links.runtime/streams.href").description("Link to the runtime/streams"),
fieldWithPath("_links.runtime/streams.templated").type(JsonFieldType.BOOLEAN).optional().description("Link runtime/streams is templated"),
@@ -195,11 +209,21 @@ public void index() throws Exception {
fieldWithPath("_links.tasks/definitions/definition.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/definitions/definition is templated"),
fieldWithPath("_links.tasks/executions.href").description("Link to the tasks/executions"),
+ fieldWithPath("_links.tasks/executions/launch.href").description("Link to tasks/executions/launch"),
+ fieldWithPath("_links.tasks/executions/launch.templated").type(JsonFieldType.BOOLEAN).optional().description("Indicates that Link tasks/executions/launch is templated"),
fieldWithPath("_links.tasks/executions/name.href").description("Link to the tasks/executions/name"),
fieldWithPath("_links.tasks/executions/name.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/executions/name is templated"),
fieldWithPath("_links.tasks/executions/current.href").description("Link to the tasks/executions/current"),
fieldWithPath("_links.tasks/executions/execution.href").description("Link to the tasks/executions/execution"),
fieldWithPath("_links.tasks/executions/execution.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/executions/execution is templated"),
+ fieldWithPath("_links.tasks/executions/external.href").description("Link to the tasks/executions/external"),
+ fieldWithPath("_links.tasks/executions/external.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/executions/external is templated"),
+
+ fieldWithPath("_links.tasks/thinexecutions.href").description("Link to the tasks/thinexecutions"),
+
+ fieldWithPath("_links.tasks/thinexecutions/name.href").description("Link to the tasks/thinexecutions/name"),
+ fieldWithPath("_links.tasks/thinexecutions/name.templated").type(JsonFieldType.BOOLEAN).optional().description("Link to the tasks/thinexecutions/name is templated"),
+
fieldWithPath("_links.tasks/info/executions.href").description("Link to the tasks/info/executions"),
fieldWithPath("_links.tasks/info/executions.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/info is templated"),
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AppRegistryDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AppRegistryDocumentation.java
index 1f1a3b3366..22ada75374 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AppRegistryDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AppRegistryDocumentation.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2016-2020 the original author or authors.
+ * Copyright 2016-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -18,7 +18,7 @@
import java.util.Arrays;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import org.springframework.cloud.dataflow.core.ApplicationType;
import org.springframework.http.MediaType;
@@ -32,7 +32,7 @@
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
@@ -42,197 +42,205 @@
* @author Gunnar Hillert
* @author Christian Tzolov
* @author Ilayaperumal Gopinathan
+ * @author Corneil du Plessis
*/
-public class AppRegistryDocumentation extends BaseDocumentation {
-
- @Test
- public void appDefault() throws Exception {
- registerApp(ApplicationType.source, "http", "1.2.0.RELEASE");
- registerApp(ApplicationType.source, "http", "1.3.0.RELEASE");
-
- this.mockMvc.perform(RestDocumentationRequestBuilders
- .put("/apps/{type}/{name}/{version:.+}", ApplicationType.source, "http", "1.2.0.RELEASE").accept(MediaType.APPLICATION_JSON))
- .andExpect(status().isAccepted())
- .andDo(
- this.documentationHandler.document(
- pathParameters(
- parameterWithName("type").description("The type of application. One of " + Arrays.asList(ApplicationType.values())),
- parameterWithName("name").description("The name of the application"),
- parameterWithName("version").description("The version of the application")
- )
- )
- );
- unregisterApp(ApplicationType.source, "http", "1.2.0.RELEASE");
- unregisterApp(ApplicationType.source, "http", "1.3.0.RELEASE");
- }
-
- @Test
- public void registeringAnApplicationVersion() throws Exception {
- this.mockMvc.perform(
- post("/apps/{type}/{name}/{version:.+}", ApplicationType.source, "http", "1.1.0.RELEASE")
- .param("uri", "maven://org.springframework.cloud.stream.app:http-source-rabbit:1.1.0.RELEASE"))
- .andExpect(status().isCreated())
- .andDo(
- this.documentationHandler.document(
- pathParameters(
- parameterWithName("type")
- .description("The type of application to register. One of " + Arrays.asList(ApplicationType.values()) + " (optional)"),
- parameterWithName("name").description("The name of the application to register"),
- parameterWithName("version").description("The version of the application to register")
- ),
- requestParameters(
- parameterWithName("uri").description("URI where the application bits reside"),
- parameterWithName("metadata-uri").optional()
- .description("URI where the application metadata jar can be found"),
- parameterWithName("force").optional()
- .description("Must be true if a registration with the same name and type already exists, otherwise an error will occur")
- )
- )
- );
-
- unregisterApp(ApplicationType.source, "http", "1.1.0.RELEASE");
- }
+@SuppressWarnings("NewClassNamingConvention")
+class AppRegistryDocumentation extends BaseDocumentation {
+ @Test
+ void appDefault() throws Exception {
+ registerApp(ApplicationType.source, "http", "4.0.0");
+ registerApp(ApplicationType.source, "http", "5.0.0");
+
+ this.mockMvc.perform(RestDocumentationRequestBuilders
+ .put("/apps/{type}/{name}/{version:.+}", ApplicationType.source, "http", "4.0.0")
+ .accept(MediaType.APPLICATION_JSON))
+ .andExpect(status().isAccepted())
+ .andDo(
+ this.documentationHandler.document(
+ pathParameters(
+ parameterWithName("type").description("The type of application. One of " + Arrays.asList(ApplicationType.values())),
+ parameterWithName("name").description("The name of the application"),
+ parameterWithName("version").description("The version of the application")
+ )
+ )
+ );
+ unregisterApp(ApplicationType.source, "http", "4.0.0");
+ unregisterApp(ApplicationType.source, "http", "5.0.0");
+ }
@Test
- public void bulkRegisteringApps() throws Exception {
+ void registeringAnApplicationVersion() throws Exception {
this.mockMvc.perform(
- post("/apps")
- .param("apps", "source.http=maven://org.springframework.cloud.stream.app:http-source-rabbit:1.1.0.RELEASE")
- .param("force", "false"))
- .andExpect(status().isCreated())
- .andDo(
- this.documentationHandler.document(
- requestParameters(
- parameterWithName("uri").optional().description("URI where a properties file containing registrations can be fetched. Exclusive with `apps`."),
- parameterWithName("apps").optional().description("Inline set of registrations. Exclusive with `uri`."),
- parameterWithName("force").optional().description("Must be true if a registration with the same name and type already exists, otherwise an error will occur")
- )
- )
- );
- unregisterApp(ApplicationType.source, "http");
+ post("/apps/{type}/{name}/{version:.+}", ApplicationType.source, "http", "4.0.0").queryParam("uri",
+ "maven://org.springframework.cloud.stream.app:http-source-rabbit:4.0.0")
+ ).andExpect(status().isCreated())
+ .andDo(
+ this.documentationHandler.document(
+ pathParameters(
+ parameterWithName("type").optional()
+ .description("The type of application to register. One of " + Arrays.asList(ApplicationType.values())),
+ parameterWithName("name").description("The name of the application to register"),
+ parameterWithName("version").description("The version of the application to register")
+ ),
+ queryParameters(
+ parameterWithName("uri").description("URI where the application bits reside"),
+ parameterWithName("metadata-uri").optional()
+ .description("URI where the application metadata jar can be found"),
+ parameterWithName("force").optional()
+ .description("Must be true if a registration with the same name and type already exists, otherwise an error will occur")
+ )
+ )
+ );
+
+ unregisterApp(ApplicationType.source, "http", "4.0.0");
}
- @Test
- public void getApplicationsFiltered() throws Exception {
- registerApp(ApplicationType.source, "http", "1.2.0.RELEASE");
- registerApp(ApplicationType.source, "time", "1.2.0.RELEASE");
- this.mockMvc.perform(
- get("/apps")
- .param("search", "")
- .param("type", "source").accept(MediaType.APPLICATION_JSON)
- .param("defaultVersion", "true")
- .param("page", "0")
- .param("size", "10")
- .param("sort", "name,ASC")
- )
- .andExpect(status().isOk())
- .andDo(this.documentationHandler.document(
- requestParameters(
- parameterWithName("search").description("The search string performed on the name (optional)"),
- parameterWithName("type")
- .description("Restrict the returned apps to the type of the app. One of " + Arrays.asList(ApplicationType.values())),
- parameterWithName("defaultVersion").description("The boolean flag to set to retrieve only the apps of the default versions (optional)"),
- parameterWithName("page").description("The zero-based page number (optional)"),
- parameterWithName("sort").description("The sort on the list (optional)"),
- parameterWithName("size").description("The requested page size (optional)")
- ),
- responseFields(
- subsectionWithPath("_embedded.appRegistrationResourceList")
- .description("Contains a collection of application"),
- subsectionWithPath("_links.self").description("Link to the applications resource"),
- subsectionWithPath("page").description("Pagination properties")
- )
- ));
-
- unregisterApp(ApplicationType.source, "http");
- unregisterApp(ApplicationType.source, "time");
- }
@Test
- public void getSingleApplication() throws Exception {
- registerApp(ApplicationType.source, "http", "1.2.0.RELEASE");
+ void bulkRegisteringApps() throws Exception {
this.mockMvc.perform(
- get("/apps/{type}/{name}", ApplicationType.source, "http").accept(MediaType.APPLICATION_JSON)
- .param("exhaustive", "false"))
- .andExpect(status().isOk())
- .andDo(
- this.documentationHandler.document(
- pathParameters(
- parameterWithName("type").description("The type of application to query. One of " + Arrays.asList(ApplicationType.values())),
- parameterWithName("name").description("The name of the application to query")
- ),
- requestParameters(
- parameterWithName("exhaustive").optional()
- .description("Return all application properties, including common Spring Boot properties")
- ),
- responseFields(
- fieldWithPath("name").description("The name of the application"),
- fieldWithPath("label").description("The label name of the application"),
- fieldWithPath("type").description("The type of the application. One of " + Arrays.asList(ApplicationType.values())),
- fieldWithPath("uri").description("The uri of the application"),
- fieldWithPath("version").description("The version of the application"),
- fieldWithPath("versions").description("All the registered versions of the application"),
- fieldWithPath("defaultVersion").description("If true, the application is the default version"),
- subsectionWithPath("options").description("The options of the application (Array)"),
- fieldWithPath("shortDescription").description("The description of the application"),
- fieldWithPath("inboundPortNames").description("Inbound port names of the application"),
- fieldWithPath("outboundPortNames").description("Outbound port names of the application")
- )
+ post("/apps")
+ .queryParam("apps", "source.http=maven://org.springframework.cloud.stream.app:http-source-rabbit:4.0.0")
+ .queryParam("force", "false")
)
- );
+ .andExpect(status().isCreated())
+ .andDo(
+ this.documentationHandler.document(
+ queryParameters(
+ parameterWithName("uri").optional().description("URI where a properties file containing registrations can be fetched. Exclusive with `apps`."),
+ parameterWithName("apps").optional().description("Inline set of registrations. Exclusive with `uri`."),
+ parameterWithName("force").optional().description("Must be true if a registration with the same name and type already exists, otherwise an error will occur")
+ )
+ )
+ );
unregisterApp(ApplicationType.source, "http");
}
- @Test
- public void registeringAnApplication() throws Exception {
- this.mockMvc.perform(
- post("/apps/{type}/{name}", ApplicationType.source, "http")
- .param("uri", "maven://org.springframework.cloud.stream.app:http-source-rabbit:1.1.0.RELEASE"))
- .andExpect(status().isCreated())
- .andDo(
- this.documentationHandler.document(
- pathParameters(
- parameterWithName("type").description("The type of application to register. One of " + Arrays.asList(ApplicationType.values())),
- parameterWithName("name").description("The name of the application to register")
- ),
- requestParameters(
- parameterWithName("uri").description("URI where the application bits reside"),
- parameterWithName("metadata-uri").optional().description("URI where the application metadata jar can be found"),
- parameterWithName("force").optional().description("Must be true if a registration with the same name and type already exists, otherwise an error will occur")
- )
+ @Test
+ void getApplicationsFiltered() throws Exception {
+ registerApp(ApplicationType.source, "http", "5.0.0");
+ registerApp(ApplicationType.source, "time", "5.0.0");
+ this.mockMvc.perform(
+ get("/apps")
+ .param("search", "")
+ .param("type", "source").accept(MediaType.APPLICATION_JSON)
+ .param("defaultVersion", "true")
+ .param("page", "0")
+ .param("size", "10")
+ .param("sort", "name,ASC")
)
- );
-
- unregisterApp(ApplicationType.source, "http");
- }
-
- @Test
- public void unregisteringAnApplication() throws Exception {
- registerApp(ApplicationType.source, "http", "1.2.0.RELEASE");
-
- this.mockMvc.perform(
- delete("/apps/{type}/{name}/{version}", ApplicationType.source, "http", "1.2.0.RELEASE"))
- .andExpect(status().isOk())
- .andDo(
- this.documentationHandler.document(
- pathParameters(
- parameterWithName("type").description("The type of application to unregister. One of " + Arrays.asList(ApplicationType.values())),
- parameterWithName("name").description("The name of the application to unregister"),
- parameterWithName("version").description("The version of the application to unregister (optional)")
- )
+ .andExpect(status().isOk())
+ .andDo(this.documentationHandler.document(
+ queryParameters(
+ parameterWithName("search").optional()
+ .description("The search string performed on the name"),
+ parameterWithName("type")
+ .description("Restrict the returned apps to the type of the app. One of " + Arrays.asList(ApplicationType.values())),
+ parameterWithName("defaultVersion").optional().description("The boolean flag to set to retrieve only the apps of the default versions"),
+ parameterWithName("page").optional().description("The zero-based page number"),
+ parameterWithName("sort").optional().description("The sort on the list"),
+ parameterWithName("size").optional().description("The requested page size")
+ ),
+ responseFields(
+ subsectionWithPath("_embedded.appRegistrationResourceList")
+ .description("Contains a collection of application"),
+ subsectionWithPath("_links.self").description("Link to the applications resource"),
+ subsectionWithPath("page").description("Pagination properties")
+ )
+ ));
+
+ unregisterApp(ApplicationType.source, "http");
+ unregisterApp(ApplicationType.source, "time");
+ }
+
+ @Test
+ void getSingleApplication() throws Exception {
+ registerApp(ApplicationType.source, "http", "5.0.0");
+ this.mockMvc.perform(
+ get("/apps/{type}/{name}", ApplicationType.source, "http").accept(MediaType.APPLICATION_JSON)
+ .param("exhaustive", "false"))
+ .andExpect(status().isOk())
+ .andDo(
+ this.documentationHandler.document(
+ pathParameters(
+ parameterWithName("type").description("The type of application to query. One of " + Arrays.asList(ApplicationType.values())),
+ parameterWithName("name").description("The name of the application to query")
+ ),
+ queryParameters(
+ parameterWithName("exhaustive").optional()
+ .description("Return all application properties, including common Spring Boot properties")
+ ),
+ responseFields(
+ fieldWithPath("name").description("The name of the application"),
+ fieldWithPath("label").description("The label name of the application"),
+ fieldWithPath("type").description("The type of the application. One of " + Arrays.asList(ApplicationType.values())),
+ fieldWithPath("uri").description("The uri of the application"),
+ fieldWithPath("metaDataUri").description("The uri of the application metadata").optional(),
+ fieldWithPath("version").description("The version of the application"),
+ fieldWithPath("versions").description("All the registered versions of the application"),
+ fieldWithPath("defaultVersion").description("If true, the application is the default version"),
+ subsectionWithPath("options").description("The options of the application (Array)"),
+ fieldWithPath("shortDescription").description("The description of the application"),
+ fieldWithPath("inboundPortNames").description("Inbound port names of the application"),
+ fieldWithPath("outboundPortNames").description("Outbound port names of the application"),
+ fieldWithPath("optionGroups").description("Option groups of the application")
+ )
+ )
+ );
+ unregisterApp(ApplicationType.source, "http");
+ }
+
+ @Test
+ void registeringAnApplication() throws Exception {
+ this.mockMvc.perform(
+ post("/apps/{type}/{name}", ApplicationType.source, "http")
+ .queryParam("uri", "maven://org.springframework.cloud.stream.app:http-source-rabbit:5.0.0")
)
- );
- }
-
- @Test
- public void unregisteringAllApplications() throws Exception {
- registerApp(ApplicationType.source, "http", "1.2.0.RELEASE");
- registerApp(ApplicationType.source, "http", "1.3.0.RELEASE");
- this.mockMvc.perform(
- delete("/apps"))
- .andExpect(status().isOk()
- );
- }
+ .andExpect(status().isCreated())
+ .andDo(
+ this.documentationHandler.document(
+ pathParameters(
+ parameterWithName("type").description("The type of application to register. One of " + Arrays.asList(ApplicationType.values())),
+ parameterWithName("name").description("The name of the application to register")
+ ),
+ queryParameters(
+ parameterWithName("uri").description("URI where the application bits reside"),
+ parameterWithName("metadata-uri").optional().description("URI where the application metadata jar can be found"),
+ parameterWithName("force").optional().description("Must be true if a registration with the same name and type already exists, otherwise an error will occur")
+ )
+ )
+ );
+
+ unregisterApp(ApplicationType.source, "http");
+ }
+
+ @Test
+ void unregisteringAnApplication() throws Exception {
+ registerApp(ApplicationType.source, "http", "5.0.0");
+
+ this.mockMvc.perform(
+ delete("/apps/{type}/{name}/{version}", ApplicationType.source, "http", "5.0.0"))
+ .andExpect(status().isOk())
+ .andDo(
+ this.documentationHandler.document(
+ pathParameters(
+ parameterWithName("type").description("The type of application to unregister. One of " + Arrays.asList(ApplicationType.values())),
+ parameterWithName("name").description("The name of the application to unregister"),
+ parameterWithName("version").optional().description("The version of the application to unregister")
+ )
+ )
+ );
+ }
+
+ @Test
+ void unregisteringAllApplications() throws Exception {
+ registerApp(ApplicationType.source, "http", "4.0.0");
+ registerApp(ApplicationType.source, "http", "5.0.0");
+ this.mockMvc.perform(
+ delete("/apps"))
+ .andExpect(status().isOk()
+ );
+ }
}
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AuditRecordsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AuditRecordsDocumentation.java
index a9f2c3cbc7..3c54a3cc1e 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AuditRecordsDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AuditRecordsDocumentation.java
@@ -16,10 +16,11 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
-import org.junit.Before;
-import org.junit.FixMethodOrder;
-import org.junit.Test;
-import org.junit.runners.MethodSorters;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.MethodOrderer;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestMethodOrder;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post;
@@ -28,26 +29,21 @@
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
-import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* Documentation for the {@code /audit-records} endpoint.
*
* @author Gunnar Hillert
+ * @author Corneil du Plessis
*/
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-public class AuditRecordsDocumentation extends BaseDocumentation {
-
- private static boolean setUpIsDone = false;
-
- @Before
- public void setup() throws Exception {
- if (setUpIsDone) {
- return;
- }
+@SuppressWarnings("NewClassNamingConvention")
+@TestMethodOrder(MethodOrderer.MethodName.class)
+class AuditRecordsDocumentation extends BaseDocumentation {
+ @BeforeEach
+ void setup() throws Exception {
this.mockMvc.perform(
post("/apps/{type}/time", "source")
.param("uri", "maven://org.springframework.cloud.stream.app:time-source-rabbit:1.2.0.RELEASE"))
@@ -62,11 +58,10 @@ public void setup() throws Exception {
.param("definition", "time --format='YYYY MM DD' | log")
.param("deploy", "false"))
.andExpect(status().isCreated());
- setUpIsDone = true;
}
@Test
- public void listAllAuditRecords() throws Exception {
+ void listAllAuditRecords() throws Exception {
this.mockMvc.perform(
get("/audit-records")
.param("page", "0")
@@ -76,18 +71,17 @@ public void listAllAuditRecords() throws Exception {
.param("fromDate", "2000-01-01T00:00:00")
.param("toDate", "2099-01-01T00:00:00")
)
- .andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- requestParameters(
- parameterWithName("page").description("The zero-based page number (optional)"),
- parameterWithName("size").description("The requested page size (optional)"),
- parameterWithName("operations").description("Comma-separated list of Audit Operations (optional)"),
- parameterWithName("actions").description("Comma-separated list of Audit Actions (optional)"),
- parameterWithName("fromDate")
- .description("From date filter (ex.: 2019-02-03T00:00:30) (optional)"),
- parameterWithName("toDate")
- .description("To date filter (ex.: 2019-02-03T00:00:30) (optional)")
+ queryParameters(
+ parameterWithName("page").optional().description("The zero-based page number"),
+ parameterWithName("size").optional().description("The requested page size"),
+ parameterWithName("operations").optional().description("Comma-separated list of Audit Operations"),
+ parameterWithName("actions").optional().description("Comma-separated list of Audit Actions"),
+ parameterWithName("fromDate").optional()
+ .description("From date filter (ex.: 2019-02-03T00:00:30)"),
+ parameterWithName("toDate").optional()
+ .description("To date filter (ex.: 2019-02-03T00:00:30)")
),
responseFields(
subsectionWithPath("_embedded.auditRecordResourceList")
@@ -97,18 +91,17 @@ public void listAllAuditRecords() throws Exception {
}
@Test
- public void getAuditRecord() throws Exception {
+ void getAuditRecord() throws Exception {
this.mockMvc.perform(
get("/audit-records/{id}", "5"))
- .andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
pathParameters(
- parameterWithName("id").description("The id of the audit record to query (required)")
+ parameterWithName("id").description("The id of the audit record to query")
),
responseFields(
fieldWithPath("auditRecordId").description("The id of the audit record"),
- fieldWithPath("createdBy").description("The author of the audit record (optional)"),
+ fieldWithPath("createdBy").optional().description("The author of the audit record"),
fieldWithPath("correlationId").description("The correlation ID of the audit record"),
fieldWithPath("auditData").description("The data of the audit record"),
fieldWithPath("createdOn").description("The creation date of the audit record"),
@@ -121,19 +114,17 @@ public void getAuditRecord() throws Exception {
}
@Test
- public void getAuditActionTypes() throws Exception {
+ void getAuditActionTypes() throws Exception {
this.mockMvc.perform(
get("/audit-records/audit-action-types"))
- .andDo(print())
.andExpect(status().isOk()
);
}
@Test
- public void getAuditOperationTypes() throws Exception {
+ void getAuditOperationTypes() throws Exception {
this.mockMvc.perform(
get("/audit-records/audit-operation-types"))
- .andDo(print())
.andExpect(status().isOk()
);
}
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/BaseDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/BaseDocumentation.java
index 76318492e5..acaffed639 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/BaseDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/BaseDocumentation.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2016-2018 the original author or authors.
+ * Copyright 2016-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,7 +17,7 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
import java.util.ArrayList;
-import java.util.Arrays;
+import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -25,9 +25,9 @@
import javax.sql.DataSource;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Rule;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.api.extension.RegisterExtension;
import org.mockito.ArgumentMatchers;
import org.springframework.cloud.dataflow.core.ApplicationType;
@@ -36,6 +36,7 @@
import org.springframework.cloud.dataflow.server.controller.TaskSchedulerController;
import org.springframework.cloud.dataflow.server.service.SchedulerService;
import org.springframework.cloud.dataflow.server.single.LocalDataflowResource;
+import org.springframework.cloud.deployer.spi.app.ActuatorOperations;
import org.springframework.cloud.deployer.spi.app.AppDeployer;
import org.springframework.cloud.deployer.spi.scheduler.ScheduleInfo;
import org.springframework.cloud.deployer.spi.scheduler.ScheduleRequest;
@@ -47,9 +48,11 @@
import org.springframework.cloud.skipper.domain.Status;
import org.springframework.cloud.skipper.domain.StatusCode;
import org.springframework.cloud.skipper.domain.VersionInfo;
+import org.springframework.context.ApplicationContext;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
-import org.springframework.restdocs.JUnitRestDocumentation;
+import org.springframework.restdocs.RestDocumentationContextProvider;
+import org.springframework.restdocs.RestDocumentationExtension;
import org.springframework.restdocs.mockmvc.RestDocumentationResultHandler;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.test.web.servlet.MockMvc;
@@ -73,16 +76,19 @@
* @author Gunnar Hillert
* @author Ilayaperumal Gopinathan
* @author Glenn Renfro
+ * @author Corneil du Plessis
*/
+@ExtendWith(RestDocumentationExtension.class)
public abstract class BaseDocumentation {
private static String skipperServerPort;
- @ClassRule
+ @RegisterExtension
public final static LocalDataflowResource springDataflowServer = new LocalDataflowResource(
- "classpath:rest-docs-config.yml", true, true, true, true, skipperServerPort);
- @Before
- public void setupMocks() throws Exception{
+ "classpath:rest-docs-config.yml", true, true, true, true, skipperServerPort);
+
+ @BeforeEach
+ public void setupMocks(RestDocumentationContextProvider restDocumentationContextProvider) throws Exception {
reset(springDataflowServer.getSkipperClient());
AboutResource about = new AboutResource();
@@ -98,19 +104,17 @@ public void setupMocks() throws Exception{
info.getStatus().setStatusCode(StatusCode.UNKNOWN);
when(springDataflowServer.getSkipperClient().status(ArgumentMatchers.anyString())).thenReturn(info);
- Deployer deployer = new Deployer("default", "local", mock(AppDeployer.class));
- when(springDataflowServer.getSkipperClient().listDeployers()).thenReturn(Arrays.asList(deployer));
+ Deployer deployer = new Deployer("default", "local", mock(AppDeployer.class), mock(ActuatorOperations.class));
+ when(springDataflowServer.getSkipperClient().listDeployers()).thenReturn(Collections.singletonList(deployer));
when(springDataflowServer.getSkipperClient().search(ArgumentMatchers.anyString(), ArgumentMatchers.anyBoolean())).thenReturn(new ArrayList<>());
- this.prepareDocumentationTests(springDataflowServer.getWebApplicationContext());
+ this.prepareDocumentationTests(springDataflowServer.getWebApplicationContext(),
+ restDocumentationContextProvider);
}
public static final String TARGET_DIRECTORY = "target/generated-snippets";
- @Rule
- public JUnitRestDocumentation restDocumentation = new JUnitRestDocumentation(TARGET_DIRECTORY);
-
protected MockMvc mockMvc;
protected RestDocumentationResultHandler documentationHandler;
@@ -119,74 +123,80 @@ public void setupMocks() throws Exception{
protected DataSource dataSource;
- protected void prepareDocumentationTests(WebApplicationContext context) throws Exception{
+ protected ApplicationContext context;
+
+ protected void prepareDocumentationTests(WebApplicationContext context,
+ RestDocumentationContextProvider restDocumentationContextProvider) {
+ this.context = context;
this.documentationHandler = document("{class-name}/{method-name}", preprocessResponse(prettyPrint()));
this.documentation = new ToggleableResultHandler(documentationHandler);
this.mockMvc = MockMvcBuilders.webAppContextSetup(context)
- .apply(documentationConfiguration(this.restDocumentation).uris().withPort(9393))
- .alwaysDo((ToggleableResultHandler)this.documentation).build();
+ .apply(documentationConfiguration(restDocumentationContextProvider).uris().withPort(9393))
+ .alwaysDo((ToggleableResultHandler) this.documentation).build();
this.dataSource = springDataflowServer.getWebApplicationContext().getBean(DataSource.class);
- TaskSchedulerController controller = this.springDataflowServer.getWebApplicationContext().getBean(TaskSchedulerController.class);
+ TaskSchedulerController controller = springDataflowServer.getWebApplicationContext()
+ .getBean(TaskSchedulerController.class);
ReflectionTestUtils.setField(controller, "schedulerService", schedulerService());
- TaskPlatform taskPlatform = this.springDataflowServer.getWebApplicationContext().getBean(TaskPlatform.class);
+ TaskPlatform taskPlatform = springDataflowServer.getWebApplicationContext().getBean(TaskPlatform.class);
Launcher launcher = taskPlatform.getLaunchers().stream().filter(launcherToFilter -> launcherToFilter.getName().equals("default")).findFirst().get();
ReflectionTestUtils.setField(launcher, "scheduler", localTestScheduler());
}
/**
* Can be used by subclasses to easily register dummy apps, as most endpoints require apps to be effective
- * @param type the type of app to register
- * @param name the name of the app to register
+ *
+ * @param type the type of app to register
+ * @param name the name of the app to register
* @param version the version to register
*/
void registerApp(ApplicationType type, String name, String version) throws Exception {
- String group = type == ApplicationType.task ? "org.springframework.cloud.task.app" : "org.springframework.cloud.stream.app";
+ String group = type == ApplicationType.task ? "io.spring" : "org.springframework.cloud.stream.app";
String binder = type == ApplicationType.task ? "" : "-rabbit";
documentation.dontDocument(
() -> this.mockMvc.perform(
- post(String.format("/apps/%s/%s/%s", type, name, version))
- .param("uri", String.format("maven://%s:%s-%s%s:%s", group, name, type, binder, version)))
+ post(String.format("/apps/%s/%s/%s", type, name, version))
+ .param("uri", String.format("maven://%s:%s-%s%s:%s", group, name, type, binder, version)))
.andExpect(status().isCreated())
);
}
void unregisterApp(ApplicationType type, String name) throws Exception {
documentation.dontDocument(
- () -> this.mockMvc.perform(
- delete(String.format("/apps/%s/%s", type, name))
- )
- .andExpect(status().isOk())
+ () -> this.mockMvc.perform(
+ delete(String.format("/apps/%s/%s", type, name))
+ )
+ .andExpect(status().isOk())
);
}
void unregisterApp(ApplicationType type, String name, String version) throws Exception {
documentation.dontDocument(
- () -> this.mockMvc.perform(
- delete(String.format("/apps/%s/%s/%s", type, name, version))
- )
- .andExpect(status().isOk())
+ () -> this.mockMvc.perform(
+ delete(String.format("/apps/%s/%s/%s", type, name, version))
+ )
+ .andExpect(status().isOk())
);
}
- void createStream(String name, String definition, boolean deploy) throws Exception{
+ void createStream(String name, String definition, boolean deploy) throws Exception {
documentation.dontDocument(
- () -> this.mockMvc.perform(
- post("/streams/definitions")
- .param("name", name)
- .param("definition", definition)
- .param("deploy", String.valueOf(deploy)))
- .andExpect(status().isCreated())
+ () -> this.mockMvc.perform(
+ post("/streams/definitions")
+ .param("name", name)
+ .param("definition", definition)
+ .param("deploy", String.valueOf(deploy)))
+ .andExpect(status().isCreated())
);
}
- void destroyStream(String name) throws Exception{
+ void destroyStream(String name) throws Exception {
documentation.dontDocument(
- () -> this.mockMvc.perform(
- delete("/streams/definitions/{name}", name))
- .andExpect(status().isOk())
+ () -> this.mockMvc.perform(
+ delete("/streams/definitions/{name}", name))
+ .andExpect(status().isOk())
);
}
@@ -194,6 +204,7 @@ void destroyStream(String name) throws Exception{
* A {@link ResultHandler} that can be turned off and on.
*
* @author Eric Bottard
+ * @author Corneil du Plessis
*/
private static class ToggleableResultHandler implements ResultHandler, RestDocs {
private final ResultHandler delegate;
@@ -230,6 +241,7 @@ public void dontDocument(Callable<?> action) throws Exception {
* are not documented.
*
* @author Eric Bottard
+ * @author Corneil du Plessis
*/
@FunctionalInterface
public interface RestDocs {
@@ -240,8 +252,8 @@ public SchedulerService schedulerService() {
return new SchedulerService() {
@Override
public void schedule(String scheduleName, String taskDefinitionName,
- Map taskProperties, List commandLineArgs,
- String platformName) {
+ Map<String, String> taskProperties, List<String> commandLineArgs,
+ String platformName) {
}
@Override
@@ -264,7 +276,7 @@ public void unscheduleForTaskDefinition(String taskDefinitionName) {
@Override
public List<ScheduleInfo> list(Pageable pageable, String taskDefinitionName,
- String platformName) {
+ String platformName) {
return null;
}
@@ -316,7 +328,7 @@ private List<ScheduleInfo> getSampleList() {
scheduleInfo.setScheduleName("FOO");
scheduleInfo.setTaskDefinitionName("BAR");
Map<String, String> props = new HashMap<>(1);
- props.put("scheduler.AAA.spring.cloud.scheduler.cron.expression", "00 41 17 ? * *");
+ props.put("deployer.AAA.spring.cloud.scheduler.cron.expression", "00 41 17 ? * *");
scheduleInfo.setScheduleProperties(props);
result.add(scheduleInfo);
return result;
@@ -345,4 +357,4 @@ public List<ScheduleInfo> list() {
}
};
}
- }
+}
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobExecutionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobExecutionsDocumentation.java
index ee3257a325..67011d046d 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobExecutionsDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobExecutionsDocumentation.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2020 the original author or authors.
+ * Copyright 2017-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,40 +16,36 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
+import java.time.LocalDateTime;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameters;
+import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
+import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRepository;
-import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean;
-import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory;
-import org.springframework.batch.item.database.support.DefaultDataFieldMaxValueIncrementerFactory;
+import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.dataflow.core.ApplicationType;
import org.springframework.cloud.dataflow.core.TaskManifest;
import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao;
-import org.springframework.cloud.dataflow.server.repository.JdbcDataflowTaskExecutionMetadataDao;
import org.springframework.cloud.task.batch.listener.TaskBatchDao;
-import org.springframework.cloud.task.batch.listener.support.JdbcTaskBatchDao;
import org.springframework.cloud.task.repository.TaskExecution;
import org.springframework.cloud.task.repository.dao.TaskExecutionDao;
-import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean;
-import org.springframework.http.MediaType;
import org.springframework.jdbc.core.JdbcTemplate;
-import org.springframework.jdbc.datasource.DataSourceTransactionManager;
+import org.springframework.restdocs.payload.JsonFieldType;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.junit4.SpringRunner;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.put;
@@ -58,306 +54,306 @@
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+
/**
* Documentation for the /jobs/executions endpoint.
*
* @author Glenn Renfro
+ * @author Corneil du Plessis
*/
-@RunWith(SpringRunner.class)
-@SpringBootTest(classes = { EmbeddedDataSourceConfiguration.class })
+@SuppressWarnings("NewClassNamingConvention")
+@SpringBootTest(classes = {EmbeddedDataSourceConfiguration.class})
@DirtiesContext
-public class JobExecutionsDocumentation extends BaseDocumentation {
+class JobExecutionsDocumentation extends BaseDocumentation {
private final static String JOB_NAME = "DOCJOB";
- private static boolean initialized;
private JobRepository jobRepository;
- private TaskExecutionDao dao;
+
+ private TaskExecutionDao taskExecutionDao;
+
private TaskBatchDao taskBatchDao;
+
private JdbcTemplate jdbcTemplate;
+ private DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao;
- @Before
- public void setup() throws Exception {
- if (!initialized) {
- registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE");
- initialize();
- createJobExecution(JOB_NAME, BatchStatus.STARTED);
- createJobExecution(JOB_NAME + "1", BatchStatus.STOPPED);
+ @BeforeEach
+ void setup() throws Exception {
+ registerApp(ApplicationType.task, "timestamp", "3.0.0");
+ initialize();
+ createJobExecution(JOB_NAME, BatchStatus.STARTED);
+ createJobExecution(JOB_NAME + "1", BatchStatus.STOPPED);
- jdbcTemplate = new JdbcTemplate(this.dataSource);
- jdbcTemplate.afterPropertiesSet();
- jdbcTemplate.update(
- "INSERT into task_deployment(id, object_version, task_deployment_id, task_definition_name, platform_name, created_on) " +
- "values (?,?,?,?,?,?)",
- 1, 1, "2", JOB_NAME + "_1", "default", new Date());
- documentation.dontDocument(() -> this.mockMvc.perform(
- post("/tasks/definitions")
- .param("name", "DOCJOB1")
- .param("definition", "timestamp --format='YYYY MM DD'"))
- .andExpect(status().isOk()));
+ jdbcTemplate = new JdbcTemplate(this.dataSource);
+ jdbcTemplate.afterPropertiesSet();
+ jdbcTemplate.update(
+ "INSERT into task_deployment(id, object_version, task_deployment_id, task_definition_name, platform_name, created_on) "
+ + "values (?,?,?,?,?,?)",
+ 1, 1, "2", JOB_NAME + "_1", "default", new Date());
- initialized = true;
- }
+ documentation.dontDocument(
+ () -> this.mockMvc
+ .perform(post("/tasks/definitions").queryParam("name", "DOCJOB1")
+ .queryParam("definition", "timestamp --format='YYYY MM DD'"))
+ .andExpect(status().isOk()));
}
@Test
- public void listJobExecutions() throws Exception {
+ void listJobExecutions() throws Exception {
this.mockMvc.perform(
- get("/jobs/executions")
- .param("page", "0")
- .param("size", "10"))
- .andDo(print())
+ get("/jobs/executions")
+ .queryParam("page", "0")
+ .queryParam("size", "10"))
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
- parameterWithName("page")
- .description("The zero-based page number (optional)"),
- parameterWithName("size")
- .description("The requested page size (optional)")),
- responseFields(
- subsectionWithPath("_embedded.jobExecutionResourceList")
- .description("Contains a collection of Job Executions/"),
- subsectionWithPath("_links.self").description("Link to the job execution resource"),
- subsectionWithPath("page").description("Pagination properties")
+ queryParameters(
+ parameterWithName("page").optional()
+ .description("The zero-based page number"),
+ parameterWithName("size").optional()
+ .description("The requested page size")),
+ responseFields(
+ subsectionWithPath("_embedded.jobExecutionResourceList")
+ .description("Contains a collection of Job Executions/"),
+ subsectionWithPath("_links.self").description("Link to the job execution resource"),
+ subsectionWithPath("page").description("Pagination properties")
)));
}
@Test
- public void listThinJobExecutions() throws Exception {
+ void listThinJobExecutions() throws Exception {
this.mockMvc.perform(
- get("/jobs/thinexecutions")
- .param("page", "0")
- .param("size", "10"))
- .andDo(print())
+ get("/jobs/thinexecutions")
+ .queryParam("page", "0")
+ .queryParam("size", "10"))
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
- parameterWithName("page")
- .description("The zero-based page number (optional)"),
- parameterWithName("size")
- .description("The requested page size (optional)")),
- responseFields(
- subsectionWithPath("_embedded.jobExecutionThinResourceList")
- .description("Contains a collection of Job Executions without step executions included/"),
- subsectionWithPath("_links.self").description("Link to the job execution resource"),
- subsectionWithPath("page").description("Pagination properties")
- )));
+ queryParameters(
+ parameterWithName("page").optional()
+ .description("The zero-based page number"),
+ parameterWithName("size").optional()
+ .description("The requested page size")),
+ responseFields(
+ subsectionWithPath("_embedded.jobExecutionThinResourceList")
+ .description("Contains a collection of Job Executions without step executions included/"),
+ subsectionWithPath("_links.self").description("Link to the job execution resource"),
+ subsectionWithPath("page").description("Pagination properties")
+ )));
}
@Test
- public void listThinJobExecutionsByJobInstanceId() throws Exception {
+ void listThinJobExecutionsByJobInstanceId() throws Exception {
this.mockMvc.perform(
- get("/jobs/thinexecutions")
- .param("page", "0")
- .param("size", "10")
- .param("jobInstanceId", "1"))
- .andDo(print())
+ get("/jobs/thinexecutions")
+ .queryParam("page", "0")
+ .queryParam("size", "10")
+ .queryParam("jobInstanceId", "1"))
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
- parameterWithName("page")
- .description("The zero-based page number (optional)"),
- parameterWithName("size")
- .description("The requested page size (optional)"),
- parameterWithName("jobInstanceId")
- .description("Filter result by the job instance id")),
- responseFields(
- subsectionWithPath("_embedded.jobExecutionThinResourceList")
- .description("Contains a collection of Job Executions without step executions included/"),
- subsectionWithPath("_links.self").description("Link to the job execution resource"),
- subsectionWithPath("page").description("Pagination properties")
- )));
+ queryParameters(
+ parameterWithName("page").optional()
+ .description("The zero-based page number"),
+ parameterWithName("size").optional()
+ .description("The requested page size"),
+ parameterWithName("jobInstanceId")
+ .description("Filter result by the job instance id")),
+ responseFields(
+ subsectionWithPath("_embedded.jobExecutionThinResourceList")
+ .description("Contains a collection of Job Executions without step executions included/"),
+ subsectionWithPath("_links.self").description("Link to the job execution resource"),
+ subsectionWithPath("page").description("Pagination properties")
+ )));
}
@Test
- public void listThinJobExecutionsByTaskExecutionId() throws Exception {
+ void listThinJobExecutionsByTaskExecutionId() throws Exception {
this.mockMvc.perform(
- get("/jobs/thinexecutions")
- .param("page", "0")
- .param("size", "10")
- .param("taskExecutionId", "1"))
- .andDo(print())
+ get("/jobs/thinexecutions")
+ .queryParam("page", "0")
+ .queryParam("size", "10")
+ .queryParam("taskExecutionId", "1"))
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
- parameterWithName("page")
- .description("The zero-based page number (optional)"),
- parameterWithName("size")
- .description("The requested page size (optional)"),
- parameterWithName("taskExecutionId")
- .description("Filter result by the task execution id")),
- responseFields(
- subsectionWithPath("_embedded.jobExecutionThinResourceList")
- .description("Contains a collection of Job Executions without step executions included/"),
- subsectionWithPath("_links.self").description("Link to the job execution resource"),
- subsectionWithPath("page").description("Pagination properties")
- )));
+ queryParameters(
+ parameterWithName("page").optional()
+ .description("The zero-based page number"),
+ parameterWithName("size").optional()
+ .description("The requested page size"),
+ parameterWithName("taskExecutionId")
+ .description("Filter result by the task execution id")),
+ responseFields(
+ subsectionWithPath("_embedded.jobExecutionThinResourceList")
+ .description("Contains a collection of Job Executions without step executions included/"),
+ subsectionWithPath("_links.self").description("Link to the job execution resource"),
+ subsectionWithPath("page").description("Pagination properties")
+ )));
}
@Test
- public void listThinJobExecutionsByDate() throws Exception {
+ void listThinJobExecutionsByDate() throws Exception {
this.mockMvc.perform(
- get("/jobs/thinexecutions")
- .param("page", "0")
- .param("size", "10")
- .param("fromDate", "2000-09-24T17:00:45,000")
- .param("toDate", "2050-09-24T18:00:45,000"))
- .andDo(print())
+ get("/jobs/thinexecutions")
+ .queryParam("page", "0")
+ .queryParam("size", "10")
+ .queryParam("fromDate", "2000-09-24T17:00:45,000")
+ .queryParam("toDate", "2050-09-24T18:00:45,000"))
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
- parameterWithName("page")
- .description("The zero-based page number (optional)"),
- parameterWithName("size")
- .description("The requested page size (optional)"),
- parameterWithName("fromDate")
- .description("Filter result from a starting date in the format 'yyyy-MM-dd'T'HH:mm:ss,SSS'"),
- parameterWithName("toDate")
- .description("Filter result up to the `to` date in the format 'yyyy-MM-dd'T'HH:mm:ss,SSS'")),
- responseFields(
- subsectionWithPath("_embedded.jobExecutionThinResourceList")
- .description("Contains a collection of Job Executions without step executions included/"),
- subsectionWithPath("_links.self").description("Link to the job execution resource"),
- subsectionWithPath("page").description("Pagination properties")
- )));
+ queryParameters(
+ parameterWithName("page").optional()
+ .description("The zero-based page number"),
+ parameterWithName("size").optional()
+ .description("The requested page size"),
+ parameterWithName("fromDate")
+ .description("Filter result from a starting date in the format 'yyyy-MM-dd'T'HH:mm:ss,SSS'"),
+ parameterWithName("toDate")
+ .description("Filter result up to the `to` date in the format 'yyyy-MM-dd'T'HH:mm:ss,SSS'")),
+ responseFields(
+ subsectionWithPath("_embedded.jobExecutionThinResourceList")
+ .description("Contains a collection of Job Executions without step executions included/"),
+ subsectionWithPath("_links.self").description("Link to the job execution resource"),
+ subsectionWithPath("page").description("Pagination properties")
+ )));
}
@Test
- public void listJobExecutionsByName() throws Exception {
+ void listJobExecutionsByName() throws Exception {
this.mockMvc.perform(
- get("/jobs/executions")
- .param("name", JOB_NAME)
- .param("page", "0")
- .param("size", "10"))
- .andDo(print())
+ get("/jobs/executions")
+ .queryParam("name", JOB_NAME)
+ .queryParam("page", "0")
+ .queryParam("size", "10"))
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
- parameterWithName("page")
- .description("The zero-based page number (optional)"),
- parameterWithName("size")
- .description("The requested page size (optional)"),
- parameterWithName("name")
- .description("The name associated with the job execution")),
- responseFields(
- subsectionWithPath("_embedded.jobExecutionResourceList")
- .description("Contains a collection of Job Executions/"),
- subsectionWithPath("_links.self").description("Link to the job execution resource"),
- subsectionWithPath("page").description("Pagination properties")
+ queryParameters(
+ parameterWithName("page").optional()
+ .description("The zero-based page number"),
+ parameterWithName("size").optional()
+ .description("The requested page size"),
+ parameterWithName("name")
+ .description("The name associated with the job execution")),
+ responseFields(
+ subsectionWithPath("_embedded.jobExecutionResourceList")
+ .description("Contains a collection of Job Executions/"),
+ subsectionWithPath("_links.self").description("Link to the job execution resource"),
+ subsectionWithPath("page").description("Pagination properties")
)));
}
@Test
- public void listThinJobExecutionsByName() throws Exception {
+ void listThinJobExecutionsByName() throws Exception {
this.mockMvc.perform(
- get("/jobs/thinexecutions")
- .param("name", JOB_NAME)
- .param("page", "0")
- .param("size", "10"))
- .andDo(print())
+ get("/jobs/thinexecutions")
+ .queryParam("name", JOB_NAME)
+ .queryParam("page", "0")
+ .queryParam("size", "10"))
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
- parameterWithName("page")
- .description("The zero-based page number (optional)"),
- parameterWithName("size")
- .description("The requested page size (optional)"),
- parameterWithName("name")
- .description("The name associated with the job execution")),
- responseFields(
- subsectionWithPath("_embedded.jobExecutionThinResourceList")
- .description("Contains a collection of Job Executions without step executions included/"),
- subsectionWithPath("_links.self").description("Link to the job execution resource"),
- subsectionWithPath("page").description("Pagination properties")
- )));
+ queryParameters(
+ parameterWithName("page").optional()
+ .description("The zero-based page number"),
+ parameterWithName("size").optional()
+ .description("The requested page size"),
+ parameterWithName("name")
+ .description("The name associated with the job execution")),
+ responseFields(
+ subsectionWithPath("_embedded.jobExecutionThinResourceList")
+ .description("Contains a collection of Job Executions without step executions included/"),
+ subsectionWithPath("_links.self").description("Link to the job execution resource"),
+ subsectionWithPath("page").description("Pagination properties")
+ )));
}
@Test
- public void jobDisplayDetail() throws Exception {
+ void jobDisplayDetail() throws Exception {
this.mockMvc.perform(
- get("/jobs/executions/{id}", "2"))
- .andDo(print())
- .andExpect(status().isOk())
- .andDo(this.documentationHandler.document(
- pathParameters(
- parameterWithName("id").description("The id of an existing job execution (required)")
- ),
- responseFields(
- fieldWithPath("executionId").description("The execution ID of the job execution"),
- fieldWithPath("stepExecutionCount").description("the number of step of the job execution"),
- fieldWithPath("jobId").description("The job ID of the job execution"),
- fieldWithPath("taskExecutionId").description("The task execution ID of the job execution"),
- fieldWithPath("name").description("The name of the job execution"),
- fieldWithPath("startDate").description("The start date of the job execution"),
- fieldWithPath("startTime").description("The start time of the job execution"),
- fieldWithPath("duration").description("The duration of the job execution"),
- fieldWithPath("jobParameters").description("The parameters of the job execution"),
- fieldWithPath("jobParametersString").description("The parameters string of the job execution"),
- fieldWithPath("restartable").description("The status restartable of the job execution"),
- fieldWithPath("abandonable").description("The status abandonable of the job execution"),
- fieldWithPath("stoppable").description("The status stoppable of the job execution"),
- fieldWithPath("defined").description("The status defined of the job execution"),
- fieldWithPath("timeZone").description("The time zone of the job execution"),
- subsectionWithPath("jobExecution").description("The details of the job execution"),
- subsectionWithPath("jobParameters").description("The job parameters associated with the job execution"),
- subsectionWithPath("_links.self").description("Link to the stream definition resource")
+ get("/jobs/executions/{id}", "2")
)
- ));
+ .andExpect(status().isOk())
+ .andDo(this.documentationHandler.document(
+ pathParameters(
+ parameterWithName("id").description("The id of an existing job execution")
+ ),
+ responseFields(
+ fieldWithPath("executionId").description("The execution ID of the job execution"),
+ fieldWithPath("stepExecutionCount").description("the number of step of the job execution"),
+ fieldWithPath("jobId").description("The job ID of the job execution"),
+ fieldWithPath("taskExecutionId").description("The task execution ID of the job execution"),
+ fieldWithPath("name").description("The name of the job execution"),
+ fieldWithPath("startDate").description("The start date of the job execution"),
+ fieldWithPath("startTime").description("The start time of the job execution"),
+ fieldWithPath("duration").description("The duration of the job execution"),
+ fieldWithPath("jobParameters").description("The parameters of the job execution"),
+ fieldWithPath("jobParametersString").description("The parameters string of the job execution"),
+ fieldWithPath("restartable").description("The status restartable of the job execution"),
+ fieldWithPath("abandonable").description("The status abandonable of the job execution"),
+ fieldWithPath("stoppable").description("The status stoppable of the job execution"),
+ fieldWithPath("defined").description("The status defined of the job execution"),
+ fieldWithPath("timeZone").description("The time zone of the job execution"),
+ subsectionWithPath("jobExecution").description("The details of the job execution"),
+ subsectionWithPath("jobParameters").description("The job parameters associated with the job execution"),
+ subsectionWithPath("_links.self").description("Link to the stream definition resource"),
+ subsectionWithPath("_links.stop").type(JsonFieldType.OBJECT).description("Link to stopping the job").optional(),
+ subsectionWithPath("_links.restart").type(JsonFieldType.OBJECT).description("Link to restarting the job").optional()
+ )
+ ));
}
@Test
- public void jobStop() throws Exception {
- this.mockMvc.perform(put("/jobs/executions/{id}", "1").accept(MediaType.APPLICATION_JSON).param("stop", "true"))
- .andDo(print())
+ void jobStop() throws Exception {
+ this.mockMvc.perform(put("/jobs/executions/{id}", "1")
+ .queryParam("stop", "true")
+ )
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
pathParameters(parameterWithName("id")
- .description("The id of an existing job execution (required)"))
- , requestParameters(
- parameterWithName("stop")
+ .description("The id of an existing job execution"))
+ , queryParameters(
+ parameterWithName("stop").optional()
.description("Sends signal to stop the job if set to true"))));
}
@Test
- public void jobRestart() throws Exception {
- this.mockMvc.perform(put("/jobs/executions/{id}", "2").accept(MediaType.APPLICATION_JSON).param("restart", "true"))
- .andDo(print())
+ void jobRestart() throws Exception {
+ this.mockMvc.perform(put("/jobs/executions/{id}", "2")
+ .queryParam("restart", "true")
+ .queryParam("useJsonJobParameters", "true")
+ )
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- pathParameters(parameterWithName("id")
- .description("The id of an existing job execution (required)"))
- , requestParameters(
- parameterWithName("restart")
- .description("Sends signal to restart the job if set to true"))));
+ pathParameters(parameterWithName("id")
+ .description("The id of an existing job execution"))
+ , queryParameters(
+ parameterWithName("useJsonJobParameters").description("If true dataflow will " +
+ "serialize job parameters as JSON. Default is null, and the default " +
+ "configuration will be used to determine serialization method.").optional(),
+ parameterWithName("restart").optional()
+ .description("Sends signal to restart the job if set to true")
+ )
+ )
+ );
}
- private void initialize() throws Exception {
- JobRepositoryFactoryBean repositoryFactoryBean = new JobRepositoryFactoryBean();
- repositoryFactoryBean.setDataSource(this.dataSource);
- repositoryFactoryBean.setTransactionManager(new DataSourceTransactionManager(this.dataSource));
- this.jobRepository = repositoryFactoryBean.getObject();
- this.dao = (new TaskExecutionDaoFactoryBean(this.dataSource)).getObject();
- this.taskBatchDao = new JdbcTaskBatchDao(this.dataSource);
+ private void initialize() {
+ this.taskExecutionDao = context.getBean(TaskExecutionDao.class);
+ this.taskBatchDao = context.getBean(TaskBatchDao.class);
+ this.jobRepository = context.getBean(JobRepository.class);
+ this.dataflowTaskExecutionMetadataDao = context.getBean(DataflowTaskExecutionMetadataDao.class);
}
- private void createJobExecution(String name, BatchStatus status) {
- TaskExecution taskExecution = this.dao.createTaskExecution(name, new Date(), Collections.singletonList("--spring.cloud.data.flow.platformname=default"), null);
- Map jobParameterMap = new HashMap<>();
+ private void createJobExecution(String name, BatchStatus status) throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
+ TaskExecution taskExecution = taskExecutionDao.createTaskExecution(name, LocalDateTime.now(), Collections.singletonList("--spring.cloud.data.flow.platformname=default"), null);
+ Map<String, JobParameter<?>> jobParameterMap = new HashMap<>();
JobParameters jobParameters = new JobParameters(jobParameterMap);
- JobExecution jobExecution = this.jobRepository.createJobExecution(this.jobRepository.createJobInstance(name, new JobParameters()), jobParameters, null);
- this.taskBatchDao.saveRelationship(taskExecution, jobExecution);
+ JobExecution jobExecution = this.jobRepository.createJobExecution(name, jobParameters);
+ taskBatchDao.saveRelationship(taskExecution, jobExecution);
jobExecution.setStatus(status);
- jobExecution.setStartTime(new Date());
+ jobExecution.setStartTime(LocalDateTime.now());
this.jobRepository.update(jobExecution);
- TaskManifest manifest = new TaskManifest();
+ final TaskManifest manifest = new TaskManifest();
manifest.setPlatformName("default");
- DataFieldMaxValueIncrementerFactory incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory(dataSource);
-
- DataflowTaskExecutionMetadataDao metadataDao = new JdbcDataflowTaskExecutionMetadataDao(
- dataSource, incrementerFactory.getIncrementer("h2", "task_execution_metadata_seq"));
+ assertThat(dataflowTaskExecutionMetadataDao).isNotNull();
TaskManifest taskManifest = new TaskManifest();
taskManifest.setPlatformName("default");
- metadataDao.save(taskExecution, taskManifest);
+ dataflowTaskExecutionMetadataDao.save(taskExecution, taskManifest);
}
-
}
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobInstancesDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobInstancesDocumentation.java
index c789368bc3..3cc14b2d13 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobInstancesDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobInstancesDocumentation.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017 the original author or authors.
+ * Copyright 2017-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,29 +16,26 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
+import java.time.LocalDateTime;
import java.util.ArrayList;
-import java.util.Date;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
+import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
+import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRepository;
-import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean;
+import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.dataflow.core.ApplicationType;
import org.springframework.cloud.task.batch.listener.TaskBatchDao;
-import org.springframework.cloud.task.batch.listener.support.JdbcTaskBatchDao;
import org.springframework.cloud.task.repository.TaskExecution;
import org.springframework.cloud.task.repository.dao.TaskExecutionDao;
-import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean;
-import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.junit4.SpringRunner;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get;
import static org.springframework.restdocs.payload.PayloadDocumentation.fieldWithPath;
@@ -46,7 +43,7 @@
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@@ -54,43 +51,40 @@
* Documentation for the /jobs/instances endpoint.
*
* @author Glenn Renfro
+ * @author Corneil du Plessis
*/
-@RunWith(SpringRunner.class)
-@SpringBootTest(classes = { EmbeddedDataSourceConfiguration.class })
+@SuppressWarnings({"NewClassNamingConvention","SameParameterValue"})
+
+@SpringBootTest(classes = {EmbeddedDataSourceConfiguration.class})
@DirtiesContext
-public class JobInstancesDocumentation extends BaseDocumentation {
+class JobInstancesDocumentation extends BaseDocumentation {
private final static String JOB_NAME = "DOCJOB";
- private static boolean initialized;
private JobRepository jobRepository;
- private TaskExecutionDao dao;
+ private TaskExecutionDao taskExecutionDao;
private TaskBatchDao taskBatchDao;
- @Before
- public void setup() throws Exception {
- if (!initialized) {
- registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE");
- initialize();
- createJobExecution(JOB_NAME, BatchStatus.STARTED);
- initialized = true;
- }
+ @BeforeEach
+ void setup() throws Exception {
+ registerApp(ApplicationType.task, "timestamp", "3.0.0");
+ initialize();
+ createJobExecution(JOB_NAME, BatchStatus.STARTED);
}
@Test
- public void listJobInstances() throws Exception {
+ void listJobInstances() throws Exception {
this.mockMvc.perform(
get("/jobs/instances")
.param("name", JOB_NAME)
.param("page", "0")
.param("size", "10"))
- .andDo(print())
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
- parameterWithName("page")
- .description("The zero-based page number (optional)"),
- parameterWithName("size")
- .description("The requested page size (optional)"),
+ queryParameters(
+ parameterWithName("page").optional()
+ .description("The zero-based page number"),
+ parameterWithName("size").optional()
+ .description("The requested page size"),
parameterWithName("name")
.description("The name associated with the job instance")),
responseFields(
@@ -101,14 +95,13 @@ public void listJobInstances() throws Exception {
}
@Test
- public void jobDisplayDetail() throws Exception {
+ void jobDisplayDetail() throws Exception {
this.mockMvc.perform(
get("/jobs/instances/{id}", "1"))
- .andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
pathParameters(
- parameterWithName("id").description("The id of an existing job instance (required)")
+ parameterWithName("id").description("The id of an existing job instance")
),
responseFields(
fieldWithPath("jobName").description("The name of the job instance"),
@@ -120,21 +113,18 @@ public void jobDisplayDetail() throws Exception {
}
- private void initialize() throws Exception {
- JobRepositoryFactoryBean repositoryFactoryBean = new JobRepositoryFactoryBean();
- repositoryFactoryBean.setDataSource(this.dataSource);
- repositoryFactoryBean.setTransactionManager(new DataSourceTransactionManager(this.dataSource));
- this.jobRepository = repositoryFactoryBean.getObject();
- this.dao = (new TaskExecutionDaoFactoryBean(this.dataSource)).getObject();
- this.taskBatchDao = new JdbcTaskBatchDao(this.dataSource);
+ private void initialize() {
+ this.jobRepository = context.getBean(JobRepository.class);
+ this.taskExecutionDao = context.getBean(TaskExecutionDao.class);
+ this.taskBatchDao = context.getBean(TaskBatchDao.class);
}
- private void createJobExecution(String name, BatchStatus status) {
- TaskExecution taskExecution = this.dao.createTaskExecution(name, new Date(), new ArrayList<>(), null);
- JobExecution jobExecution = this.jobRepository.createJobExecution(this.jobRepository.createJobInstance(name, new JobParameters()), new JobParameters(), null);
- this.taskBatchDao.saveRelationship(taskExecution, jobExecution);
+ private void createJobExecution(String name, BatchStatus status) throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
+ TaskExecution taskExecution = taskExecutionDao.createTaskExecution(name, LocalDateTime.now(), new ArrayList<>(), null);
+ JobExecution jobExecution = jobRepository.createJobExecution(name, new JobParameters());
+ taskBatchDao.saveRelationship(taskExecution, jobExecution);
jobExecution.setStatus(status);
- jobExecution.setStartTime(new Date());
- this.jobRepository.update(jobExecution);
+ jobExecution.setStartTime(LocalDateTime.now());
+ jobRepository.update(jobExecution);
}
}
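
The job-execution setup above repeats in each of these documentation tests: the JobRepository, TaskExecutionDao and TaskBatchDao now come from the Spring test context, and java.util.Date gives way to java.time.LocalDateTime. A minimal sketch of that pattern, with the collaborators passed in and the class name purely illustrative:

import java.time.LocalDateTime;
import java.util.ArrayList;

import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.cloud.task.batch.listener.TaskBatchDao;
import org.springframework.cloud.task.repository.TaskExecution;
import org.springframework.cloud.task.repository.dao.TaskExecutionDao;

// Illustrative helper only; in the tests above these three beans are resolved
// from the test application context via context.getBean(..).
class JobExecutionFixture {

    private final JobRepository jobRepository;
    private final TaskExecutionDao taskExecutionDao;
    private final TaskBatchDao taskBatchDao;

    JobExecutionFixture(JobRepository jobRepository, TaskExecutionDao taskExecutionDao, TaskBatchDao taskBatchDao) {
        this.jobRepository = jobRepository;
        this.taskExecutionDao = taskExecutionDao;
        this.taskBatchDao = taskBatchDao;
    }

    JobExecution createJobExecution(String name, BatchStatus status)
            throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobRestartException {
        // Spring Cloud Task 3.x records timestamps as LocalDateTime rather than java.util.Date.
        TaskExecution taskExecution = taskExecutionDao.createTaskExecution(name, LocalDateTime.now(), new ArrayList<>(), null);
        // Spring Batch 5 derives the job instance from the job name; no explicit createJobInstance call is needed.
        JobExecution jobExecution = jobRepository.createJobExecution(name, new JobParameters());
        taskBatchDao.saveRelationship(taskExecution, jobExecution);
        jobExecution.setStatus(status);
        jobExecution.setStartTime(LocalDateTime.now());
        jobRepository.update(jobExecution);
        return jobExecution;
    }
}
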
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobStepExecutionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobStepExecutionsDocumentation.java
index 54b2a37e15..8c0926cd8e 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobStepExecutionsDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobStepExecutionsDocumentation.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2020 the original author or authors.
+ * Copyright 2017-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,30 +16,27 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
+import java.time.LocalDateTime;
import java.util.ArrayList;
-import java.util.Date;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.StepExecution;
+import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
+import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRepository;
-import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean;
+import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.dataflow.core.ApplicationType;
import org.springframework.cloud.task.batch.listener.TaskBatchDao;
-import org.springframework.cloud.task.batch.listener.support.JdbcTaskBatchDao;
import org.springframework.cloud.task.repository.TaskExecution;
import org.springframework.cloud.task.repository.dao.TaskExecutionDao;
-import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean;
-import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.test.annotation.DirtiesContext;
-import org.springframework.test.context.junit4.SpringRunner;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post;
@@ -48,125 +45,124 @@
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* Documentation for the /jobs/executions/{id}/steps endpoint.
*
* @author Glenn Renfro
+ * @author Corneil du Plessis
*/
-@RunWith(SpringRunner.class)
-@SpringBootTest(classes = { EmbeddedDataSourceConfiguration.class })
+@SuppressWarnings({"NewClassNamingConvention","SameParameterValue"})
+@SpringBootTest(classes = {EmbeddedDataSourceConfiguration.class})
@DirtiesContext
-public class JobStepExecutionsDocumentation extends BaseDocumentation {
+class JobStepExecutionsDocumentation extends BaseDocumentation {
private final static String JOB_NAME = "DOCJOB";
- private static boolean initialized;
private JobRepository jobRepository;
- private TaskExecutionDao dao;
+
+ private TaskExecutionDao taskExecutionDao;
+
private TaskBatchDao taskBatchDao;
- @Before
- public void setup() throws Exception {
- if (!initialized) {
- registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE");
- initialize();
- createJobExecution(JOB_NAME, BatchStatus.STARTED);
-
- documentation.dontDocument(() -> this.mockMvc.perform(
- post("/tasks/definitions")
- .param("name", "DOCJOB1")
- .param("definition", "timestamp --format='YYYY MM DD'"))
- .andExpect(status().isOk()));
- initialized = true;
- }
+ @BeforeEach
+ void setup() throws Exception {
+ registerApp(ApplicationType.task, "timestamp", "3.0.0");
+ initialize();
+ createJobExecution(JOB_NAME, BatchStatus.STARTED);
+
+ documentation.dontDocument(
+ () -> this.mockMvc
+ .perform(post("/tasks/definitions").param("name", "DOCJOB1")
+ .param("definition", "timestamp --format='YYYY MM DD'"))
+ .andExpect(status().isOk()));
}
@Test
- public void listStepExecutionsForJob() throws Exception {
+ void listStepExecutionsForJob() throws Exception {
this.mockMvc.perform(
- get("/jobs/executions/{id}/steps", "1")
- .param("page", "0")
- .param("size", "10"))
+ get("/jobs/executions/{id}/steps", "1")
+ .param("page", "0")
+ .param("size", "10"))
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
- parameterWithName("page")
- .description("The zero-based page number (optional)"),
- parameterWithName("size")
- .description("The requested page size (optional)")),
- pathParameters(parameterWithName("id")
- .description("The id of an existing job execution (required)")),
- responseFields(
- subsectionWithPath("_embedded.stepExecutionResourceList")
- .description("Contains a collection of Step Executions/"),
- subsectionWithPath("_links.self").description("Link to the job execution resource"),
- subsectionWithPath("page").description("Pagination properties"))));
+ queryParameters(
+ parameterWithName("page").optional()
+ .description("The zero-based page number"),
+ parameterWithName("size").optional()
+ .description("The requested page size")),
+ pathParameters(parameterWithName("id")
+ .description("The id of an existing job execution")),
+ responseFields(
+ subsectionWithPath("_embedded.stepExecutionResourceList")
+ .description("Contains a collection of Step Executions/"),
+ subsectionWithPath("_links.self").description("Link to the job execution resource"),
+ subsectionWithPath("page").description("Pagination properties"))));
}
@Test
- public void stepDetail() throws Exception {
+ void stepDetail() throws Exception {
this.mockMvc.perform(
- get("/jobs/executions/{id}/steps/{stepid}", "1", "1"))
- .andExpect(status().isOk()).andDo(this.documentationHandler.document(
- pathParameters(
- parameterWithName("id").description("The id of an existing job execution (required)"),
- parameterWithName("stepid")
- .description("The id of an existing step execution for a specific job execution (required)")
- ),
- responseFields(
- fieldWithPath("jobExecutionId").description("The ID of the job step execution"),
- fieldWithPath("stepType").description("The type of the job step execution"),
- subsectionWithPath("stepExecution").description("The step details of the job step execution"),
- subsectionWithPath("_links.self").description("Link to the job step execution resource")
- )
- ));
+ get("/jobs/executions/{id}/steps/{stepid}", "1", "1"))
+ .andExpect(status().isOk()).andDo(this.documentationHandler.document(
+ pathParameters(
+ parameterWithName("id").description("The id of an existing job execution"),
+ parameterWithName("stepid")
+ .description("The id of an existing step execution for a specific job execution")
+ ),
+ responseFields(
+ fieldWithPath("jobExecutionId").description("The ID of the job step execution"),
+ fieldWithPath("stepType").description("The type of the job step execution"),
+ subsectionWithPath("stepExecution").description("The step details of the job step execution"),
+ subsectionWithPath("_links.self").description("Link to the job step execution resource"),
+ subsectionWithPath("_links.progress").description("Link to retrieve the progress")
+ )
+ ));
}
@Test
- public void stepProgress() throws Exception {
+ void stepProgress() throws Exception {
this.mockMvc.perform(
- get("/jobs/executions/{id}/steps/{stepid}/progress", "1", "1"))
- .andExpect(status().isOk()).andDo(this.documentationHandler.document(
- pathParameters(
- parameterWithName("id").description("The id of an existing job execution (required)"),
- parameterWithName("stepid")
- .description("The id of an existing step execution for a specific job execution (required)")
- ),
- responseFields(
- subsectionWithPath("stepExecution").description("The detailed step details of the job step execution"),
- subsectionWithPath("stepExecutionHistory")
- .description("The history of the job step execution"),
- fieldWithPath("percentageComplete").description("The percentage complete of the job step execution"),
- fieldWithPath("finished").description("The status finished of the job step execution"),
- fieldWithPath("duration").description("The duration of the job step execution"),
- subsectionWithPath("_links.self").description("Link to the job step execution resource")
- )
- ));
+ get("/jobs/executions/{id}/steps/{stepid}/progress", "1", "1"))
+ .andExpect(status().isOk()).andDo(this.documentationHandler.document(
+ pathParameters(
+ parameterWithName("id").description("The id of an existing job execution"),
+ parameterWithName("stepid")
+ .description("The id of an existing step execution for a specific job execution")
+ ),
+ responseFields(
+ subsectionWithPath("stepExecution").description("The detailed step details of the job step execution"),
+ subsectionWithPath("stepExecutionHistory")
+ .description("The history of the job step execution"),
+ fieldWithPath("percentageComplete").description("The percentage complete of the job step execution"),
+ fieldWithPath("finished").description("The status finished of the job step execution"),
+ fieldWithPath("duration").description("The duration of the job step execution"),
+ subsectionWithPath("_links.self").description("Link to the job step execution resource"),
+ subsectionWithPath("_links.progress").description("Link to the job step progress")
+ )
+ ));
}
- private void initialize() throws Exception {
- JobRepositoryFactoryBean repositoryFactoryBean = new JobRepositoryFactoryBean();
- repositoryFactoryBean.setDataSource(this.dataSource);
- repositoryFactoryBean.setTransactionManager(new DataSourceTransactionManager(this.dataSource));
- this.jobRepository = repositoryFactoryBean.getObject();
- this.dao = (new TaskExecutionDaoFactoryBean(this.dataSource)).getObject();
- this.taskBatchDao = new JdbcTaskBatchDao(this.dataSource);
+ private void initialize() {
+ this.jobRepository = context.getBean(JobRepository.class);
+ this.taskExecutionDao = context.getBean(TaskExecutionDao.class);
+ this.taskBatchDao = context.getBean(TaskBatchDao.class);
}
- private void createJobExecution(String name, BatchStatus status) {
- TaskExecution taskExecution = this.dao.createTaskExecution(name, new Date(), new ArrayList<>(), null);
- JobExecution jobExecution = this.jobRepository.createJobExecution(this.jobRepository.createJobInstance(name, new JobParameters()), new JobParameters(), null);
+ private void createJobExecution(String name, BatchStatus status) throws JobInstanceAlreadyCompleteException,
+ JobExecutionAlreadyRunningException, JobRestartException {
+ TaskExecution taskExecution = taskExecutionDao.createTaskExecution(name, LocalDateTime.now(), new ArrayList<>(), null);
+ JobExecution jobExecution = jobRepository.createJobExecution(name, new JobParameters());
StepExecution stepExecution = new StepExecution(name + "_STEP", jobExecution, jobExecution.getId());
stepExecution.setId(null);
jobRepository.add(stepExecution);
- this.taskBatchDao.saveRelationship(taskExecution, jobExecution);
+ taskBatchDao.saveRelationship(taskExecution, jobExecution);
jobExecution.setStatus(status);
- jobExecution.setStartTime(new Date());
- this.jobRepository.update(jobExecution);
+ jobExecution.setStartTime(LocalDateTime.now());
+ jobRepository.update(jobExecution);
}
}
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeAppsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeAppsDocumentation.java
index 8a40bae482..f5ca367b80 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeAppsDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeAppsDocumentation.java
@@ -19,9 +19,9 @@
import java.util.ArrayList;
import java.util.List;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import org.springframework.cloud.dataflow.core.ApplicationType;
import org.springframework.cloud.skipper.domain.Info;
@@ -41,26 +41,28 @@
*
* @author Eric Bottard
* @author Ilayaperumal Gopinathan
+ * @author Corneil du Plessis
*/
+@SuppressWarnings("NewClassNamingConvention")
@DirtiesContext
-public class RuntimeAppsDocumentation extends BaseDocumentation {
+class RuntimeAppsDocumentation extends BaseDocumentation {
- @Before
- public void setup() throws Exception {
- registerApp(ApplicationType.source, "http", "1.2.0.RELEASE");
- registerApp(ApplicationType.sink, "log", "1.2.0.RELEASE");
+ @BeforeEach
+ void setup() throws Exception {
+ registerApp(ApplicationType.source, "http", "5.0.0");
+ registerApp(ApplicationType.sink, "log", "5.0.0");
createStream("mystream", "http | log", true);
}
- @After
- public void cleanup() throws Exception {
+ @AfterEach
+ void cleanup() throws Exception {
destroyStream("mystream");
unregisterApp(ApplicationType.source, "http");
unregisterApp(ApplicationType.sink, "log");
}
@Test
- public void listRuntimeStreamStatus() throws Exception {
+ void listRuntimeStreamStatus() throws Exception {
this.mockMvc.perform(
get("/runtime/streams")
.accept(MediaType.APPLICATION_JSON)
@@ -70,7 +72,7 @@ public void listRuntimeStreamStatus() throws Exception {
}
@Test
- public void listRuntimeStreamStatusV2() throws Exception {
+ void listRuntimeStreamStatusV2() throws Exception {
this.mockMvc.perform(
get("/runtime/streams/status")
.accept(MediaType.APPLICATION_JSON)
@@ -80,7 +82,7 @@ public void listRuntimeStreamStatusV2() throws Exception {
}
@Test
- public void listAllApps() throws Exception {
+ void listAllApps() throws Exception {
this.mockMvc.perform(
get("/runtime/apps")
.accept(MediaType.APPLICATION_JSON)
@@ -90,7 +92,7 @@ public void listAllApps() throws Exception {
}
@Test
- public void listSingleAppAllInstances() throws Exception {
+ void listSingleAppAllInstances() throws Exception {
Info info = new Info();
info.setStatus(new Status());
@@ -118,7 +120,7 @@ public void listSingleAppAllInstances() throws Exception {
}
@Test
- public void getSingleAppSingleInstance() throws Exception {
+ void getSingleAppSingleInstance() throws Exception {
Info info = new Info();
info.setStatus(new Status());
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeStreamStatusForStreamAppsWithoutCollectorDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeStreamStatusForStreamAppsWithoutCollectorDocumentation.java
index cd985e1669..7bfbf210be 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeStreamStatusForStreamAppsWithoutCollectorDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/RuntimeStreamStatusForStreamAppsWithoutCollectorDocumentation.java
@@ -16,8 +16,7 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
-import org.junit.Ignore;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import org.springframework.http.MediaType;
@@ -26,12 +25,13 @@
/**
* @author Gunnar Hillert
+ * @author Corneil du Plessis
*/
-@Ignore
-public class RuntimeStreamStatusForStreamAppsWithoutCollectorDocumentation extends BaseDocumentation {
+@SuppressWarnings("NewClassNamingConvention")
+class RuntimeStreamStatusForStreamAppsWithoutCollectorDocumentation extends BaseDocumentation {
@Test
- public void getMetricsWithoutCollectorRunning() throws Exception {
+ void getMetricsWithoutCollectorRunning() throws Exception {
this.mockMvc.perform(get("/runtime/streams")
.accept(MediaType.APPLICATION_JSON))
.andExpect(status().isOk());
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDefinitionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDefinitionsDocumentation.java
index a90d004051..5288fe3c5b 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDefinitionsDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDefinitionsDocumentation.java
@@ -18,12 +18,13 @@
import java.util.Arrays;
-import org.junit.Before;
-import org.junit.FixMethodOrder;
-import org.junit.Test;
-import org.junit.runners.MethodSorters;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.MethodOrderer.MethodName;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestMethodOrder;
import org.springframework.cloud.dataflow.core.ApplicationType;
+import org.springframework.test.annotation.DirtiesContext;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.delete;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get;
@@ -33,7 +34,7 @@
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@@ -42,45 +43,39 @@
*
* @author Gunnar Hillert
* @author Ilayaperumal Gopinathan
+ * @author Corneil du Plessis
*/
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-public class StreamDefinitionsDocumentation extends BaseDocumentation {
-
- private static boolean setUpIsDone = false;
-
- @Before
- public void setup() throws Exception {
- if (setUpIsDone) {
- return;
- }
-
-
+@SuppressWarnings("NewClassNamingConvention")
+@TestMethodOrder(MethodName.class)
+@DirtiesContext
+class StreamDefinitionsDocumentation extends BaseDocumentation {
+ @BeforeEach
+ void setup() throws Exception {
this.mockMvc.perform(
post("/apps/{type}/time", "source")
- .param("uri", "maven://org.springframework.cloud.stream.app:time-source-rabbit:1.2.0.RELEASE"))
+ .queryParam("uri", "maven://org.springframework.cloud.stream.app:time-source-rabbit:5.0.0"))
.andExpect(status().isCreated());
this.mockMvc.perform(
post("/apps/{type}/log", "sink")
- .param("uri", "maven://org.springframework.cloud.stream.app:log-sink-rabbit:1.2.0.RELEASE"))
+ .queryParam("uri", "maven://org.springframework.cloud.stream.app:log-sink-rabbit:5.0.0"))
.andExpect(status().isCreated());
- setUpIsDone = true;
}
@Test
- public void createDefinition() throws Exception {
+ void createDefinition() throws Exception {
this.mockMvc.perform(
post("/streams/definitions")
- .param("name", "timelog")
- .param("definition", "time --format='YYYY MM DD' | log")
- .param("description", "Demo stream for testing")
- .param("deploy", "false"))
+ .queryParam("name", "timelog")
+ .queryParam("definition", "time --format='YYYY MM DD' | log")
+ .queryParam("description", "Demo stream for testing")
+ .queryParam("deploy", "false"))
.andExpect(status().isCreated())
.andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("name").description("The name for the created task definitions"),
parameterWithName("definition").description("The definition for the stream, using Data Flow DSL"),
parameterWithName("description").description("The description of the stream definition"),
- parameterWithName("deploy")
+ parameterWithName("deploy").optional()
.description("If true, the stream is deployed upon creation (default is false)")),
responseFields(
fieldWithPath("name").description("The name of the created stream definition"),
@@ -96,21 +91,27 @@ public void createDefinition() throws Exception {
}
@Test
- public void listAllStreamDefinitions() throws Exception {
+ void listAllStreamDefinitions() throws Exception {
+ this.documentation.dontDocument(
+ () -> this.mockMvc
+ .perform(post("/streams/definitions").queryParam("name", "timelog")
+ .queryParam("definition", "time --format='YYYY MM DD' | log")
+ .queryParam("description", "Demo stream for testing")
+ .queryParam("deploy", "false"))
+ .andExpect(status().isCreated()));
this.mockMvc.perform(
get("/streams/definitions")
- .param("page", "0")
- .param("sort", "name,ASC")
- .param("search", "")
- .param("size", "10"))
- .andDo(print())
+ .queryParam("page", "0")
+ .queryParam("sort", "name,ASC")
+ .queryParam("search", "")
+ .queryParam("size", "10"))
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- requestParameters(
- parameterWithName("page").description("The zero-based page number (optional)"),
- parameterWithName("search").description("The search string performed on the name (optional)"),
- parameterWithName("sort").description("The sort on the list (optional)"),
- parameterWithName("size").description("The requested page size (optional)")),
+ queryParameters(
+ parameterWithName("page").optional().description("The zero-based page number"),
+ parameterWithName("search").optional().description("The search string performed on the name"),
+ parameterWithName("sort").optional().description("The sort on the list"),
+ parameterWithName("size").optional().description("The requested page size")),
responseFields(
subsectionWithPath("_embedded.streamDefinitionResourceList")
.description("Contains a collection of Stream Definitions"),
@@ -119,14 +120,20 @@ public void listAllStreamDefinitions() throws Exception {
}
@Test
- public void getStreamDefinition() throws Exception {
+ void getStreamDefinition() throws Exception {
+ this.documentation.dontDocument(
+ () -> this.mockMvc
+ .perform(post("/streams/definitions").queryParam("name", "timelog")
+ .queryParam("definition", "time --format='YYYY MM DD' | log")
+ .queryParam("description", "Demo stream for testing")
+ .queryParam("deploy", "false"))
+ .andExpect(status().isCreated()));
this.mockMvc.perform(
get("/streams/definitions/{name}", "timelog"))
- .andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
pathParameters(
- parameterWithName("name").description("The name of the stream definition to query (required)")
+ parameterWithName("name").description("The name of the stream definition to query")
),
responseFields(
fieldWithPath("name").description("The name of the stream definition"),
@@ -141,15 +148,14 @@ public void getStreamDefinition() throws Exception {
}
@Test
- public void getStreamApplications() throws Exception {
+ void getStreamApplications() throws Exception {
createStream("mysamplestream", "time | log", false);
this.mockMvc.perform(
get("/streams/definitions/{name}/applications", "mysamplestream"))
- .andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
pathParameters(
- parameterWithName("name").description("The name of the stream definition to query (required)")
+ parameterWithName("name").description("The name of the stream definition to query")
),
responseFields(
fieldWithPath("[]").description("An array of applications"),
@@ -158,6 +164,7 @@ public void getStreamApplications() throws Exception {
fieldWithPath("[].type").description("The type of the application. One of " + Arrays
.asList(ApplicationType.values())),
fieldWithPath("[].uri").description("The uri of the application"),
+ fieldWithPath("[].metaDataUri").description("The uri of the application metadata"),
fieldWithPath("[].version").description("The version of the application"),
fieldWithPath("[].defaultVersion").description("If true, the application is the default version"),
fieldWithPath("[].versions").description("All the registered versions of the application"),
@@ -166,26 +173,32 @@ public void getStreamApplications() throws Exception {
}
@Test
- public void listRelatedStreamDefinitions() throws Exception {
+ void listRelatedStreamDefinitions() throws Exception {
+ this.documentation.dontDocument(
+ () -> this.mockMvc
+ .perform(post("/streams/definitions").queryParam("name", "timelog")
+ .queryParam("definition", "time --format='YYYY MM DD' | log")
+ .queryParam("description", "Demo stream for testing")
+ .queryParam("deploy", "false"))
+ .andExpect(status().isCreated()));
this.mockMvc.perform(
get("/streams/definitions/{name}/related", "timelog")
- .param("page", "0")
- .param("sort", "name,ASC")
- .param("search", "")
- .param("size", "10")
- .param("nested", "true"))
- .andDo(print())
+ .queryParam("page", "0")
+ .queryParam("sort", "name,ASC")
+ .queryParam("search", "")
+ .queryParam("size", "10")
+ .queryParam("nested", "true"))
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- requestParameters(
- parameterWithName("nested")
- .description("Should we recursively findByTaskNameContains for related stream definitions (optional)"),
- parameterWithName("page").description("The zero-based page number (optional)"),
- parameterWithName("search").description("The search string performed on the name (optional)"),
- parameterWithName("sort").description("The sort on the list (optional)"),
- parameterWithName("size").description("The requested page size (optional)")),
+ queryParameters(
+ parameterWithName("nested").optional()
+ .description("Should we recursively findByTaskNameContains for related stream definitions"),
+ parameterWithName("page").optional().description("The zero-based page number"),
+ parameterWithName("search").optional().description("The search string performed on the name"),
+ parameterWithName("sort").optional().description("The sort on the list"),
+ parameterWithName("size").optional().description("The requested page size")),
pathParameters(parameterWithName("name")
- .description("The name of an existing stream definition (required)")),
+ .description("The name of an existing stream definition")),
responseFields(
subsectionWithPath("_embedded.streamDefinitionResourceList")
.description("Contains a collection of Stream Definitions"),
@@ -195,22 +208,34 @@ public void listRelatedStreamDefinitions() throws Exception {
}
@Test
- public void streamDefinitionDelete1() throws Exception {
+ void streamDefinitionDelete1() throws Exception {
+ this.documentation.dontDocument(
+ () -> this.mockMvc
+ .perform(post("/streams/definitions").queryParam("name", "timelog")
+ .queryParam("definition", "time --format='YYYY MM DD' | log")
+ .queryParam("description", "Demo stream for testing")
+ .queryParam("deploy", "false"))
+ .andExpect(status().isCreated()));
this.mockMvc.perform(
delete("/streams/definitions/{name}", "timelog"))
- .andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
pathParameters(parameterWithName("name")
- .description("The name of an existing stream definition (required)"))
+ .description("The name of an existing stream definition"))
));
}
@Test
- public void streamDefinitionDeleteAll() throws Exception {
+ void streamDefinitionDeleteAll() throws Exception {
+ this.documentation.dontDocument(
+ () -> this.mockMvc
+ .perform(post("/streams/definitions").queryParam("name", "timelog")
+ .queryParam("definition", "time --format='YYYY MM DD' | log")
+ .queryParam("description", "Demo stream for testing")
+ .queryParam("deploy", "false"))
+ .andExpect(status().isCreated()));
this.mockMvc.perform(
delete("/streams/definitions"))
- .andDo(print())
.andExpect(status().isOk());
}
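
The same REST Docs migration recurs throughout these tests: requestParameters(..) becomes queryParameters(..), and the "(optional)" suffix in each description is replaced by the .optional() marker. A minimal sketch of the new style, assuming Spring REST Docs 3 on the classpath (class and method names are illustrative):

import org.springframework.restdocs.request.QueryParametersSnippet;

import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;

// Illustrative only: the paging parameters documented by several of the tests above.
class PagingParameterDocs {

    static QueryParametersSnippet pagingQueryParameters() {
        // REST Docs 3 replaces requestParameters(..) with queryParameters(..);
        // optionality is declared with .optional() instead of "(optional)" text.
        return queryParameters(
                parameterWithName("page").optional().description("The zero-based page number"),
                parameterWithName("size").optional().description("The requested page size"));
    }
}
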
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDeploymentsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDeploymentsDocumentation.java
index e6bdf4afd3..aa65fe2746 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDeploymentsDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamDeploymentsDocumentation.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2018 the original author or authors.
+ * Copyright 2017-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,22 +17,23 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
import java.io.IOException;
-import java.util.Arrays;
+import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;
-import org.junit.Before;
-import org.junit.FixMethodOrder;
-import org.junit.Test;
-import org.junit.runners.MethodSorters;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.MethodOrderer.MethodName;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestMethodOrder;
import org.springframework.cloud.dataflow.rest.UpdateStreamRequest;
import org.springframework.cloud.skipper.domain.PackageIdentifier;
import org.springframework.cloud.skipper.domain.Release;
import org.springframework.cloud.skipper.domain.RollbackRequest;
import org.springframework.http.MediaType;
+import org.springframework.test.annotation.DirtiesContext;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.when;
@@ -41,26 +42,22 @@
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
-import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* @author Glenn Renfro
* @author Ilayaperumal Gopinathan
* @author Christian Tzolov
+ * @author Corneil du Plessis
*/
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+@SuppressWarnings("NewClassNamingConvention")
+@TestMethodOrder(MethodName.class)
+@DirtiesContext
public class StreamDeploymentsDocumentation extends BaseDocumentation {
- private static boolean setUpIsDone = false;
-
- @Before
- public void setup() throws Exception {
- if (setUpIsDone) {
- return;
- }
-
+ @BeforeEach
+ void setup() throws Exception {
this.mockMvc.perform(
post("/apps/{type}/time", "source")
.param("uri", "maven://org.springframework.cloud.stream.app:time-source-rabbit:1.2.0.RELEASE")
@@ -83,41 +80,39 @@ public void setup() throws Exception {
.param("definition", "time --format='YYYY MM DD' | log")
.param("deploy", "false"))
.andExpect(status().isCreated());
- setUpIsDone = true;
}
@Test
- public void scale() throws Exception {
+ void scale() throws Exception {
String json = "{\"app.time.timestamp.format\":\"YYYY\"}";
this.mockMvc.perform(
post("/streams/deployments/scale/{streamName}/{appName}/instances/{count}", "timelog", "log", 1)
.contentType(MediaType.APPLICATION_JSON)
.content(json))
- .andDo(print())
.andExpect(status().isCreated())
.andDo(this.documentationHandler.document(pathParameters(
parameterWithName("streamName")
- .description("the name of an existing stream definition (required)"),
- parameterWithName("appName")
+ .description("the name of an existing stream definition"),
+ parameterWithName("appName").optional()
.description("in stream application name to scale"),
parameterWithName("count")
- .description("number of instances for the selected stream application (required)"))
+ .description("number of instances for the selected stream application"))
));
}
@Test
- public void unDeploy() throws Exception {
+ void unDeploy() throws Exception {
this.mockMvc.perform(
delete("/streams/deployments/{timelog}", "timelog"))
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
pathParameters(parameterWithName("timelog")
- .description("The name of an existing stream definition (required)"))
+ .description("The name of an existing stream definition"))
));
}
@Test
- public void unDeployAll() throws Exception {
+ void unDeployAll() throws Exception {
this.mockMvc.perform(
delete("/streams/deployments"))
.andExpect(status().isOk())
@@ -126,7 +121,7 @@ public void unDeployAll() throws Exception {
@Test
- public void info() throws Exception {
+ void info() throws Exception {
String json = "{\"app.time.timestamp.format\":\"YYYY\"}";
this.mockMvc.perform(
get("/streams/deployments/{timelog}?reuse-deployment-properties=true", "timelog")
@@ -135,47 +130,44 @@ public void info() throws Exception {
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
pathParameters(parameterWithName("timelog")
- .description("The name of an existing stream definition (required)")),
- requestParameters(parameterWithName("reuse-deployment-properties")
+ .description("The name of an existing stream definition")),
+ queryParameters(parameterWithName("reuse-deployment-properties").optional()
.description(parameterWithName("The name of the flag to reuse the deployment properties")))
));
}
@Test
- public void deploy() throws Exception {
+ void deploy() throws Exception {
String json = "{\"app.time.timestamp.format\":\"YYYY\"}";
this.mockMvc.perform(
post("/streams/deployments/{timelog}", "timelog")
.contentType(MediaType.APPLICATION_JSON)
.content(json))
- .andDo(print())
.andExpect(status().isCreated())
.andDo(this.documentationHandler.document(
pathParameters(parameterWithName("timelog")
- .description("The name of an existing stream definition (required)"))
+ .description("The name of an existing stream definition"))
));
}
@Test
- public void streamUpdate() throws Exception {
+ void streamUpdate() throws Exception {
String json = "{\"app.time.timestamp.format\":\"YYYY\"}";
this.mockMvc.perform(
post("/streams/deployments/{timelog1}", "timelog1")
.contentType(MediaType.APPLICATION_JSON)
.content(json))
- .andDo(print())
.andExpect(status().isCreated())
.andDo(this.documentationHandler.document(
pathParameters(parameterWithName("timelog1")
- .description("The name of an existing stream definition (required)"))
+ .description("The name of an existing stream definition"))
));
- Thread.sleep(30000);
UpdateStreamRequest updateStreamRequest = new UpdateStreamRequest();
updateStreamRequest.setReleaseName("timelog1");
Map<String, String> updateProperties = new HashMap<>();
updateProperties.put("app.time.timestamp.format", "YYYYMMDD");
updateStreamRequest.setUpdateProperties(updateProperties);
- final String releaseName = "myLogRelease";
+
final PackageIdentifier packageIdentifier = new PackageIdentifier();
packageIdentifier.setPackageName("timelog1");
packageIdentifier.setPackageVersion("1.0.0");
@@ -186,73 +178,65 @@ public void streamUpdate() throws Exception {
post("/streams/deployments/update/{timelog1}", "timelog1")
.contentType(MediaType.APPLICATION_JSON)
.content(convertObjectToJson(updateStreamRequest)))
- .andDo(print())
.andExpect(status().isCreated())
.andDo(this.documentationHandler.document(
pathParameters(parameterWithName("timelog1")
- .description("The name of an existing stream definition (required)"))
+ .description("The name of an existing stream definition"))
));
- Thread.sleep(30000);
}
@Test
- public void rollback() throws Exception {
- RollbackRequest rollbackRequest = new RollbackRequest();
+ void rollback() throws Exception {
+ final RollbackRequest rollbackRequest = new RollbackRequest();
rollbackRequest.setReleaseName("timelog1");
this.mockMvc.perform(
post("/streams/deployments/rollback/{name}/{version}", "timelog1", 1)
.contentType(MediaType.APPLICATION_JSON))
- .andDo(print())
.andExpect(status().isCreated())
.andDo(this.documentationHandler.document(
pathParameters(parameterWithName("name")
- .description("The name of an existing stream definition (required)"),
+ .description("The name of an existing stream definition"),
parameterWithName("version").description("The version to rollback to"))));
- Thread.sleep(30000);
}
@Test
- public void history() throws Exception {
- when(this.springDataflowServer.getSkipperClient().history(anyString()))
- .thenReturn(Arrays.asList(new Release()));
+ void history() throws Exception {
+ when(springDataflowServer.getSkipperClient().history(anyString()))
+ .thenReturn(Collections.singletonList(new Release()));
this.mockMvc.perform(
get("/streams/deployments/history/{name}", "timelog1")
.contentType(MediaType.APPLICATION_JSON))
- .andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
pathParameters(parameterWithName("name")
- .description("The name of an existing stream definition (required)"))));
+ .description("The name of an existing stream definition"))));
}
@Test
- public void manifest() throws Exception {
+ void manifest() throws Exception {
this.mockMvc.perform(
get("/streams/deployments/manifest/{name}/{version}", "timelog1", 1)
.contentType(MediaType.APPLICATION_JSON))
- .andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
pathParameters(parameterWithName("name")
- .description("The name of an existing stream definition (required)"),
+ .description("The name of an existing stream definition"),
parameterWithName("version").description("The version of the stream"))));
}
@Test
- public void platformList() throws Exception {
+ void platformList() throws Exception {
this.mockMvc.perform(
get("/streams/deployments/platform/list")
.contentType(MediaType.APPLICATION_JSON))
- .andDo(print())
.andExpect(status().isOk());
}
public static String convertObjectToJson(Object object) throws IOException {
ObjectMapper mapper = new ObjectMapper();
mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
- String json = mapper.writeValueAsString(object);
- return json;
+ return mapper.writeValueAsString(object);
}
}
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamLogsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamLogsDocumentation.java
index 5e60efb9ee..eaaa28c129 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamLogsDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamLogsDocumentation.java
@@ -19,27 +19,28 @@
import java.util.HashMap;
import java.util.Map;
-import org.junit.FixMethodOrder;
-import org.junit.Test;
-import org.junit.runners.MethodSorters;
+import org.junit.jupiter.api.MethodOrderer;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestMethodOrder;
import org.springframework.cloud.skipper.domain.LogInfo;
import static org.mockito.Mockito.when;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get;
-import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* Documentation for the {@code /streams/logs} endpoint.
*
* @author Ilayaperumal Gopinathan
+ * @author Corneil du Plessis
*/
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-public class StreamLogsDocumentation extends BaseDocumentation {
+@SuppressWarnings("NewClassNamingConvention")
+@TestMethodOrder(MethodOrderer.MethodName.class)
+class StreamLogsDocumentation extends BaseDocumentation {
@Test
- public void getLogsByStreamName() throws Exception {
+ void getLogsByStreamName() throws Exception {
LogInfo logInfo = new LogInfo();
Map<String, String> logs = new HashMap<>();
logs.put("ticktock-log-v1", "Logs-log");
@@ -48,13 +49,12 @@ public void getLogsByStreamName() throws Exception {
when(springDataflowServer.getSkipperClient().getLog("ticktock")).thenReturn(logInfo);
this.mockMvc.perform(
get("/streams/logs/ticktock"))
- .andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document());
}
@Test
- public void getLogsByAppName() throws Exception {
+ void getLogsByAppName() throws Exception {
LogInfo logInfo = new LogInfo();
Map<String, String> logs = new HashMap<>();
logs.put("ticktock-log-v1", "Logs-log");
@@ -62,7 +62,6 @@ public void getLogsByAppName() throws Exception {
when(springDataflowServer.getSkipperClient().getLog("ticktock", "ticktock-log-v1")).thenReturn(logInfo);
this.mockMvc.perform(
get("/streams/logs/ticktock/ticktock-log-v1"))
- .andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document());
}
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamValidationDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamValidationDocumentation.java
index 6945376216..04e0a52b39 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamValidationDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/StreamValidationDocumentation.java
@@ -16,11 +16,6 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
-import org.junit.Before;
-import org.junit.FixMethodOrder;
-import org.junit.Test;
-import org.junit.runners.MethodSorters;
-
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post;
import static org.springframework.restdocs.payload.PayloadDocumentation.fieldWithPath;
@@ -30,22 +25,23 @@
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.MethodOrderer;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestMethodOrder;
+
/**
* Documentation for the /streams/validation endpoint.
*
* @author Glenn Renfro
+ * @author Corneil du Plessis
*/
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-public class StreamValidationDocumentation extends BaseDocumentation {
-
- private static boolean setUpIsDone = false;
-
- @Before
- public void setup() throws Exception {
- if (setUpIsDone) {
- return;
- }
+@SuppressWarnings("NewClassNamingConvention")
+@TestMethodOrder(MethodOrderer.MethodName.class)
+class StreamValidationDocumentation extends BaseDocumentation {
+ @BeforeEach
+ void setup() throws Exception {
this.mockMvc.perform(
post("/apps/{type}/time", "source")
.param("uri", "maven://org.springframework.cloud.stream.app:time-source-rabbit:1.2.0.RELEASE")
@@ -56,11 +52,10 @@ public void setup() throws Exception {
.param("uri", "maven://org.springframework.cloud.stream.app:log-sink-rabbit:1.2.0.RELEASE")
.param("force", "true"))
.andExpect(status().isCreated());
- setUpIsDone = true;
}
@Test
- public void validateStream() throws Exception {
+ void validateStream() throws Exception {
this.mockMvc.perform(
post("/streams/definitions")
.param("name", "timelog")
@@ -74,7 +69,7 @@ public void validateStream() throws Exception {
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
pathParameters(
- parameterWithName("name").description("The name of a stream definition to be validated (required)")
+ parameterWithName("name").description("The name of a stream definition to be validated")
),
responseFields(
fieldWithPath("appName").description("The name of a stream definition"),
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskDefinitionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskDefinitionsDocumentation.java
index 44c11c2566..a67a028a9b 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskDefinitionsDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskDefinitionsDocumentation.java
@@ -16,11 +16,11 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.FixMethodOrder;
-import org.junit.Test;
-import org.junit.runners.MethodSorters;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.MethodOrderer;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestMethodOrder;
import org.springframework.cloud.dataflow.core.ApplicationType;
@@ -32,7 +32,7 @@
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@@ -41,34 +41,35 @@
*
* @author Eric Bottard
* @author Ilayaperumal Gopinathan
+ * @author Corneil du Plessis
*/
+@SuppressWarnings("NewClassNamingConvention")
+@TestMethodOrder(MethodOrderer.MethodName.class)
+class TaskDefinitionsDocumentation extends BaseDocumentation {
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-public class TaskDefinitionsDocumentation extends BaseDocumentation {
-
- @Before
- public void setup() throws Exception {
- registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE");
+ @BeforeEach
+ void setup() throws Exception {
+ registerApp(ApplicationType.task, "timestamp", "3.0.0");
}
- @After
- public void tearDown() throws Exception {
+ @AfterEach
+ void tearDown() throws Exception {
unregisterApp(ApplicationType.task, "timestamp");
}
@Test
- public void createDefinition() throws Exception {
+ void createDefinition() throws Exception {
this.mockMvc.perform(
post("/tasks/definitions")
- .param("name", "my-task")
- .param("definition", "timestamp --format='YYYY MM DD'")
- .param("description", "Demo task definition for testing"))
+ .queryParam("name", "my-task")
+ .queryParam("definition", "timestamp --format='YYYY MM DD'")
+ .queryParam("description", "Demo task definition for testing"))
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("name").description("The name for the created task definition"),
parameterWithName("definition").description("The definition for the task, using Data Flow DSL"),
- parameterWithName("description").description("The description of the task definition")
+ parameterWithName("description").optional().description("The description of the task definition")
),
responseFields(
fieldWithPath("name").description("The name of the created task definition"),
@@ -85,24 +86,29 @@ public void createDefinition() throws Exception {
}
@Test
- public void listAllTaskDefinitions() throws Exception {
+ void listAllTaskDefinitions() throws Exception {
+ this.documentation.dontDocument(
+ () -> this.mockMvc
+ .perform(post("/tasks/definitions").queryParam("name", "my-task")
+ .queryParam("definition", "timestamp --format='YYYY MM DD'")
+ .queryParam("description", "Demo task definition for testing"))
+ .andExpect(status().isOk()));
this.mockMvc.perform(
get("/tasks/definitions")
- .param("page", "0")
- .param("size", "10")
- .param("sort", "taskName,ASC")
- .param("search", "")
- .param("manifest", "true")
+ .queryParam("page", "0")
+ .queryParam("size", "10")
+ .queryParam("sort", "taskName,ASC")
+ .queryParam("taskName", "")
+ .queryParam("manifest", "true")
)
- .andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- requestParameters(
- parameterWithName("page").description("The zero-based page number (optional)"),
- parameterWithName("size").description("The requested page size (optional)"),
- parameterWithName("search").description("The search string performed on the name (optional)"),
- parameterWithName("sort").description("The sort on the list (optional)"),
- parameterWithName("manifest").description("The flag to include the task manifest into the latest task execution (optional)")
+ queryParameters(
+ parameterWithName("page").optional().description("The zero-based page number"),
+ parameterWithName("size").optional().description("The requested page size"),
+ parameterWithName("taskName").optional().description("The task name to limit the results"),
+ parameterWithName("sort").optional().description("The sort on the list"),
+ parameterWithName("manifest").optional().description("The flag to include the task manifest into the latest task execution")
),
responseFields(
subsectionWithPath("_embedded.taskDefinitionResourceList")
@@ -112,18 +118,23 @@ public void listAllTaskDefinitions() throws Exception {
}
@Test
- public void displayDetail() throws Exception {
+ void displayDetail() throws Exception {
+ this.documentation.dontDocument(
+ () -> this.mockMvc
+ .perform(post("/tasks/definitions").queryParam("name", "my-task")
+ .queryParam("definition", "timestamp --format='YYYY MM DD'")
+ .queryParam("description", "Demo task definition for testing"))
+ .andExpect(status().isOk()));
this.mockMvc.perform(
get("/tasks/definitions/{my-task}","my-task")
- .param("manifest", "true"))
- .andDo(print())
+ .queryParam("manifest", "true"))
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
pathParameters(
- parameterWithName("my-task").description("The name of an existing task definition (required)")
+ parameterWithName("my-task").description("The name of an existing task definition")
),
- requestParameters(
- parameterWithName("manifest").description("The flag to include the task manifest into the latest task execution (optional)")
+ queryParameters(
+ parameterWithName("manifest").optional().description("The flag to include the task manifest into the latest task execution")
),
responseFields(
fieldWithPath("name").description("The name of the created task definition"),
@@ -140,17 +151,22 @@ public void displayDetail() throws Exception {
}
@Test
- public void taskDefinitionDelete() throws Exception {
+ void taskDefinitionDelete() throws Exception {
+ this.documentation.dontDocument(
+ () -> this.mockMvc
+ .perform(post("/tasks/definitions").queryParam("name", "my-task")
+ .queryParam("definition", "timestamp --format='YYYY MM DD'")
+ .queryParam("description", "Demo task definition for testing"))
+ .andExpect(status().isOk()));
this.mockMvc.perform(
delete("/tasks/definitions/{my-task}", "my-task")
- .param("cleanup", "true"))
- .andDo(print())
+ .queryParam("cleanup", "true"))
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
pathParameters(
- parameterWithName("my-task").description("The name of an existing task definition (required)")),
- requestParameters(
- parameterWithName("cleanup").description("The flag to indicate if the associated task executions needed to be cleaned up")
+ parameterWithName("my-task").description("The name of an existing task definition")),
+ queryParameters(
+ parameterWithName("cleanup").optional().description("The flag to indicate if the associated task executions needed to be cleaned up")
)
));
}
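
For context on the JUnit 4 to JUnit 5 migration applied throughout these documentation tests, here is a minimal standalone sketch of the method-ordering equivalence; the class and method names are illustrative only and are not part of the change itself.

import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;

// MethodOrderer.MethodName runs test methods in alphabetical order by method name,
// the JUnit 5 counterpart of JUnit 4's @FixMethodOrder(MethodSorters.NAME_ASCENDING).
@TestMethodOrder(MethodOrderer.MethodName.class)
class OrderingSketch {

	@Test
	void aCreateDefinition() { /* runs first */ }

	@Test
	void bListDefinitions() { /* runs second */ }
}
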
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskExecutionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskExecutionsDocumentation.java
index 3db635cda1..9211b54e60 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskExecutionsDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskExecutionsDocumentation.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2017-2019 the original author or authors.
+ * Copyright 2017-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,13 +16,19 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.FixMethodOrder;
-import org.junit.Test;
-import org.junit.runners.MethodSorters;
+import java.util.concurrent.atomic.AtomicReference;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.MethodOrderer;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestMethodOrder;
import org.springframework.cloud.dataflow.core.ApplicationType;
+import org.springframework.restdocs.payload.JsonFieldType;
+import org.springframework.test.web.servlet.MvcResult;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.delete;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get;
@@ -32,7 +38,7 @@
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@@ -43,67 +49,101 @@
* @author Glenn Renfro
* @author David Turanski
* @author Gunnar Hillert
+ * @author Corneil du Plessis
*/
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-public class TaskExecutionsDocumentation extends BaseDocumentation {
-
- @Before
- public void setup() throws Exception {
- registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE");
+@SuppressWarnings("NewClassNamingConvention")
+@TestMethodOrder(MethodOrderer.MethodName.class)
+class TaskExecutionsDocumentation extends BaseDocumentation {
+ @BeforeEach
+ void setup() throws Exception {
+ registerApp(ApplicationType.task, "timestamp", "3.0.0");
createTaskDefinition("taskA");
createTaskDefinition("taskB");
-
+ executeTask("taskA");
+ executeTask("taskB");
}
- @After
- public void tearDown() throws Exception {
+
+ @AfterEach
+ void tearDown() throws Exception {
+ cleanupTaskExecutions("taskA");
+ cleanupTaskExecutions("taskB");
destroyTaskDefinition("taskA");
destroyTaskDefinition("taskB");
unregisterApp(ApplicationType.task, "timestamp");
}
@Test
- public void launchTask() throws Exception {
+ void launchTaskBoot3() throws Exception {
this.mockMvc.perform(
- post("/tasks/executions")
- .param("name", "taskA")
- .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3")
- .param("arguments", "--server.port=8080 --foo=bar"))
+ post("/tasks/executions/launch")
+ .queryParam("name", "taskA")
+ .queryParam("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3")
+ .queryParam("arguments", "--server.port=8080 --foo=bar")
+ )
+ .andExpect(status().isCreated())
+ .andDo(this.documentationHandler.document(
+ queryParameters(
+ parameterWithName("name").description("The name of the task definition to launch"),
+ parameterWithName("properties").optional()
+ .description("Application and Deployer properties to use while launching."),
+ parameterWithName("arguments").optional()
+ .description("Command line arguments to pass to the task.")),
+ responseFields(
+ fieldWithPath("executionId").description("The id of the task execution"),
+ subsectionWithPath("_links.self").description("Link to the task execution resource"),
+ subsectionWithPath("_links.tasks/logs").type(fieldWithPath("_links.tasks/logs").ignored().optional()).description("Link to the task execution logs").optional()
+ )
+ )
+ );
+ }
+
+ @Test
+ void launchTask() throws Exception {
+ this.mockMvc.perform(
+ post("/tasks/executions")
+ .queryParam("name", "taskA")
+ .queryParam("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3")
+ .queryParam("arguments", "--server.port=8080 --foo=bar")
+ )
.andExpect(status().isCreated())
.andDo(this.documentationHandler.document(
- requestParameters(
- parameterWithName("name").description("The name of the task definition to launch"),
- parameterWithName("properties").optional()
- .description("Application and Deployer properties to use while launching"),
- parameterWithName("arguments").optional()
- .description("Command line arguments to pass to the task"))));
+ queryParameters(
+ parameterWithName("name").description("The name of the task definition to launch"),
+ parameterWithName("properties").optional()
+ .description("Application and Deployer properties to use while launching."),
+ parameterWithName("arguments").optional()
+ .description("Command line arguments to pass to the task.")
+ )
+ )
+ );
}
@Test
- public void getTaskCurrentCount() throws Exception {
+ void getTaskCurrentCount() throws Exception {
this.mockMvc.perform(
- get("/tasks/executions/current"))
- .andDo(print())
+ get("/tasks/executions/current")
+ )
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- responseFields(
- fieldWithPath("[].name").description("The name of the platform instance (account)"),
- fieldWithPath("[].type").description("The platform type"),
- fieldWithPath("[].maximumTaskExecutions").description("The number of maximum task execution"),
- fieldWithPath("[].runningExecutionCount").description("The number of running executions")
- )
+ responseFields(
+ fieldWithPath("[].name").description("The name of the platform instance (account)"),
+ fieldWithPath("[].type").description("The platform type"),
+ fieldWithPath("[].maximumTaskExecutions").description("The number of maximum task execution"),
+ fieldWithPath("[].runningExecutionCount").description("The number of running executions")
+ )
));
}
@Test
- public void launchTaskDisplayDetail() throws Exception {
+ void getTaskDisplayDetail() throws Exception {
this.mockMvc.perform(
- get("/tasks/executions/{id}", "1"))
- .andDo(print())
+ get("/tasks/executions/{id}", "1")
+ )
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
pathParameters(
- parameterWithName("id").description("The id of an existing task execution (required)")
+ parameterWithName("id").description("The id of an existing task execution")
),
responseFields(
fieldWithPath("executionId").description("The id of the task execution"),
@@ -121,131 +161,301 @@ public void launchTaskDisplayDetail() throws Exception {
"null if task execution does not have parent"),
fieldWithPath("resourceUrl").description("The resource URL that defines the task that was executed"),
subsectionWithPath("appProperties").description("The application properties of the task execution"),
- subsectionWithPath("deploymentProperties").description("The deployment properties of the task exectuion"),
subsectionWithPath("deploymentProperties").description("The deployment properties of the task execution"),
subsectionWithPath("platformName").description("The platform selected for the task execution"),
- subsectionWithPath("_links.self").description("Link to the task execution resource")
+ subsectionWithPath("_links.self").description("Link to the task execution resource"),
+ subsectionWithPath("_links.tasks/logs").description("Link to the task execution logs")
)
));
}
@Test
- public void listTaskExecutions() throws Exception {
- documentation.dontDocument( () -> this.mockMvc.perform(
- post("/tasks/executions")
- .param("name", "taskB")
- .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3")
- .param("arguments", "--server.port=8080 --foo=bar"))
+ void getTaskDisplayDetailByExternalId() throws Exception {
+ final AtomicReference<String> externalExecutionId = new AtomicReference<>(null);
+ documentation.dontDocument(() -> {
+ MvcResult mvcResult = this.mockMvc.perform(
+ get("/tasks/executions")
+ .queryParam("page", "0")
+ .queryParam("size", "20"))
+ .andExpect(status().isOk()).andReturn();
+ ObjectMapper mapper = new ObjectMapper();
+ JsonNode node = mapper.readTree(mvcResult.getResponse().getContentAsString());
+ JsonNode list = node.get("_embedded").get("taskExecutionResourceList");
+ JsonNode first = list.get(0);
+ externalExecutionId.set(first.get("externalExecutionId").asText());
+ return externalExecutionId.get();
+ });
+
+ this.mockMvc.perform(
+ get("/tasks/executions/external/{externalExecutionId}", externalExecutionId.get()).queryParam("platform", "default")
+ )
+ .andExpect(status().isOk())
+ .andDo(this.documentationHandler.document(
+ pathParameters(
+ parameterWithName("externalExecutionId").description("The external ExecutionId of an existing task execution")
+ ),
+ queryParameters(
+ parameterWithName("platform").optional().description("The name of the platform.")
+ ),
+ responseFields(
+ fieldWithPath("executionId").description("The id of the task execution"),
+ fieldWithPath("exitCode").description("The exit code of the task execution"),
+ fieldWithPath("taskName").description("The task name related to the task execution"),
+ fieldWithPath("startTime").description("The start time of the task execution"),
+ fieldWithPath("endTime").description("The end time of the task execution"),
+ fieldWithPath("exitMessage").description("The exit message of the task execution"),
+ fieldWithPath("arguments").description("The arguments of the task execution"),
+ fieldWithPath("jobExecutionIds").description("The job executions ids of the task executions"),
+ fieldWithPath("errorMessage").description("The error message of the task execution"),
+ fieldWithPath("externalExecutionId").description("The external id of the task execution"),
+ fieldWithPath("taskExecutionStatus").description("The status of the task execution"),
+ fieldWithPath("parentExecutionId").description("The id of parent task execution, " +
+ "null if task execution does not have parent"),
+ fieldWithPath("resourceUrl").description("The resource URL that defines the task that was executed"),
+ subsectionWithPath("appProperties").description("The application properties of the task execution"),
+ subsectionWithPath("deploymentProperties").description("The deployment properties of the task execution"),
+ subsectionWithPath("platformName").description("The platform selected for the task execution"),
+ subsectionWithPath("_links.self").description("Link to the task execution resource"),
+ subsectionWithPath("_links.tasks/logs").description("Link to the task execution logs")
+ )
+ ));
+ }
+
+ @Test
+ void listTaskExecutions() throws Exception {
+ documentation.dontDocument(() -> this.mockMvc.perform(
+ post("/tasks/executions")
+ .queryParam("name", "taskB")
+ .queryParam("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3")
+ .queryParam("arguments", "--server.port=8080 --foo=bar")
+ )
.andExpect(status().isCreated()));
this.mockMvc.perform(
- get("/tasks/executions")
- .param("page", "0")
- .param("size", "10"))
- .andDo(print())
+ get("/tasks/executions")
+ .queryParam("page", "0")
+ .queryParam("size", "10")
+ .queryParam("sort", "END_TIME,desc"))
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
- parameterWithName("page")
- .description("The zero-based page number (optional)"),
- parameterWithName("size")
- .description("The requested page size (optional)")),
+ queryParameters(
+ parameterWithName("page").optional()
+ .description("The zero-based page number"),
+ parameterWithName("size").optional()
+ .description("The requested page size"),
+ parameterWithName("sort").optional()
+ .description("The sort criteria. column name and optional sort direction. Example: END_TIME,desc")
+ ),
+ responseFields(
+ subsectionWithPath("_embedded.taskExecutionResourceList")
+ .description("Contains a collection of Task Executions/"),
+ subsectionWithPath("_links.self").description("Link to the task execution resource"),
+ subsectionWithPath("_links.first")
+ .description("Link to the first page of task execution resources")
+ .type(JsonFieldType.OBJECT)
+ .optional(),
+ subsectionWithPath("_links.last")
+ .description("Link to the last page of task execution resources")
+ .type(JsonFieldType.OBJECT)
+ .optional(),
+ subsectionWithPath("_links.next")
+ .description("Link to the next page of task execution resources")
+ .type(JsonFieldType.OBJECT)
+ .optional(),
+ subsectionWithPath("_links.prev")
+ .description("Link to the previous page of task execution resources")
+ .type(JsonFieldType.OBJECT)
+ .optional(),
+ subsectionWithPath("page").description("Pagination properties"))));
+ }
+
+ @Test
+ void listTaskThinExecutions() throws Exception {
+ documentation.dontDocument(() -> this.mockMvc.perform(
+ post("/tasks/executions")
+ .queryParam("name", "taskB")
+ .queryParam("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3")
+ .queryParam("arguments", "--server.port=8080 --foo=bar")
+ )
+ .andExpect(status().isCreated()));
+
+ this.mockMvc.perform(
+ get("/tasks/thinexecutions")
+ .queryParam("page", "0")
+ .queryParam("size", "10")
+ .queryParam("sort", "END_TIME,desc")
+ )
+ .andExpect(status().isOk()).andDo(this.documentationHandler.document(
+ queryParameters(
+ parameterWithName("page").optional()
+ .description("The zero-based page number"),
+ parameterWithName("size").optional()
+ .description("The requested page size"),
+ parameterWithName("sort").optional()
+ .description("The sort criteria. column name and optional sort direction (optional). Example: END_TIME,desc")
+ ),
responseFields(
- subsectionWithPath("_embedded.taskExecutionResourceList")
- .description("Contains a collection of Task Executions/"),
- subsectionWithPath("_links.self").description("Link to the task execution resource"),
- subsectionWithPath("page").description("Pagination properties"))));
+ subsectionWithPath("_embedded.taskExecutionThinResourceList")
+ .description("Contains a collection of thin Task Executions/"),
+ subsectionWithPath("_links.self").description("Link to the task execution resource"),
+ subsectionWithPath("_links.first")
+ .description("Link to the first page of task execution resources")
+ .type(JsonFieldType.OBJECT)
+ .optional(),
+ subsectionWithPath("_links.last")
+ .description("Link to the last page of task execution resources")
+ .type(JsonFieldType.OBJECT)
+ .optional(),
+ subsectionWithPath("_links.next")
+ .description("Link to the next page of task execution resources")
+ .type(JsonFieldType.OBJECT)
+ .optional(),
+ subsectionWithPath("_links.prev")
+ .description("Link to the previous page of task execution resources")
+ .type(JsonFieldType.OBJECT)
+ .optional(),
+ subsectionWithPath("page").description("Pagination properties"))));
}
@Test
- public void listTaskExecutionsByName() throws Exception {
+ void listTaskThinExecutionsByName() throws Exception {
this.mockMvc.perform(
- get("/tasks/executions")
- .param("name", "taskB")
- .param("page", "0")
- .param("size", "10"))
- .andDo(print())
+ get("/tasks/thinexecutions")
+ .queryParam("name", "taskB")
+ .queryParam("page", "0")
+ .queryParam("size", "10")
+ .queryParam("sort", "END_TIME,desc")
+ )
.andExpect(status().isOk()).andDo(this.documentationHandler.document(
- requestParameters(
- parameterWithName("page")
- .description("The zero-based page number (optional)"),
- parameterWithName("size")
- .description("The requested page size (optional)"),
- parameterWithName("name")
- .description("The name associated with the task execution")),
+ queryParameters(
+ parameterWithName("page").optional()
+ .description("The zero-based page number"),
+ parameterWithName("size").optional()
+ .description("The requested page size"),
+ parameterWithName("name")
+ .description("The name associated with the task execution"),
+ parameterWithName("sort").optional()
+ .description("The sort criteria. column name and optional sort direction (optional). Example: END_TIME,desc")
+ ),
+ responseFields(
+ subsectionWithPath("_embedded.taskExecutionThinResourceList")
+ .description("Contains a collection of thin Task Executions/"),
+ subsectionWithPath("_links.self").description("Link to the task execution resource"),
+ subsectionWithPath("page").description("Pagination properties"))));
+ }
+ @Test
+ void listTaskExecutionsByName() throws Exception {
+ this.mockMvc.perform(
+ get("/tasks/executions")
+ .queryParam("name", "taskB")
+ .queryParam("page", "0")
+ .queryParam("size", "10")
+ .queryParam("sort", "END_TIME,desc")
+ )
+ .andExpect(status().isOk()).andDo(this.documentationHandler.document(
+ queryParameters(
+ parameterWithName("page").optional()
+ .description("The zero-based page number"),
+ parameterWithName("size").optional()
+ .description("The requested page size"),
+ parameterWithName("name")
+ .description("The name associated with the task execution"),
+ parameterWithName("sort").optional()
+ .description("The sort criteria. column name and optional sort direction (optional). Example: END_TIME,desc")),
responseFields(
- subsectionWithPath("_embedded.taskExecutionResourceList")
- .description("Contains a collection of Task Executions/"),
- subsectionWithPath("_links.self").description("Link to the task execution resource"),
- subsectionWithPath("page").description("Pagination properties"))));
+ subsectionWithPath("_embedded.taskExecutionResourceList")
+ .description("Contains a collection of Task Executions/"),
+ subsectionWithPath("_links.self").description("Link to the task execution resource"),
+ subsectionWithPath("page").description("Pagination properties"))));
}
@Test
- public void stopTask() throws Exception {
+ void stopTask() throws Exception {
this.mockMvc.perform(
- post("/tasks/executions")
- .param("name", "taskA")
- .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3")
- .param("arguments", "--server.port=8080 --foo=bar"))
+ post("/tasks/executions")
+ .queryParam("name", "taskA")
+ .queryParam("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3")
+ .queryParam("arguments", "--server.port=8080 --foo=bar")
+ )
.andExpect(status().isCreated());
this.mockMvc.perform(
- post("/tasks/executions/{id}", 1)
- .param("platform", "default"))
- .andDo(print())
+ post("/tasks/executions/{id}", 1)
+ )
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- pathParameters(
- parameterWithName("id").description("The ids of an existing task execution (required)")
- ),
- requestParameters(parameterWithName("platform")
- .description("The platform associated with the task execution(optional)"))));
+ pathParameters(
+ parameterWithName("id").description("The ids of an existing task execution")
+ )
+ )
+ );
}
@Test
- public void taskExecutionRemove() throws Exception {
+ void taskExecutionRemove() throws Exception {
- documentation.dontDocument( () -> this.mockMvc.perform(
- post("/tasks/executions")
- .param("name", "taskB")
- .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3")
- .param("arguments", "--server.port=8080 --foo=bar"))
+ documentation.dontDocument(() -> this.mockMvc.perform(
+ post("/tasks/executions")
+ .queryParam("name", "taskB")
+ .queryParam("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3")
+ .queryParam("arguments", "--server.port=8080 --foo=bar"))
.andExpect(status().isCreated()));
this.mockMvc.perform(
- delete("/tasks/executions/{ids}?action=CLEANUP", "1"))
- .andDo(print())
+ delete("/tasks/executions/{ids}?action=CLEANUP", "1"))
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- requestParameters(parameterWithName("action").description("Optional. Defaults to: CLEANUP.")),
+ queryParameters(parameterWithName("action").optional().description("Defaults to: CLEANUP.")),
pathParameters(parameterWithName("ids")
- .description("The id of an existing task execution (required). Multiple comma separated values are accepted."))
+ .description("The id of an existing task execution. Multiple comma separated values are accepted."))
));
}
@Test
- public void taskExecutionRemoveAndTaskDataRemove() throws Exception {
+ void taskExecutionRemoveAndTaskDataRemove() throws Exception {
this.mockMvc.perform(
- delete("/tasks/executions/{ids}?action=CLEANUP,REMOVE_DATA", "1,2"))
- .andDo(print())
+ delete("/tasks/executions/{ids}?action=CLEANUP,REMOVE_DATA", "1,2"))
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- requestParameters(parameterWithName("action").description("Using both actions CLEANUP and REMOVE_DATA simultaneously.")),
+ queryParameters(
+ parameterWithName("action").optional().description("Using both actions CLEANUP and REMOVE_DATA simultaneously.")
+ ),
pathParameters(parameterWithName("ids")
- .description("Providing 2 comma separated task execution id values."))
+ .description("Providing 2 comma separated task execution id values.")
+ )
));
}
- private void createTaskDefinition(String taskName) throws Exception{
- documentation.dontDocument( () -> this.mockMvc.perform(
+ private void createTaskDefinition(String taskName) throws Exception {
+ documentation.dontDocument(() ->
+ this.mockMvc.perform(
post("/tasks/definitions")
- .param("name", taskName)
- .param("definition", "timestamp --format='yyyy MM dd'"))
- .andExpect(status().isOk()));
+ .queryParam("name", taskName)
+ .queryParam("definition", "timestamp --format='yyyy MM dd'")
+ )
+ );
+ }
+ private void cleanupTaskExecutions(String taskName) throws Exception {
+ documentation.dontDocument(() -> this.mockMvc.perform(
+ delete("/tasks/executions")
+ .queryParam("name", taskName)
+ )
+ );
+ }
+ private void destroyTaskDefinition(String taskName) throws Exception {
+ documentation.dontDocument(() ->
+ this.mockMvc.perform(
+ delete("/tasks/definitions/{name}", taskName)
+ )
+ );
}
- private void destroyTaskDefinition(String taskName) throws Exception{
- documentation.dontDocument( () -> this.mockMvc.perform(
- delete("/tasks/definitions/{name}", taskName))
- .andExpect(status().isOk()));
+ private void executeTask(String taskName) throws Exception {
+ documentation.dontDocument(() ->
+ this.mockMvc.perform(
+ post("/tasks/executions")
+ .queryParam("name", taskName)
+ .queryParam("arguments", "--server.port=8080 --foo=bar")
+ )
+ );
}
}
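
The getTaskDisplayDetailByExternalId test above first pulls the external execution id out of the paged HAL response before exercising the documented endpoint. A minimal Jackson sketch of that extraction follows; the class and method names are hypothetical, and the JSON path mirrors the response structure used in the test rather than a separately documented contract.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

class ExternalExecutionIdSketch {

	// Returns the externalExecutionId of the first entry in a /tasks/executions page.
	static String firstExternalExecutionId(String responseBody) throws Exception {
		ObjectMapper mapper = new ObjectMapper();
		JsonNode root = mapper.readTree(responseBody);
		return root.get("_embedded")
				.get("taskExecutionResourceList")
				.get(0)
				.get("externalExecutionId")
				.asText();
	}
}
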
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskLogsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskLogsDocumentation.java
index b5e78a9544..742811e486 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskLogsDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskLogsDocumentation.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2019 the original author or authors.
+ * Copyright 2019-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,31 +16,37 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
-import org.junit.FixMethodOrder;
-import org.junit.Test;
-import org.junit.runners.MethodSorters;
+import java.time.Duration;
+
+import org.awaitility.Awaitility;
+import org.junit.jupiter.api.MethodOrderer;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestMethodOrder;
import org.springframework.cloud.dataflow.core.ApplicationType;
import org.springframework.cloud.dataflow.server.repository.TaskDeploymentRepository;
+import org.springframework.cloud.dataflow.server.service.TaskExecutionService;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
-import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* Documentation for the {@code /tasks/logs} endpoint.
*
* @author Ilayaperumal Gopinathan
+ * @author Glenn Renfro
+ * @author Corneil du Plessis
*/
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-public class TaskLogsDocumentation extends BaseDocumentation {
+@SuppressWarnings("NewClassNamingConvention")
+@TestMethodOrder(MethodOrderer.MethodName.class)
+class TaskLogsDocumentation extends BaseDocumentation {
@Test
- public void getLogsByTaskId() throws Exception {
- registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE");
+ void getLogsByTaskId() throws Exception {
+ registerApp(ApplicationType.task, "timestamp", "3.0.0");
String taskName = "taskA";
documentation.dontDocument( () -> this.mockMvc.perform(
post("/tasks/definitions")
@@ -53,15 +59,16 @@ public void getLogsByTaskId() throws Exception {
.andExpect(status().isCreated());
TaskDeploymentRepository taskDeploymentRepository =
springDataflowServer.getWebApplicationContext().getBean(TaskDeploymentRepository.class);
- Thread.sleep(30000);
+ TaskExecutionService service = springDataflowServer.getWebApplicationContext().getBean(TaskExecutionService.class);
+ Awaitility.await().atMost(Duration.ofMillis(30000)).until(() -> service.getLog("default",
+ taskDeploymentRepository.findTopByTaskDefinitionNameOrderByCreatedOnAsc(taskName).getTaskDeploymentId()).length() > 0);
this.mockMvc.perform(
get("/tasks/logs/"+taskDeploymentRepository.findTopByTaskDefinitionNameOrderByCreatedOnAsc(taskName)
.getTaskDeploymentId()).param("platformName", "default"))
- .andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- requestParameters(
- parameterWithName("platformName").description("The name of the platform the task is launched."))
+ queryParameters(
+ parameterWithName("platformName").optional().description("The name of the platform the task is launched."))
));
}
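
The change above replaces a fixed Thread.sleep(30000) with Awaitility polling, so the test proceeds as soon as the task log is available instead of always waiting the full 30 seconds. A minimal standalone sketch of that polling pattern, where fetchLog() is a hypothetical stand-in for the TaskExecutionService#getLog call used in the test:

import java.time.Duration;

import org.awaitility.Awaitility;

class AwaitLogSketch {

	// Hypothetical supplier of the task log output.
	static String fetchLog() {
		return "timestamp task output";
	}

	public static void main(String[] args) {
		// Poll until the log is non-empty, giving up after 30 seconds.
		Awaitility.await()
				.atMost(Duration.ofSeconds(30))
				.until(() -> fetchLog().length() > 0);
	}
}
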
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskPlatformDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskPlatformDocumentation.java
index efb0cb7e34..36b2479e96 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskPlatformDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskPlatformDocumentation.java
@@ -16,40 +16,40 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
-import org.junit.FixMethodOrder;
-import org.junit.Test;
-import org.junit.runners.MethodSorters;
+import org.junit.jupiter.api.MethodOrderer;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestMethodOrder;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get;
import static org.springframework.restdocs.payload.PayloadDocumentation.responseFields;
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
-import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* Documentation for the /tasks/platforms endpoint.
*
* @author Ilayaperumal Gopinathan
+ * @author Corneil du Plessis
*/
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-public class TaskPlatformDocumentation extends BaseDocumentation {
+@SuppressWarnings("NewClassNamingConvention")
+@TestMethodOrder(MethodOrderer.MethodName.class)
+class TaskPlatformDocumentation extends BaseDocumentation {
@Test
- public void listTaskPlatforms() throws Exception {
+ void listTaskPlatforms() throws Exception {
this.mockMvc.perform(
get("/tasks/platforms")
.param("page", "0")
.param("size", "10"))
- .andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- requestParameters(
- parameterWithName("page")
- .description("The zero-based page number (optional)"),
- parameterWithName("size")
- .description("The requested page size (optional)")),
+ queryParameters(
+ parameterWithName("page").optional()
+ .description("The zero-based page number"),
+ parameterWithName("size").optional()
+ .description("The requested page size")),
responseFields(
subsectionWithPath("_embedded.launcherResourceList")
.description("Contains a collection of Platform accounts for tasks"),
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskSchedulerDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskSchedulerDocumentation.java
index f337d6cf89..bb8b4fdfae 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskSchedulerDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskSchedulerDocumentation.java
@@ -16,11 +16,11 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.FixMethodOrder;
-import org.junit.Test;
-import org.junit.runners.MethodSorters;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.MethodOrderer;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestMethodOrder;
import org.springframework.cloud.dataflow.core.ApplicationType;
@@ -31,7 +31,7 @@
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
-import static org.springframework.restdocs.request.RequestDocumentation.requestParameters;
+import static org.springframework.restdocs.request.RequestDocumentation.queryParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@@ -39,68 +39,70 @@
* Documentation for the /tasks/schedules endpoint.
*
* @author Glenn Renfro
+ * @author Corneil du Plessis
*/
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-public class TaskSchedulerDocumentation extends BaseDocumentation {
+@SuppressWarnings({"NewClassNamingConvention","SameParameterValue"})
+@TestMethodOrder(MethodOrderer.MethodName.class)
+class TaskSchedulerDocumentation extends BaseDocumentation {
- @Before
- public void setup() throws Exception {
- registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE");
+ @BeforeEach
+ void setup() throws Exception {
+ registerApp(ApplicationType.task, "timestamp", "3.0.0");
createTaskDefinition("mytaskname");
}
- @After
- public void tearDown() throws Exception {
+ @AfterEach
+ void tearDown() throws Exception {
destroyTaskDefinition("mytaskname");
unregisterApp(ApplicationType.task, "timestamp");
}
@Test
- public void createSchedule() throws Exception {
+ void createSchedule() throws Exception {
this.mockMvc.perform(
post("/tasks/schedules")
- .param("scheduleName", "myschedule")
- .param("taskDefinitionName", "mytaskname")
- .param("properties", "scheduler.cron.expression=00 22 17 ? *")
- .param("arguments", "--foo=bar"))
+ .queryParam("scheduleName", "myschedule")
+ .queryParam("taskDefinitionName", "mytaskname")
+ .queryParam("platform", "default")
+ .queryParam("properties", "deployer.cron.expression=00 22 17 ? *")
+ .queryParam("arguments", "--foo=bar"))
.andExpect(status().isCreated())
.andDo(this.documentationHandler.document(
- requestParameters(
+ queryParameters(
parameterWithName("scheduleName").description("The name for the created schedule"),
+ parameterWithName("platform").optional().description("The name of the platform the task is launched"),
parameterWithName("taskDefinitionName")
.description("The name of the task definition to be scheduled"),
parameterWithName("properties")
.description("the properties that are required to schedule and launch the task"),
- parameterWithName("arguments").description("the command line arguments to be used for launching the task"))));
+ parameterWithName("arguments").optional().description("the command line arguments to be used for launching the task"))));
}
@Test
- public void deleteSchedule() throws Exception {
+ void deleteSchedule() throws Exception {
this.mockMvc.perform(
delete("/tasks/schedules/{scheduleName}", "mytestschedule"))
- .andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
pathParameters(parameterWithName("scheduleName")
- .description("The name of an existing schedule (required)"))));
+ .description("The name of an existing schedule"))));
}
@Test
- public void listFilteredSchedules() throws Exception {
+ void listFilteredSchedules() throws Exception {
this.mockMvc.perform(
get("/tasks/schedules/instances/{task-definition-name}", "FOO")
- .param("page", "0")
- .param("size", "10"))
- .andDo(print())
+ .queryParam("page", "0")
+ .queryParam("size", "10"))
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
pathParameters(parameterWithName("task-definition-name")
- .description("Filter schedules based on the specified task definition (required)")),
- requestParameters(
- parameterWithName("page")
- .description("The zero-based page number (optional)"),
- parameterWithName("size")
- .description("The requested page size (optional)")),
+ .description("Filter schedules based on the specified task definition")),
+ queryParameters(
+ parameterWithName("page").optional()
+ .description("The zero-based page number"),
+ parameterWithName("size").optional()
+ .description("The requested page size")),
responseFields(
subsectionWithPath("_embedded.scheduleInfoResourceList")
.description("Contains a collection of Schedules/"),
@@ -109,19 +111,18 @@ public void listFilteredSchedules() throws Exception {
}
@Test
- public void listAllSchedules() throws Exception {
+ void listAllSchedules() throws Exception {
this.mockMvc.perform(
get("/tasks/schedules")
- .param("page", "0")
- .param("size", "10"))
- .andDo(print())
+ .queryParam("page", "0")
+ .queryParam("size", "10"))
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
- requestParameters(
- parameterWithName("page")
- .description("The zero-based page number (optional)"),
- parameterWithName("size")
- .description("The requested page size (optional)")),
+ queryParameters(
+ parameterWithName("page").optional()
+ .description("The zero-based page number"),
+ parameterWithName("size").optional()
+ .description("The requested page size")),
responseFields(
subsectionWithPath("_embedded.scheduleInfoResourceList")
.description("Contains a collection of Schedules/"),
@@ -132,8 +133,8 @@ public void listAllSchedules() throws Exception {
private void createTaskDefinition(String taskName) throws Exception{
documentation.dontDocument( () -> this.mockMvc.perform(
post("/tasks/definitions")
- .param("name", taskName)
- .param("definition", "timestamp --format='yyyy MM dd'"))
+ .queryParam("name", taskName)
+ .queryParam("definition", "timestamp --format='yyyy MM dd'"))
.andExpect(status().isOk()));
}
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskValidationDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskValidationDocumentation.java
index cf462ce569..7f9a20bc24 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskValidationDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskValidationDocumentation.java
@@ -16,11 +16,11 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.FixMethodOrder;
-import org.junit.Test;
-import org.junit.runners.MethodSorters;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.MethodOrderer;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestMethodOrder;
import org.springframework.cloud.dataflow.core.ApplicationType;
@@ -38,30 +38,32 @@
* Documentation for the /tasks/validation endpoint.
*
* @author Glenn Renfro
+ * @author Corneil du Plessis
*/
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-public class TaskValidationDocumentation extends BaseDocumentation {
+@SuppressWarnings("NewClassNamingConvention")
+@TestMethodOrder(MethodOrderer.MethodName.class)
+class TaskValidationDocumentation extends BaseDocumentation {
- @Before
- public void setup() throws Exception {
- registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE");
+ @BeforeEach
+ void setup() throws Exception {
+ registerApp(ApplicationType.task, "timestamp", "3.0.0");
createTaskDefinition("taskC");
}
- @After
- public void tearDown() throws Exception {
+ @AfterEach
+ void tearDown() throws Exception {
destroyTaskDefinition("taskC");
unregisterApp(ApplicationType.task, "timestamp");
}
- @Test
- public void validateTask() throws Exception {
+ @Test
+ void validateTask() throws Exception {
this.mockMvc.perform(
get("/tasks/validation/{name}", "taskC"))
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
pathParameters(
- parameterWithName("name").description("The name of a task definition to be validated (required)")
+ parameterWithName("name").description("The name of a task definition to be validated")
),
responseFields(
fieldWithPath("appName").description("The name of a task definition"),
diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TasksInfoDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TasksInfoDocumentation.java
index 82adf0363d..c63bd79079 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TasksInfoDocumentation.java
+++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TasksInfoDocumentation.java
@@ -16,11 +16,11 @@
package org.springframework.cloud.dataflow.server.rest.documentation;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.FixMethodOrder;
-import org.junit.Test;
-import org.junit.runners.MethodSorters;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.MethodOrderer;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestMethodOrder;
import org.springframework.cloud.dataflow.core.ApplicationType;
@@ -30,37 +30,37 @@
import static org.springframework.restdocs.payload.PayloadDocumentation.fieldWithPath;
import static org.springframework.restdocs.payload.PayloadDocumentation.responseFields;
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
-import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* Documentation for the /tasks/info endpoint.
*
* @author Ilayaperumal Gopinathan
+ * @author Corneil du Plessis
*/
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-public class TasksInfoDocumentation extends BaseDocumentation {
+@SuppressWarnings("NewClassNamingConvention")
+@TestMethodOrder(MethodOrderer.MethodName.class)
+class TasksInfoDocumentation extends BaseDocumentation {
- @Before
- public void setup() throws Exception {
- registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE");
+ @BeforeEach
+ void setup() throws Exception {
+ registerApp(ApplicationType.task, "timestamp", "3.0.0");
createTaskDefinition("taskA");
createTaskDefinition("taskB");
}
- @After
- public void tearDown() throws Exception {
+ @AfterEach
+ void tearDown() throws Exception {
destroyTaskDefinition("taskA");
destroyTaskDefinition("taskB");
unregisterApp(ApplicationType.task, "timestamp");
}
@Test
- public void getTaskExecutionsInfo() throws Exception {
+ void getTaskExecutionsInfo() throws Exception {
this.mockMvc.perform(
get("/tasks/info/executions?completed=false"))
- .andDo(print())
.andExpect(status().isOk())
.andDo(this.documentationHandler.document(
responseFields(
diff --git a/spring-cloud-dataflow-classic-docs/src/test/resources/META-INF/build-info.properties b/spring-cloud-dataflow-classic-docs/src/test/resources/META-INF/build-info.properties
new file mode 100644
index 0000000000..292487b111
--- /dev/null
+++ b/spring-cloud-dataflow-classic-docs/src/test/resources/META-INF/build-info.properties
@@ -0,0 +1,5 @@
+build.artifact=spring-cloud-dataflow-server
+build.group=org.springframework.cloud
+build.name=Spring Cloud Data Flow Server
+build.time=2024-04-25T12\:36\:37.169Z
+build.version=2.11.3-SNAPSHOT
diff --git a/spring-cloud-dataflow-classic-docs/src/test/resources/git.properties b/spring-cloud-dataflow-classic-docs/src/test/resources/git.properties
new file mode 100644
index 0000000000..3a17e69030
--- /dev/null
+++ b/spring-cloud-dataflow-classic-docs/src/test/resources/git.properties
@@ -0,0 +1,5 @@
+#Generated by Git-Commit-Id-Plugin
+git.branch=main
+git.commit.id.abbrev=fddafed
+git.commit.id.full=fddafed39b919981cbb5bd04bd7fb5266fa25309
+git.commit.time=2024-04-24T13\:35\:29+0200
diff --git a/spring-cloud-dataflow-classic-docs/src/test/resources/org/springframework/restdocs/templates/asciidoctor/path-parameters.snippet b/spring-cloud-dataflow-classic-docs/src/test/resources/org/springframework/restdocs/templates/asciidoctor/path-parameters.snippet
index da64dda944..dfd62e5661 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/resources/org/springframework/restdocs/templates/asciidoctor/path-parameters.snippet
+++ b/spring-cloud-dataflow-classic-docs/src/test/resources/org/springframework/restdocs/templates/asciidoctor/path-parameters.snippet
@@ -1,10 +1,11 @@
-{{path}}
+`*{{path}}*`
|===
-|Parameter|Description
+|Parameter - Description
{{#parameters}}
-|{{#tableCellContent}}`+{{name}}+`{{/tableCellContent}}
-|{{#tableCellContent}}{{description}}{{/tableCellContent}}
+|{{#tableCellContent}}`*{{name}}*` {{#optional}} [small]#_(optional)_#{{/optional}}{{^optional}} [small]#*(required)*#{{/optional}}{{/tableCellContent}}
+
+{{#tableCellContent}}{{description}}{{/tableCellContent}}
{{/parameters}}
|===
\ No newline at end of file
diff --git a/spring-cloud-dataflow-classic-docs/src/test/resources/org/springframework/restdocs/templates/asciidoctor/query-parameters.snippet b/spring-cloud-dataflow-classic-docs/src/test/resources/org/springframework/restdocs/templates/asciidoctor/query-parameters.snippet
new file mode 100644
index 0000000000..a0712f5952
--- /dev/null
+++ b/spring-cloud-dataflow-classic-docs/src/test/resources/org/springframework/restdocs/templates/asciidoctor/query-parameters.snippet
@@ -0,0 +1,9 @@
+|===
+|Parameter - Description
+{{#parameters}}
+|{{#tableCellContent}}`*{{name}}*` {{#optional}} [small]#_(optional)_#{{/optional}}{{^optional}} [small]#*(required)*#{{/optional}}{{/tableCellContent}}
+
+{{#tableCellContent}}{{description}}{{/tableCellContent}}
+
+{{/parameters}}
+|===
\ No newline at end of file
diff --git a/spring-cloud-dataflow-classic-docs/src/test/resources/rest-docs-config.yml b/spring-cloud-dataflow-classic-docs/src/test/resources/rest-docs-config.yml
index 061f02c72c..da305f6faa 100644
--- a/spring-cloud-dataflow-classic-docs/src/test/resources/rest-docs-config.yml
+++ b/spring-cloud-dataflow-classic-docs/src/test/resources/rest-docs-config.yml
@@ -1,4 +1,6 @@
spring:
+ main:
+ allow-bean-definition-overriding: true
cloud:
dataflow:
features:
@@ -8,6 +10,9 @@ spring:
metrics:
collector:
uri: http://localhost:${fakeMetricsCollector.port}
+ deployer:
+ local:
+ maximumConcurrentTasks: 50
autoconfigure:
exclude: >-
org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration,
@@ -20,7 +25,10 @@ spring:
org.springframework.cloud.dataflow.shell.autoconfigure.BaseShellAutoConfiguration,
org.springframework.cloud.dataflow.server.config.cloudfoundry.CloudFoundryTaskPlatformAutoConfiguration,
org.springframework.cloud.dataflow.server.config.kubernetes.KubernetesTaskPlatformAutoConfiguration
-maven:
- remoteRepositories:
- springRepo:
- url: https://repo.spring.io/libs-snapshot
+management:
+ info:
+ build:
+ enabled: true
+ git:
+ enabled: true
+ mode: full
\ No newline at end of file
diff --git a/spring-cloud-dataflow-common/README.md b/spring-cloud-dataflow-common/README.md
new file mode 100644
index 0000000000..781b568e5f
--- /dev/null
+++ b/spring-cloud-dataflow-common/README.md
@@ -0,0 +1 @@
+# spring-cloud-dataflow-common
diff --git a/spring-cloud-dataflow-common/pom.xml b/spring-cloud-dataflow-common/pom.xml
new file mode 100644
index 0000000000..1daa30ff73
--- /dev/null
+++ b/spring-cloud-dataflow-common/pom.xml
@@ -0,0 +1,106 @@
+
+
+ 4.0.0
+
+ spring-cloud-dataflow-common-parent
+ 3.0.0-SNAPSHOT
+ org.springframework.cloud
+ pom
+
+ Spring Cloud Dataflow Common Parent
+ Common utilities shared by the dataflow family
+
+
+ org.springframework.cloud
+ spring-cloud-dataflow-build
+ 3.0.0-SNAPSHOT
+ ../spring-cloud-dataflow-build
+
+
+
+ 0.10.2
+ 2.12.7
+
+
+
+ spring-cloud-dataflow-common-persistence
+ spring-cloud-dataflow-common-flyway
+ spring-cloud-dataflow-common-test-docker
+ spring-cloud-dataflow-common-test-docker-junit5
+ spring-cloud-dataflow-common-dependencies
+
+
+
+
+
+ com.github.zafarkhaja
+ java-semver
+ ${java-semver.version}
+
+
+ joda-time
+ joda-time
+ ${joda-time.version}
+
+
+
+
+
+ spring
+ true
+
+
+ spring-snapshots
+ Spring Snapshots
+ https://repo.spring.io/snapshot
+
+ true
+
+
+
+ spring-milestones
+ Spring Milestones
+ https://repo.spring.io/milestone
+
+ false
+
+
+
+ maven-central
+ Maven Central
+ https://repo.maven.apache.org/maven2
+
+ false
+
+
+
+
+
+ spring-snapshots
+ Spring Snapshots
+ https://repo.spring.io/snapshot
+
+ true
+
+
+
+ spring-milestones
+ Spring Milestones
+ https://repo.spring.io/milestone
+
+ false
+
+
+
+ maven-central
+ Maven Central
+ https://repo.maven.apache.org/maven2
+
+ false
+
+
+
+
+
+
+
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-dependencies/pom.xml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-dependencies/pom.xml
new file mode 100644
index 0000000000..9a05bdd4e0
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-dependencies/pom.xml
@@ -0,0 +1,95 @@
+
+
+ 4.0.0
+
+ spring-cloud-dataflow-dependencies-parent
+ org.springframework.cloud
+ 3.0.0-SNAPSHOT
+ ../../spring-cloud-dataflow-build/spring-cloud-dataflow-dependencies-parent
+
+ spring-cloud-dataflow-common-dependencies
+ 3.0.0-SNAPSHOT
+ pom
+ Spring Cloud Dataflow Common Dependencies
+ Spring Cloud Dataflow Common Dependencies
+
+
+
+
+
+ org.springframework.cloud
+ spring-cloud-dataflow-common-flyway
+ ${dataflow.version}
+
+
+ org.springframework.cloud
+ spring-cloud-dataflow-common-test-docker
+ ${dataflow.version}
+
+
+ org.springframework.cloud
+ spring-cloud-dataflow-common-test-docker-junit5
+ ${dataflow.version}
+
+
+
+
+
+ spring
+
+
+ spring-snapshots
+ Spring Snapshots
+ https://repo.spring.io/snapshot
+
+ true
+
+
+
+ spring-milestones
+ Spring Milestones
+ https://repo.spring.io/milestone
+
+ false
+
+
+
+ maven-central
+ Maven Central
+ https://repo.maven.apache.org/maven2
+
+ false
+
+
+
+
+
+ spring-snapshots
+ Spring Snapshots
+ https://repo.spring.io/snapshot
+
+ true
+
+
+
+ spring-milestones
+ Spring Milestones
+ https://repo.spring.io/milestone
+
+ false
+
+
+
+ maven-central
+ Maven Central
+ https://repo.maven.apache.org/maven2
+
+ false
+
+
+
+
+
+
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/pom.xml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/pom.xml
new file mode 100644
index 0000000000..d10b405347
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/pom.xml
@@ -0,0 +1,78 @@
+
+
+ 4.0.0
+
+ spring-cloud-dataflow-common-flyway
+ jar
+ Spring Cloud Dataflow Common Flyway Support
+ Spring Cloud Dataflow Common Flyway Support
+
+ org.springframework.cloud
+ spring-cloud-dataflow-common-parent
+ 3.0.0-SNAPSHOT
+
+
+
+
+ 10.10.0
+
+
+
+
+ org.springframework.boot
+ spring-boot
+
+
+ org.springframework
+ spring-jdbc
+
+
+ org.flywaydb
+ flyway-core
+
+
+ org.flywaydb
+ flyway-mysql
+
+
+ org.flywaydb
+ flyway-sqlserver
+
+
+ org.flywaydb
+ flyway-database-oracle
+
+
+ org.flywaydb
+ flyway-database-db2
+
+
+ org.flywaydb
+ flyway-database-postgresql
+
+
+ org.slf4j
+ slf4j-api
+
+
+ org.springframework.cloud
+ spring-cloud-dataflow-common-persistence
+ ${dataflow.version}
+
+
+ org.springframework.boot
+ spring-boot-starter-test
+ test
+
+
+
+
+
+
+ src/main/resources
+ true
+
+
+
+
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/MySQL57Database.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/MySQL57Database.java
new file mode 100644
index 0000000000..d548a6cc96
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/MySQL57Database.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2022-2022 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.flywaydb.database.mysql;
+
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.util.List;
+
+import org.flywaydb.core.api.MigrationVersion;
+import org.flywaydb.core.api.configuration.Configuration;
+import org.flywaydb.core.extensibility.Tier;
+import org.flywaydb.core.internal.database.base.Database;
+import org.flywaydb.core.internal.database.base.Table;
+import org.flywaydb.core.internal.jdbc.JdbcConnectionFactory;
+import org.flywaydb.core.internal.jdbc.StatementInterceptor;
+import org.flywaydb.database.mysql.mariadb.MariaDBDatabaseType;
+
+public class MySQL57Database extends Database<MySQLConnection> {
+
+ private final MySQLDatabase delegateDatabase;
+
+ public MySQL57Database(Configuration configuration, JdbcConnectionFactory jdbcConnectionFactory, StatementInterceptor statementInterceptor) {
+ this(configuration, jdbcConnectionFactory, statementInterceptor, new MySQLDatabase(configuration, jdbcConnectionFactory, statementInterceptor));
+ }
+
+ protected MySQL57Database(Configuration configuration, JdbcConnectionFactory jdbcConnectionFactory, StatementInterceptor statementInterceptor, MySQLDatabase delegateDatabase) {
+ super(configuration, jdbcConnectionFactory, statementInterceptor);
+ this.delegateDatabase = delegateDatabase;
+ }
+
+ @Override
+ public String getRawCreateScript(Table table, boolean baseline) {
+ return delegateDatabase.getRawCreateScript(table, baseline);
+ }
+
+ @Override
+ protected MySQLConnection doGetConnection(Connection connection) {
+ return delegateDatabase.doGetConnection(connection);
+ }
+
+ @Override
+ protected MigrationVersion determineVersion() {
+ return delegateDatabase.determineVersion();
+ }
+
+ @Override
+ public void ensureSupported(Configuration configuration) {
+ ensureDatabaseIsRecentEnough("5.1");
+ if (databaseType instanceof MariaDBDatabaseType) {
+ ensureDatabaseNotOlderThanOtherwiseRecommendUpgradeToFlywayEdition("10.4", List.of(Tier.ENTERPRISE), configuration);
+ recommendFlywayUpgradeIfNecessary("10.6");
+ } else {
+ ensureDatabaseNotOlderThanOtherwiseRecommendUpgradeToFlywayEdition("5.7", List.of(Tier.ENTERPRISE), configuration);
+ recommendFlywayUpgradeIfNecessary("8.0");
+ }
+ }
+
+ @Override
+ public void close() {
+ try {
+ super.close();
+ } finally {
+ delegateDatabase.close();
+ }
+ }
+
+ @Override
+ protected String doGetCurrentUser() throws SQLException {
+ return delegateDatabase.doGetCurrentUser();
+ }
+
+ @Override
+ public boolean supportsDdlTransactions() {
+ return delegateDatabase.supportsDdlTransactions();
+ }
+
+ @Override
+ public String getBooleanTrue() {
+ return delegateDatabase.getBooleanTrue();
+ }
+
+ @Override
+ public String getBooleanFalse() {
+ return delegateDatabase.getBooleanFalse();
+ }
+
+ @Override
+ public String getOpenQuote() {
+ return delegateDatabase.getOpenQuote();
+ }
+
+ @Override
+ public String getCloseQuote() {
+ return delegateDatabase.getCloseQuote();
+ }
+
+ @Override
+ public boolean catalogIsSchema() {
+ return delegateDatabase.catalogIsSchema();
+ }
+
+ @Override
+ public boolean useSingleConnection() {
+ return delegateDatabase.useSingleConnection();
+ }
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/MySQL57DatabaseType.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/MySQL57DatabaseType.java
new file mode 100644
index 0000000000..04b39f74bf
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/MySQL57DatabaseType.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2022-2022 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.flywaydb.database.mysql;
+
+import org.flywaydb.core.api.configuration.Configuration;
+import org.flywaydb.core.internal.database.base.Database;
+import org.flywaydb.core.internal.jdbc.JdbcConnectionFactory;
+import org.flywaydb.core.internal.jdbc.StatementInterceptor;
+
+public class MySQL57DatabaseType extends MySQLDatabaseType {
+
+ @Override
+ public Database createDatabase(Configuration configuration, JdbcConnectionFactory jdbcConnectionFactory, StatementInterceptor statementInterceptor) {
+ return new MySQL57Database(configuration, jdbcConnectionFactory, statementInterceptor);
+ }
+
+ @Override
+ public int getPriority() {
+ return super.getPriority() + 1;
+ }
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/mariadb/MariaDB57Database.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/mariadb/MariaDB57Database.java
new file mode 100644
index 0000000000..9020304731
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/mariadb/MariaDB57Database.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2022-2022 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.flywaydb.database.mysql.mariadb;
+
+import org.flywaydb.core.api.configuration.Configuration;
+import org.flywaydb.core.internal.jdbc.JdbcConnectionFactory;
+import org.flywaydb.core.internal.jdbc.StatementInterceptor;
+import org.flywaydb.database.mysql.MySQL57Database;
+
+public class MariaDB57Database extends MySQL57Database {
+
+ public MariaDB57Database(Configuration configuration, JdbcConnectionFactory jdbcConnectionFactory, StatementInterceptor statementInterceptor) {
+ super(configuration, jdbcConnectionFactory, statementInterceptor, new MariaDBDatabase(configuration, jdbcConnectionFactory, statementInterceptor));
+ }
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/mariadb/MariaDB57DatabaseType.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/mariadb/MariaDB57DatabaseType.java
new file mode 100644
index 0000000000..644e420895
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/flywaydb/database/mysql/mariadb/MariaDB57DatabaseType.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2022-2022 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.flywaydb.database.mysql.mariadb;
+
+import org.flywaydb.core.api.configuration.Configuration;
+import org.flywaydb.core.internal.database.base.Database;
+import org.flywaydb.core.internal.jdbc.JdbcConnectionFactory;
+import org.flywaydb.core.internal.jdbc.StatementInterceptor;
+
+public class MariaDB57DatabaseType extends MariaDBDatabaseType {
+
+ @Override
+ public Database createDatabase(Configuration configuration, JdbcConnectionFactory jdbcConnectionFactory, StatementInterceptor statementInterceptor) {
+ return new MariaDB57Database(configuration, jdbcConnectionFactory, statementInterceptor);
+ }
+
+ @Override
+ public int getPriority() {
+ return super.getPriority() + 2;
+ }
+}
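Flyway resolves which database type handles a connection by priority, so these subclasses bump the stock priorities (+1 for MySQL, +2 for MariaDB) to win the selection when this module is on the classpath. The sketch below is purely illustrative (not part of the patch); it just prints the priorities the two new classes report, while the actual selection logic lives inside Flyway.

```java
import org.flywaydb.database.mysql.MySQL57DatabaseType;
import org.flywaydb.database.mysql.mariadb.MariaDB57DatabaseType;

public class PrioritySketch {

	public static void main(String[] args) {
		MySQL57DatabaseType mysql57 = new MySQL57DatabaseType();
		MariaDB57DatabaseType maria57 = new MariaDB57DatabaseType();

		// Each reports the stock priority plus 1 and 2 respectively, so both
		// outrank the built-in MySQL/MariaDB types when Flyway picks a handler.
		System.out.println("MySQL57DatabaseType priority:   " + mysql57.getPriority());
		System.out.println("MariaDB57DatabaseType priority: " + maria57.getPriority());
	}
}
```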
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/AbstractCallback.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/AbstractCallback.java
new file mode 100644
index 0000000000..58050cf9bb
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/AbstractCallback.java
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2019-2020 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.flyway;
+
+import java.util.List;
+
+import org.flywaydb.core.api.callback.Callback;
+import org.flywaydb.core.api.callback.Context;
+import org.flywaydb.core.api.callback.Event;
+
+import org.springframework.jdbc.BadSqlGrammarException;
+import org.springframework.util.ObjectUtils;
+
+/**
+ * Base implementation providing some shared features for java based callbacks.
+ *
+ * @author Janne Valkealahti
+ *
+ */
+public abstract class AbstractCallback implements Callback {
+
+ private final Event event;
+ private final List<SqlCommand> commands;
+ private final SqlCommandsRunner runner = new SqlCommandsRunner();
+
+ /**
+ * Instantiates a new abstract callback.
+ *
+ * @param event the event to hook into
+ */
+ public AbstractCallback(Event event) {
+ this(event, null);
+ }
+
+ /**
+ * Instantiates a new abstract callback.
+ *
+ * @param event the event to hook into
+ * @param commands the sql commands to run
+ */
+ public AbstractCallback(Event event, List<SqlCommand> commands) {
+ this.event = event;
+ this.commands = commands;
+ }
+
+ @Override
+ public boolean supports(Event event, Context context) {
+ return ObjectUtils.nullSafeEquals(this.event, event);
+ }
+
+ @Override
+ public boolean canHandleInTransaction(Event event, Context context) {
+ return true;
+ }
+
+ @Override
+ public void handle(Event event, Context context) {
+ try {
+ runner.execute(context.getConnection(), getCommands(event, context));
+ }
+ catch(Exception sqe) {
+ if (sqe instanceof BadSqlGrammarException) {
+ throw new DataFlowSchemaMigrationException(
+ "An exception occured during migration. This may indicate " +
+ "that you have run Spring Batch Jobs or Spring Cloud " +
+ "Tasks prior to running Spring Cloud Data Flow first. " +
+ "Data Flow must create these tables.", sqe);
+
+ }
+ throw sqe;
+ }
+ }
+
+ @Override
+ public String getCallbackName() {
+ return "";
+ }
+
+ /**
+ * Gets the commands.
+ *
+ * @param event the event
+ * @param context the context
+ * @return the commands
+ */
+ public List<SqlCommand> getCommands(Event event, Context context) {
+ return commands;
+ }
+}
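To show how AbstractCallback is meant to be extended, here is a hypothetical subclass (the table name and error code are illustrative, not taken from this change) that runs a cleanup statement after every migration run and suppresses the vendor's "unknown table" error so it stays idempotent.

```java
import java.util.Collections;

import org.flywaydb.core.api.callback.Event;

import org.springframework.cloud.dataflow.common.flyway.AbstractCallback;
import org.springframework.cloud.dataflow.common.flyway.SqlCommand;

public class DropLegacyTableCallback extends AbstractCallback {

	public DropLegacyTableCallback() {
		// 1051 is MySQL/MariaDB "unknown table"; suppressing it makes the drop idempotent
		super(Event.AFTER_MIGRATE,
				Collections.singletonList(SqlCommand.from("DROP TABLE LEGACY_TABLE", 1051)));
	}
}
```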
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/AbstractMigration.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/AbstractMigration.java
new file mode 100644
index 0000000000..8625692121
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/AbstractMigration.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2019-2020 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.flyway;
+
+import java.util.List;
+
+import org.flywaydb.core.api.migration.BaseJavaMigration;
+import org.flywaydb.core.api.migration.Context;
+
+/**
+ * Base implementation providing some shared features for java based migrations.
+ *
+ * @author Janne Valkealahti
+ *
+ */
+public abstract class AbstractMigration extends BaseJavaMigration {
+
+ private final List<SqlCommand> commands;
+ private final SqlCommandsRunner runner = new SqlCommandsRunner();
+
+ /**
+ * Instantiates a new abstract migration.
+ *
+ * @param commands the commands
+ */
+ public AbstractMigration(List<SqlCommand> commands) {
+ super();
+ this.commands = commands;
+ }
+
+ @Override
+ public void migrate(Context context) throws Exception {
+ runner.execute(context.getConnection(), getCommands());
+ }
+
+ /**
+ * Gets the commands.
+ *
+ * @return the commands
+ */
+ public List<SqlCommand> getCommands() {
+ return commands;
+ }
+}
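A hypothetical versioned migration built on AbstractMigration might look like the following; the table and column are illustrative, and the version comes from the usual Flyway V&lt;version&gt;__&lt;description&gt; class-name convention.

```java
import java.util.Collections;

import org.springframework.cloud.dataflow.common.flyway.AbstractMigration;
import org.springframework.cloud.dataflow.common.flyway.SqlCommand;

public class V2__Add_Description_Column extends AbstractMigration {

	public V2__Add_Description_Column() {
		// single DDL statement executed through SqlCommandsRunner when Flyway runs V2
		super(Collections.singletonList(
				SqlCommand.from("ALTER TABLE app_registration ADD description VARCHAR(255)")));
	}
}
```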
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/DataFlowSchemaMigrationException.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/DataFlowSchemaMigrationException.java
new file mode 100644
index 0000000000..c59f339de4
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/DataFlowSchemaMigrationException.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2019-2020 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.cloud.dataflow.common.flyway;
+
+/**
+ * Exception is thrown when an error occurs while migrating the dataflow schema.
+ *
+ * @author Glenn Renfro
+ */
+public class DataFlowSchemaMigrationException extends RuntimeException {
+
+ private static final long serialVersionUID = 2000527476523962349L;
+
+ /**
+ * Create an exception with the specified message and cause.
+ *
+ * @param message the text that will be associated with the exception.
+ * @param throwable the exception that is being wrapped.
+ */
+ public DataFlowSchemaMigrationException(String message, Throwable throwable) {
+ super(message, throwable);
+ }
+
+ /**
+ * Create an exception with the specified message.
+ * @param message the text that will be associated with the exception.
+ */
+ public DataFlowSchemaMigrationException(String message) {
+ super(message);
+ }
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/DatabaseDriverUtils.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/DatabaseDriverUtils.java
new file mode 100644
index 0000000000..cf5ab9a466
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/DatabaseDriverUtils.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2022-2022 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.flyway;
+
+import java.sql.DatabaseMetaData;
+
+import javax.sql.DataSource;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.boot.jdbc.DatabaseDriver;
+import org.springframework.jdbc.support.JdbcUtils;
+import org.springframework.jdbc.support.MetaDataAccessException;
+
+/**
+ * Provides utility methods to help with {@link DatabaseDriver} related operations.
+ */
+public final class DatabaseDriverUtils {
+
+ private static final Logger LOG = LoggerFactory.getLogger(DatabaseDriverUtils.class);
+
+ private DatabaseDriverUtils() {
+ }
+
+ /**
+ * Finds a database driver suitable for a datasource.
+ *
+ * <p>By default, the jdbc url reported from the database metadata is used to determine
+ * the driver. It also handles the special case where MariaDB reports a 'jdbc:maria'
+ * url even though the original url was prefixed with 'jdbc:mysql'.
+ *
+ * @param dataSource the datasource to inspect
+ * @return a database driver suitable for the datasource
+ */
+ public static DatabaseDriver getDatabaseDriver(DataSource dataSource) {
+ // copied from boot's flyway auto-config to get matching db vendor id (but adjusted
+ // to handle the case when MariaDB driver is being used against MySQL database).
+ try {
+ String url = JdbcUtils.extractDatabaseMetaData(dataSource, DatabaseMetaData::getURL);
+ DatabaseDriver databaseDriver = DatabaseDriver.fromJdbcUrl(url);
+ if (databaseDriver == DatabaseDriver.MARIADB) {
+ // MariaDB reports a 'jdbc:maria' url even when the user specified 'jdbc:mysql'.
+ // Check whether the underlying database is actually MySQL.
+ String product = JdbcUtils.extractDatabaseMetaData(dataSource, DatabaseMetaData::getDatabaseProductName);
+ if (DatabaseDriver.MYSQL.name().equalsIgnoreCase(product)) {
+ LOG.info("Using MariaDB driver against MySQL database - will use MySQL");
+ databaseDriver = DatabaseDriver.MYSQL;
+ }
+ }
+ return databaseDriver;
+ }
+ catch (MetaDataAccessException ex) {
+ throw new IllegalStateException(ex);
+ }
+ }
+}
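A minimal usage sketch of the utility follows; the connection details are assumptions for illustration, and the lookup needs a reachable database because it reads JDBC metadata.

```java
import javax.sql.DataSource;

import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.boot.jdbc.DatabaseDriver;
import org.springframework.cloud.dataflow.common.flyway.DatabaseDriverUtils;

public class DriverLookupExample {

	public static void main(String[] args) {
		DataSource dataSource = DataSourceBuilder.create()
				.url("jdbc:mysql://localhost:3306/dataflow?permitMysqlScheme")
				.driverClassName("org.mariadb.jdbc.Driver")
				.username("spring")
				.password("secret")
				.build();

		// Resolves to MYSQL even though the MariaDB driver reports a 'jdbc:maria' url.
		DatabaseDriver driver = DatabaseDriverUtils.getDatabaseDriver(dataSource);
		System.out.println("Resolved driver: " + driver.getId());
	}
}
```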
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/FlywayVendorReplacingApplicationContextInitializer.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/FlywayVendorReplacingApplicationContextInitializer.java
new file mode 100644
index 0000000000..aa98aec176
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/FlywayVendorReplacingApplicationContextInitializer.java
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2022-2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.cloud.dataflow.common.flyway;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.boot.env.EnvironmentPostProcessor;
+import org.springframework.context.ApplicationContextInitializer;
+import org.springframework.context.ConfigurableApplicationContext;
+import org.springframework.core.Ordered;
+import org.springframework.core.env.ConfigurableEnvironment;
+import org.springframework.core.env.MapPropertySource;
+
+/**
+ * An {@link ApplicationContextInitializer} that replaces any configured 'spring.flyway.locations'
+ * properties that contain the '{vendor}' token with 'mysql' when using the MariaDB driver
+ * to access a MySQL database.
+ *
+ * <p>
+ * Typically property manipulation like this is implemented as an {@link EnvironmentPostProcessor} but
+ * in order to work with applications that are using Config server it must be a context initializer
+ * so it can run after the {@code org.springframework.cloud.bootstrap.config.PropertySourceBootstrapConfiguration}
+ * context initializer.
+ *
+ * @author Chris Bono
+ */
+public class FlywayVendorReplacingApplicationContextInitializer implements
+ ApplicationContextInitializer<ConfigurableApplicationContext>, Ordered {
+
+ private final Logger log = LoggerFactory.getLogger(FlywayVendorReplacingApplicationContextInitializer.class);
+
+ @Override
+ public void initialize(ConfigurableApplicationContext applicationContext) {
+
+ ConfigurableEnvironment env = applicationContext.getEnvironment();
+
+ // If there is a spring.datasource.url prefixed w/ "jdbc:mysql:" and using the MariaDB driver then replace {vendor}
+ boolean usingMariaDriver = env.getProperty("spring.datasource.driver-class-name", "").equals("org.mariadb.jdbc.Driver");
+ boolean usingMySqlUrl = env.getProperty("spring.datasource.url", "").startsWith("jdbc:mysql:");
+ if (!(usingMariaDriver && usingMySqlUrl)) {
+ return;
+ }
+
+ log.info("Using MariaDB driver w/ MySQL url - looking for '{vendor}' in 'spring.flyway.locations'");
+
+ // Look for spring.flyway.locations[0..N] and if found then override it w/ vendor replaced version
+ Map<String, Object> replacedLocations = new HashMap<>();
+
+ int prodIdx = 0;
+ while (true) {
+ String locationPropName = String.format("spring.flyway.locations[%d]", prodIdx++);
+ String configuredLocation = env.getProperty(locationPropName);
+ if (configuredLocation == null) {
+ break;
+ }
+ if (configuredLocation.contains("{vendor}")) {
+ String replaceLocation = configuredLocation.replace("{vendor}", "mysql");
+ replacedLocations.put(locationPropName, replaceLocation);
+ }
+ }
+
+ if (replacedLocations.isEmpty()) {
+ log.info("No properties with '{vendor}' found to replace");
+ return;
+ }
+
+ log.info("Replacing '{vendor}' in {}", replacedLocations);
+
+ env.getPropertySources().addFirst(new MapPropertySource("overrideVendorInFlywayLocations", replacedLocations));
+ }
+
+ /**
+ * The precedence for execution order - should execute last.
+ *
+ * @return lowest precedence to ensure it executes after other initializers
+ */
+ @Override
+ public int getOrder() {
+ return Ordered.LOWEST_PRECEDENCE;
+ }
+}
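The initializer only takes effect if it is registered with the application. A minimal sketch of one way to wire it up is shown below; the bootstrap class itself is hypothetical, and in the servers this registration may instead happen through configuration metadata rather than code.

```java
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.dataflow.common.flyway.FlywayVendorReplacingApplicationContextInitializer;

@SpringBootApplication
public class ServerBootstrap {

	public static void main(String[] args) {
		SpringApplication application = new SpringApplication(ServerBootstrap.class);
		// rewrites '{vendor}' in spring.flyway.locations[N] to 'mysql' when the
		// MariaDB driver is pointed at a jdbc:mysql: url
		application.addInitializers(new FlywayVendorReplacingApplicationContextInitializer());
		application.run(args);
	}
}
```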
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SqlCommand.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SqlCommand.java
new file mode 100644
index 0000000000..9e5b94b9aa
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SqlCommand.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2019-2020 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.flyway;
+
+import java.sql.Connection;
+import java.util.Collections;
+import java.util.List;
+
+import org.springframework.jdbc.core.JdbcTemplate;
+
+/**
+ * Class keeping an SQL command and its possibly suppressed SQL error codes together.
+ *
+ * @author Janne Valkealahti
+ *
+ */
+public class SqlCommand {
+
+ private final String command;
+ private final List<Integer> suppressedErrorCodes;
+
+ /**
+ * Convenience method returning new instance.
+ *
+ * @param command the command
+ * @return the sql command
+ */
+ public static SqlCommand from(String command) {
+ return new SqlCommand(command, null);
+ }
+
+ /**
+ * Convenience method returning new instance.
+ *
+ * @param command the command
+ * @param suppressedErrorCode the suppressed error code
+ * @return the sql command
+ */
+ public static SqlCommand from(String command, int suppressedErrorCode) {
+ return new SqlCommand(command, suppressedErrorCode);
+ }
+
+ public SqlCommand() {
+ this(null, null);
+ }
+
+ /**
+ * Instantiates a new sql command.
+ *
+ * @param command the command
+ * @param suppressedErrorCode the suppressed error code
+ */
+ public SqlCommand(String command, int suppressedErrorCode) {
+ this(command, Collections.singletonList(suppressedErrorCode));
+ }
+
+ /**
+ * Instantiates a new sql command.
+ *
+ * @param command the command
+ * @param suppressedErrorCodes the suppressed error codes
+ */
+ public SqlCommand(String command, List<Integer> suppressedErrorCodes) {
+ this.command = command;
+ this.suppressedErrorCodes = suppressedErrorCodes;
+ }
+
+ /**
+ * Gets the command.
+ *
+ * @return the command
+ */
+ public String getCommand() {
+ return command;
+ }
+
+ /**
+ * Gets the suppressed error codes.
+ *
+ * @return the suppressed error codes
+ */
+ public List<Integer> getSuppressedErrorCodes() {
+ return suppressedErrorCodes;
+ }
+
+ /**
+ * Checks if this command can handle execution directly
+ * in a given jdbc template.
+ *
+ * @return true, if command can handle jdbc template
+ */
+ public boolean canHandleInJdbcTemplate() {
+ return false;
+ }
+
+ /**
+ * Handle command in a given jdbc template.
+ *
+ * @param jdbcTemplate the jdbc template
+ * @param connection the sql connection
+ */
+ public void handle(JdbcTemplate jdbcTemplate, Connection connection) {
+ // expected to get handled in a sub-class
+ throw new UnsupportedOperationException("Not supported in a base class");
+ }
+}
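A short illustration of the two factory methods (the statements and error code are made up for the example): a plain command fails the run on any error, while a command carrying a suppressed code is tolerated by the runner when that specific vendor error occurs.

```java
import org.springframework.cloud.dataflow.common.flyway.SqlCommand;

public class SqlCommandExamples {

	public static void main(String[] args) {
		SqlCommand plain = SqlCommand.from(
				"UPDATE task_execution SET exit_code = 0 WHERE exit_code IS NULL");
		SqlCommand tolerant = SqlCommand.from(
				"DROP INDEX idx_legacy ON task_execution", 1091);

		System.out.println(plain.getSuppressedErrorCodes());    // null - nothing suppressed
		System.out.println(tolerant.getSuppressedErrorCodes()); // [1091]
	}
}
```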
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SqlCommandsRunner.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SqlCommandsRunner.java
new file mode 100644
index 0000000000..7e1e42989d
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SqlCommandsRunner.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2019-2020 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.flyway;
+
+import java.sql.Connection;
+import java.util.List;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.jdbc.datasource.SingleConnectionDataSource;
+import org.springframework.jdbc.support.SQLExceptionTranslator;
+import org.springframework.util.ObjectUtils;
+
+/**
+ * Simple utility class to run commands with a connection and possibly suppress
+ * errors.
+ *
+ * @author Janne Valkealahti
+ *
+ */
+public class SqlCommandsRunner {
+
+ private static final Logger logger = LoggerFactory.getLogger(SqlCommandsRunner.class);
+
+ /**
+ * Execute a list of {@code SqlCommand}s, suppressing errors whose codes are
+ * registered with a given command.
+ *
+ * @param connection the connection
+ * @param commands the sql commands
+ */
+ public void execute(Connection connection, List<SqlCommand> commands) {
+ JdbcTemplate jdbcTemplate = new JdbcTemplate(new SingleConnectionDataSource(connection, true));
+ SQLExceptionTranslator origExceptionTranslator = jdbcTemplate.getExceptionTranslator();
+
+ for (SqlCommand command : commands) {
+ if (command.canHandleInJdbcTemplate()) {
+ command.handle(jdbcTemplate, connection);
+ }
+ else {
+ if(!ObjectUtils.isEmpty(command.getSuppressedErrorCodes())) {
+ jdbcTemplate.setExceptionTranslator(new SuppressSQLErrorCodesTranslator(command.getSuppressedErrorCodes()));
+ }
+ try {
+ logger.debug("Executing command {}", command.getCommand());
+ jdbcTemplate.execute(command.getCommand());
+ } catch (SuppressDataAccessException e) {
+ logger.debug("Suppressing error {}", e);
+ }
+ // restore original translator in case next command
+ // doesn't define suppressing codes.
+ jdbcTemplate.setExceptionTranslator(origExceptionTranslator);
+ }
+ }
+ }
+}
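Below is a sketch of driving the runner directly against a JDBC connection (normally it is only invoked from the callback/migration base classes above); the H2 in-memory database and its 42101 "table already exists" error code are assumptions made so the example is self-contained.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Arrays;

import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
import org.springframework.cloud.dataflow.common.flyway.SqlCommandsRunner;

public class RunnerExample {

	public static void main(String[] args) throws Exception {
		try (Connection connection = DriverManager.getConnection("jdbc:h2:mem:scdf")) {
			new SqlCommandsRunner().execute(connection, Arrays.asList(
					SqlCommand.from("CREATE TABLE demo (id INT)"),
					// second create fails with error code 42101, which is suppressed
					SqlCommand.from("CREATE TABLE demo (id INT)", 42101)));
		}
	}
}
```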
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SuppressDataAccessException.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SuppressDataAccessException.java
new file mode 100644
index 0000000000..28098afbc7
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SuppressDataAccessException.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2019-2020 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.flyway;
+
+import org.springframework.dao.DataAccessException;
+
+/**
+ * Special type of {@link DataAccessException} indicating that the error can be
+ * suppressed.
+ *
+ * @author Janne Valkealahti
+ *
+ */
+@SuppressWarnings("serial")
+public class SuppressDataAccessException extends DataAccessException {
+
+ /**
+ * Instantiates a new suppress data access exception.
+ *
+ * @param msg the msg
+ * @param cause the cause
+ */
+ public SuppressDataAccessException(String msg, Throwable cause) {
+ super(msg, cause);
+ }
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SuppressSQLErrorCodesTranslator.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SuppressSQLErrorCodesTranslator.java
new file mode 100644
index 0000000000..385daa4905
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/java/org/springframework/cloud/dataflow/common/flyway/SuppressSQLErrorCodesTranslator.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2019-2020 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.flyway;
+
+import java.sql.SQLException;
+import java.util.Arrays;
+import java.util.List;
+
+import org.slf4j.LoggerFactory;
+
+import org.springframework.dao.DataAccessException;
+import org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator;
+import org.springframework.util.Assert;
+
+/**
+ * {@link SQLErrorCodeSQLExceptionTranslator} suppressing errors based on
+ * configured list of codes by throwing dedicated {@link SuppressDataAccessException}.
+ *
+ * @author Janne Valkealahti
+ *
+ */
+public class SuppressSQLErrorCodesTranslator extends SQLErrorCodeSQLExceptionTranslator {
+
+ private static final org.slf4j.Logger logger = LoggerFactory.getLogger(SuppressSQLErrorCodesTranslator.class);
+ private final List<Integer> errorCodes;
+
+ /**
+ * Instantiates a new suppress SQL error codes translator.
+ *
+ * @param errorCode the error code
+ */
+ public SuppressSQLErrorCodesTranslator(int errorCode) {
+ this(Arrays.asList(errorCode));
+ }
+
+ /**
+ * Instantiates a new suppress SQL error codes translator.
+ *
+ * @param errorCodes the error codes
+ */
+ public SuppressSQLErrorCodesTranslator(List<Integer> errorCodes) {
+ super();
+ Assert.notNull(errorCodes, "errorCodes must be set");
+ this.errorCodes = errorCodes;
+ }
+
+ @Override
+ protected DataAccessException customTranslate(String task, String sql, SQLException sqlEx) {
+ logger.debug("Checking sql error code {} against {}", sqlEx.getErrorCode(), errorCodes);
+ if (errorCodes.contains(sqlEx.getErrorCode())) {
+ return new SuppressDataAccessException(task, sqlEx);
+ }
+ return super.customTranslate(task, sql, sqlEx);
+ }
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/resources/META-INF/services/org.flywaydb.core.extensibility.Plugin b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/resources/META-INF/services/org.flywaydb.core.extensibility.Plugin
new file mode 100644
index 0000000000..ccab893acc
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/resources/META-INF/services/org.flywaydb.core.extensibility.Plugin
@@ -0,0 +1,2 @@
+org.flywaydb.database.mysql.mariadb.MariaDB57DatabaseType
+org.flywaydb.database.mysql.MySQL57DatabaseType
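Flyway discovers these two entries through the standard ServiceLoader mechanism, so having this module on the classpath is enough to make the 5.7-aware types available. A quick way to sanity-check the registration (a sketch, assuming Flyway and this jar are on the classpath):

```java
import java.util.ServiceLoader;

import org.flywaydb.core.extensibility.Plugin;

public class PluginDiscoveryCheck {

	public static void main(String[] args) {
		// prints every Plugin registered via META-INF/services, including the two above
		ServiceLoader.load(Plugin.class)
				.forEach(plugin -> System.out.println(plugin.getClass().getName()));
	}
}
```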
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/resources/org/flywaydb/database/version.txt b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/resources/org/flywaydb/database/version.txt
new file mode 100644
index 0000000000..a4a0d40d19
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/main/resources/org/flywaydb/database/version.txt
@@ -0,0 +1 @@
+@flyway.version@
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/test/java/org/springframework/cloud/dataflow/common/flyway/FlywayVendorReplacingApplicationContextInitializerTests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/test/java/org/springframework/cloud/dataflow/common/flyway/FlywayVendorReplacingApplicationContextInitializerTests.java
new file mode 100644
index 0000000000..6270f5ff7a
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-flyway/src/test/java/org/springframework/cloud/dataflow/common/flyway/FlywayVendorReplacingApplicationContextInitializerTests.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2022-2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.cloud.dataflow.common.flyway;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Stream;
+
+import org.junit.jupiter.api.Named;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+
+import org.springframework.boot.test.context.runner.ApplicationContextRunner;
+import org.springframework.core.env.ConfigurableEnvironment;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.jupiter.params.provider.Arguments.arguments;
+
+/**
+ * Tests for {@link FlywayVendorReplacingApplicationContextInitializer}.
+ */
+class FlywayVendorReplacingApplicationContextInitializerTests {
+
+ @ParameterizedTest(name = "{0}")
+ @MethodSource("vendorReplacedProperlyProvider")
+ void vendorReplacedProperly(boolean usingMySqlUrl, boolean usingMariaDriver, List<String> configuredLocationProps, List<String> finalLocationProps) {
+ List<String> props = new ArrayList<>();
+ props.add("spring.datasource.url=" + (usingMySqlUrl ? "jdbc:mysql://localhost:3306/dataflow?permitMysqlScheme" : "jdbc:mariadb://localhost:3306/dataflow"));
+ props.add("spring.datasource.driver-class-name=" + (usingMariaDriver ? "org.mariadb.jdbc.Driver" : "org.mysql.jdbc.Driver"));
+ props.addAll(configuredLocationProps);
+
+ // Prime an actual env by running it through the AppContextRunner with the configured properties
+ new ApplicationContextRunner().withPropertyValues(props.toArray(new String[0])).run((context) -> {
+ ConfigurableEnvironment env = context.getEnvironment();
+
+ // Sanity check the locations props are as expected
+ configuredLocationProps.forEach((location) -> {
+ String key = location.split("=")[0];
+ String value = location.split("=")[1];
+ assertThat(env.getProperty(key)).isEqualTo(value);
+ });
+
+ // Run the env through the ACI
+ FlywayVendorReplacingApplicationContextInitializer flywayVendorReplacingInitializer = new FlywayVendorReplacingApplicationContextInitializer();
+ flywayVendorReplacingInitializer.initialize(context);
+
+ // Verify they are replaced as expected
+ finalLocationProps.forEach((location) -> {
+ String key = location.split("=")[0];
+ String value = location.split("=")[1];
+ assertThat(env.getProperty(key)).isEqualTo(value);
+ });
+ });
+ }
+
+ private static Stream<Arguments> vendorReplacedProperlyProvider() {
+ return Stream.of(
+ arguments(Named.of("singleLocationWithVendor",true), true,
+ Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/{vendor}"),
+ Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/mysql")
+ ),
+ arguments(Named.of("singleLocationWithoutVendor",true), true,
+ Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/foo"),
+ Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/foo")
+ ),
+ arguments(Named.of("noLocations",true), true,
+ Collections.emptyList(),
+ Collections.emptyList()
+ ),
+ arguments(Named.of("multiLocationsAllWithVendor",true), true,
+ Arrays.asList(
+ "spring.flyway.locations[0]=classpath:org/skipper/db0/{vendor}",
+ "spring.flyway.locations[1]=classpath:org/skipper/db1/{vendor}",
+ "spring.flyway.locations[2]=classpath:org/skipper/db2/{vendor}"),
+ Arrays.asList(
+ "spring.flyway.locations[0]=classpath:org/skipper/db0/mysql",
+ "spring.flyway.locations[1]=classpath:org/skipper/db1/mysql",
+ "spring.flyway.locations[2]=classpath:org/skipper/db2/mysql")
+ ),
+ arguments(Named.of("multiLocationsSomeWithVendor",true), true,
+ Arrays.asList(
+ "spring.flyway.locations[0]=classpath:org/skipper/db0/{vendor}",
+ "spring.flyway.locations[1]=classpath:org/skipper/db1/foo",
+ "spring.flyway.locations[2]=classpath:org/skipper/db2/{vendor}"),
+ Arrays.asList(
+ "spring.flyway.locations[0]=classpath:org/skipper/db0/mysql",
+ "spring.flyway.locations[1]=classpath:org/skipper/db1/foo",
+ "spring.flyway.locations[2]=classpath:org/skipper/db2/mysql")
+ ),
+ arguments(Named.of("multiLocationsNoneWithVendor",true), true,
+ Arrays.asList(
+ "spring.flyway.locations[0]=classpath:org/skipper/db0/foo",
+ "spring.flyway.locations[1]=classpath:org/skipper/db1/bar",
+ "spring.flyway.locations[2]=classpath:org/skipper/db2/zaa"),
+ Arrays.asList(
+ "spring.flyway.locations[0]=classpath:org/skipper/db0/foo",
+ "spring.flyway.locations[1]=classpath:org/skipper/db1/bar",
+ "spring.flyway.locations[2]=classpath:org/skipper/db2/zaa")
+ ),
+ arguments(Named.of("mariaUrlWithMariaDriverDoesNotReplace",false), true,
+ Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/{vendor}"),
+ Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/{vendor}")
+ ),
+ arguments(Named.of("mysqlUrlWithMysqlDriverDoesNotReplace",true), false,
+ Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/{vendor}"),
+ Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/{vendor}")
+ ),
+ arguments(Named.of("mariaUrlMysqlDriverDoesNotReplace",false), false,
+ Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/{vendor}"),
+ Collections.singletonList("spring.flyway.locations[0]=classpath:org/skipper/db/{vendor}")
+ )
+ );
+ }
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/pom.xml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/pom.xml
new file mode 100644
index 0000000000..6edc2eb238
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/pom.xml
@@ -0,0 +1,70 @@
+
+
+ 4.0.0
+
+ spring-cloud-dataflow-parent
+ org.springframework.cloud
+ 3.0.0-SNAPSHOT
+ ../../spring-cloud-dataflow-parent
+
+ spring-cloud-dataflow-common-persistence
+ spring-cloud-dataflow-audit
+ Spring Cloud Data Flow Common Persistence Utilities
+ jar
+
+ true
+ 3.4.1
+
+
+
+ org.hibernate.orm
+ hibernate-core
+
+
+ org.springframework.data
+ spring-data-jpa
+
+
+ org.springframework.boot
+ spring-boot-starter-jdbc
+
+
+ org.springframework.boot
+ spring-boot-starter-test
+ test
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-javadoc-plugin
+ ${maven-javadoc-plugin.version}
+
+
+ javadoc
+
+ jar
+
+ package
+
+
+
+
+ org.apache.maven.plugins
+ maven-source-plugin
+ 3.3.0
+
+
+ source
+
+ jar
+
+ package
+
+
+
+
+
+
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseAwareLobType.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseAwareLobType.java
new file mode 100644
index 0000000000..a50350b076
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseAwareLobType.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2019-2020 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.persistence.type;
+
+import org.hibernate.type.AbstractSingleColumnStandardBasicType;
+import org.hibernate.type.descriptor.java.StringJavaType;
+import org.hibernate.type.descriptor.jdbc.AdjustableJdbcType;
+import org.hibernate.type.descriptor.jdbc.ClobJdbcType;
+import org.hibernate.type.descriptor.jdbc.VarcharJdbcType;
+
+import org.springframework.util.Assert;
+
+/**
+ * Works around a Hibernate and PostgreSQL incompatibility for columns of type text.
+ * @author Corneil du Plessis
+ */
+public class DatabaseAwareLobType extends AbstractSingleColumnStandardBasicType<String> {
+
+ public static final DatabaseAwareLobType INSTANCE = new DatabaseAwareLobType();
+
+
+
+ public DatabaseAwareLobType() {
+ super( getDbDescriptor(), StringJavaType.INSTANCE );
+ }
+
+ public static AdjustableJdbcType getDbDescriptor() {
+ if( isPostgres() ) {
+ return VarcharJdbcType.INSTANCE;
+ }
+ else {
+ return ClobJdbcType.DEFAULT;
+ }
+ }
+
+ /**
+ * Determines whether the database driver in use is PostgreSQL.
+ * @return true if the driver is PostgreSQL
+ */
+ private static boolean isPostgres() {
+ Boolean postgresDatabase = DatabaseTypeAwareInitializer.getPostgresDatabase();
+ Assert.notNull(postgresDatabase, "Expected postgresDatabase to be set");
+ return postgresDatabase;
+ }
+
+ @Override
+ public String getName() {
+ return "database_aware_lob";
+ }
+}
\ No newline at end of file
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseAwareLobUserType.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseAwareLobUserType.java
new file mode 100644
index 0000000000..0b29fda148
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseAwareLobUserType.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2024 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.persistence.type;
+
+import java.util.function.BiConsumer;
+
+import org.hibernate.type.descriptor.java.BasicJavaType;
+import org.hibernate.type.descriptor.java.StringJavaType;
+import org.hibernate.type.descriptor.jdbc.AdjustableJdbcType;
+import org.hibernate.type.descriptor.jdbc.ClobJdbcType;
+import org.hibernate.type.descriptor.jdbc.JdbcType;
+import org.hibernate.type.descriptor.jdbc.VarcharJdbcType;
+import org.hibernate.usertype.BaseUserTypeSupport;
+import org.hibernate.usertype.UserType;
+
+import org.springframework.util.Assert;
+
+/**
+ * A {@link UserType} that works around a Hibernate and PostgreSQL incompatibility for
+ * columns of type text.
+ *
+ * @author Corneil du Plessis
+ * @author Chris Bono
+ * @since 3.0.0
+ */
+public class DatabaseAwareLobUserType extends BaseUserTypeSupport<String> {
+
+ @Override
+ protected void resolve(BiConsumer<BasicJavaType<String>, JdbcType> resolutionConsumer) {
+ resolutionConsumer.accept(StringJavaType.INSTANCE, getDbDescriptor());
+ }
+
+ public static AdjustableJdbcType getDbDescriptor() {
+ if( isPostgres() ) {
+ return VarcharJdbcType.INSTANCE;
+ }
+ else {
+ return ClobJdbcType.DEFAULT;
+ }
+ }
+
+ private static boolean isPostgres() {
+ Boolean postgresDatabase = DatabaseTypeAwareInitializer.getPostgresDatabase();
+ Assert.notNull(postgresDatabase, "Expected postgresDatabase to be set");
+ return postgresDatabase;
+ }
+}
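A hypothetical entity sketch showing how the user type is applied (the entity and field names are illustrative only): the annotated column maps to VARCHAR on PostgreSQL and to CLOB elsewhere, provided DatabaseTypeAwareInitializer has already run so the driver type is known.

```java
import jakarta.persistence.Entity;
import jakarta.persistence.Id;

import org.hibernate.annotations.Type;

import org.springframework.cloud.dataflow.common.persistence.type.DatabaseAwareLobUserType;

@Entity
public class StreamDefinitionExample {

	@Id
	private Long id;

	// stored as VARCHAR on PostgreSQL, CLOB on other databases
	@Type(DatabaseAwareLobUserType.class)
	private String dslText;

	public String getDslText() {
		return dslText;
	}

	public void setDslText(String dslText) {
		this.dslText = dslText;
	}
}
```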
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseTypeAwareInitializer.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseTypeAwareInitializer.java
new file mode 100644
index 0000000000..c67454afe2
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/java/org/springframework/cloud/dataflow/common/persistence/type/DatabaseTypeAwareInitializer.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2019-2020 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.persistence.type;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.context.ApplicationContextInitializer;
+import org.springframework.context.ConfigurableApplicationContext;
+import org.springframework.core.Ordered;
+import org.springframework.core.env.ConfigurableEnvironment;
+
+public class DatabaseTypeAwareInitializer implements ApplicationContextInitializer<ConfigurableApplicationContext>, Ordered {
+ private static final Logger logger = LoggerFactory.getLogger(DatabaseTypeAwareInitializer.class);
+ private static Boolean postgresDatabase = null;
+
+ public DatabaseTypeAwareInitializer() {
+ }
+
+ @Override
+ public void initialize(ConfigurableApplicationContext applicationContext) {
+ ConfigurableEnvironment env = applicationContext.getEnvironment();
+ String property = env.getProperty("spring.datasource.driver-class-name", "");
+ logger.info("checking database driver type:{}", property);
+ postgresDatabase = property.contains("postgres");
+ }
+
+ public static Boolean getPostgresDatabase() {
+ return postgresDatabase;
+ }
+
+ @Override
+ public int getOrder() {
+ return Ordered.HIGHEST_PRECEDENCE;
+ }
+
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/resources/META-INF/spring.factories b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/resources/META-INF/spring.factories
new file mode 100644
index 0000000000..189a0a6f71
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/main/resources/META-INF/spring.factories
@@ -0,0 +1 @@
+org.springframework.context.ApplicationContextInitializer=org.springframework.cloud.dataflow.common.persistence.type.DatabaseTypeAwareInitializer
\ No newline at end of file
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/test/java/org/springframework/cloud/dataflow/common/persistence/DatabaseTypeAwareInitializerTest.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/test/java/org/springframework/cloud/dataflow/common/persistence/DatabaseTypeAwareInitializerTest.java
new file mode 100644
index 0000000000..1eec1f34a2
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-persistence/src/test/java/org/springframework/cloud/dataflow/common/persistence/DatabaseTypeAwareInitializerTest.java
@@ -0,0 +1,38 @@
+package org.springframework.cloud.dataflow.common.persistence;
+
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.boot.test.context.runner.ApplicationContextRunner;
+import org.springframework.cloud.dataflow.common.persistence.type.DatabaseTypeAwareInitializer;
+import org.springframework.core.env.ConfigurableEnvironment;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+class DatabaseTypeAwareInitializerTest {
+ private static final Logger logger = LoggerFactory.getLogger(DatabaseTypeAwareInitializerTest.class);
+
+ @Test
+ void initPostgres() {
+ initDriverType("org.postgresql.Driver");
+ assertThat(DatabaseTypeAwareInitializer.getPostgresDatabase()).isNotNull();
+ assertThat(DatabaseTypeAwareInitializer.getPostgresDatabase()).isTrue();
+ }
+
+ @Test
+ void initMariaDB() {
+ initDriverType("org.mariadb.jdbc.Driver");
+ assertThat(DatabaseTypeAwareInitializer.getPostgresDatabase()).isNotNull();
+ assertThat(DatabaseTypeAwareInitializer.getPostgresDatabase()).isFalse();
+ }
+ private void initDriverType(String driverClassName) {
+ // Prime an actual env by running it through the AppContextRunner with the configured properties
+ new ApplicationContextRunner().withPropertyValues("spring.datasource.driver-class-name=" + driverClassName).run((context) -> {
+ ConfigurableEnvironment env = context.getEnvironment();
+ logger.info("spring.datasource.driver-class-name={}", env.getProperty("spring.datasource.driver-class-name"));
+ DatabaseTypeAwareInitializer initializer = new DatabaseTypeAwareInitializer();
+ initializer.initialize(context);
+ });
+ }
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/pom.xml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/pom.xml
new file mode 100644
index 0000000000..10185f900c
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/pom.xml
@@ -0,0 +1,36 @@
+
+
+ 4.0.0
+
+ spring-cloud-dataflow-common-test-docker-junit5
+ jar
+
+ Spring Cloud Dataflow Common Docker Test JUnit5 Support
+ Utilities to help using junit5
+
+
+ org.springframework.cloud
+ spring-cloud-dataflow-common-parent
+ 3.0.0-SNAPSHOT
+
+
+ true
+
+
+
+ org.springframework.cloud
+ spring-cloud-dataflow-common-test-docker
+ ${project.version}
+
+
+ org.junit.jupiter
+ junit-jupiter-api
+
+
+ org.springframework.boot
+ spring-boot-starter-test
+
+
+
+
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose.java
new file mode 100644
index 0000000000..3c4be95dd4
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.junit5;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Inherited;
+import java.lang.annotation.Repeatable;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+import org.junit.jupiter.api.extension.ExtendWith;
+
+@Documented
+@Inherited
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE, ElementType.METHOD})
+@Repeatable(DockerComposes.class)
+@ExtendWith(DockerComposeExtension.class)
+public @interface DockerCompose {
+
+ int order() default 0;
+ String id() default "";
+ boolean start() default true;
+ String[] locations() default {};
+ String[] services() default {};
+ String log() default "";
+}
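Typical usage of the annotation on a JUnit 5 test class looks like the following; the compose file location and service name are assumptions for illustration.

```java
import org.junit.jupiter.api.Test;

import org.springframework.cloud.dataflow.common.test.docker.junit5.DockerCompose;

@DockerCompose(locations = "src/test/resources/docker-compose-rabbitmq.yml", services = "rabbitmq")
class RabbitComposeTests {

	@Test
	void brokerStarts() {
		// the compose cluster is started before this test and stopped afterwards
	}
}
```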
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeCluster.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeCluster.java
new file mode 100644
index 0000000000..81b635c156
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeCluster.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.junit5;
+
+import org.springframework.cloud.dataflow.common.test.docker.compose.DockerComposeRule;
+
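+/**
+ * Handle to a single named docker-compose cluster, giving access to the underlying
+ * {@link DockerComposeRule} and allowing a test to start and stop the cluster on demand.
+ */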
+public interface DockerComposeCluster {
+
+ DockerComposeRule getRule();
+ void start();
+ void stop();
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeExtension.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeExtension.java
new file mode 100644
index 0000000000..e8b3d8c463
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeExtension.java
@@ -0,0 +1,157 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.junit5;
+
+import java.lang.reflect.Method;
+import java.util.List;
+
+import org.junit.jupiter.api.extension.AfterAllCallback;
+import org.junit.jupiter.api.extension.AfterEachCallback;
+import org.junit.jupiter.api.extension.BeforeAllCallback;
+import org.junit.jupiter.api.extension.BeforeEachCallback;
+import org.junit.jupiter.api.extension.ExtensionContext;
+import org.junit.jupiter.api.extension.ExtensionContext.Namespace;
+import org.junit.jupiter.api.extension.ExtensionContext.Store;
+import org.junit.jupiter.api.extension.ParameterContext;
+import org.junit.jupiter.api.extension.ParameterResolutionException;
+import org.junit.jupiter.api.extension.ParameterResolver;
+import org.junit.platform.commons.util.AnnotationUtils;
+
+import org.springframework.cloud.dataflow.common.test.docker.compose.DockerComposeRule;
+import org.springframework.cloud.dataflow.common.test.docker.junit5.DockerComposeManager.DockerComposeData;
+
+/**
+ * {@code JUnit5} extension handling docker compose integration.
+ *
+ * @author Janne Valkealahti
+ *
+ */
+public class DockerComposeExtension
+ implements BeforeAllCallback, BeforeEachCallback, AfterAllCallback, AfterEachCallback, ParameterResolver {
+
+ private static final Namespace NAMESPACE = Namespace.create(DockerComposeExtension.class);
+
+ @Override
+ public void beforeAll(ExtensionContext extensionContext) throws Exception {
+ // add class level compose info into compose manager
+ DockerComposeManager dockerComposeManager = getDockerComposeManager(extensionContext);
+
+ Class<?> testClass = extensionContext.getRequiredTestClass();
+ String classKey = extensionContext.getRequiredTestClass().getSimpleName();
+
+ List<DockerCompose> dockerComposeAnnotations = AnnotationUtils.findRepeatableAnnotations(testClass, DockerCompose.class);
+ for (DockerCompose dockerComposeAnnotation : dockerComposeAnnotations) {
+ DockerComposeData dockerComposeData = new DockerComposeData(dockerComposeAnnotation.id(),
+ dockerComposeAnnotation.locations(), dockerComposeAnnotation.services(),
+ dockerComposeAnnotation.log(), dockerComposeAnnotation.start(), dockerComposeAnnotation.order());
+ dockerComposeManager.addClassDockerComposeData(classKey, dockerComposeData);
+ }
+ }
+
+ @Override
+ public void beforeEach(ExtensionContext context) throws Exception {
+ // add method level compose info into compose manager
+ DockerComposeManager dockerComposeManager = getDockerComposeManager(context);
+
+ Method testMethod = context.getRequiredTestMethod();
+ String classKey = context.getRequiredTestClass().getSimpleName();
+ String methodKey = context.getRequiredTestMethod().getName();
+
+ List<DockerCompose> dockerComposeAnnotations = AnnotationUtils.findRepeatableAnnotations(testMethod, DockerCompose.class);
+ for (DockerCompose dockerComposeAnnotation : dockerComposeAnnotations) {
+ DockerComposeData dockerComposeData = new DockerComposeData(dockerComposeAnnotation.id(),
+ dockerComposeAnnotation.locations(), dockerComposeAnnotation.services(),
+ dockerComposeAnnotation.log(), dockerComposeAnnotation.start(), dockerComposeAnnotation.order());
+ dockerComposeManager.addMethodDockerComposeData(classKey, methodKey, dockerComposeData);
+ }
+ dockerComposeManager.build(classKey, methodKey);
+ }
+
+ @Override
+ public void afterEach(ExtensionContext context) throws Exception {
+ // clean containers related to class and method
+ DockerComposeManager dockerComposeManager = getDockerComposeManager(context);
+ String classKey = context.getRequiredTestClass().getSimpleName();
+ String methodKey = context.getRequiredTestMethod().getName();
+ dockerComposeManager.stop(classKey, methodKey);
+ }
+
+ @Override
+ public void afterAll(ExtensionContext context) throws Exception {
+ }
+
+ @Override
+ public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
+ throws ParameterResolutionException {
+ return (parameterContext.getParameter().getType() == DockerComposeInfo.class);
+ }
+
+ @Override
+ public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
+ throws ParameterResolutionException {
+ DockerComposeManager dockerComposeManager = getDockerComposeManager(extensionContext);
+ return new DefaultDockerComposeInfo(dockerComposeManager);
+ }
+
+ private static DockerComposeManager getDockerComposeManager(ExtensionContext context) {
+ // one DockerComposeManager per test class, kept in the root store so callbacks and parameter resolution share it
+ Class<?> testClass = context.getRequiredTestClass();
+ Store store = getStore(context);
+ return store.getOrComputeIfAbsent(testClass, key -> new DockerComposeManager(), DockerComposeManager.class);
+ }
+
+ private static Store getStore(ExtensionContext context) {
+ return context.getRoot().getStore(NAMESPACE);
+ }
+
+ private static class DefaultDockerComposeInfo implements DockerComposeInfo {
+ private final DockerComposeManager dockerComposeManager;
+
+ public DefaultDockerComposeInfo(DockerComposeManager dockerComposeManager) {
+ this.dockerComposeManager = dockerComposeManager;
+ }
+
+ @Override
+ public DockerComposeCluster id(String id) {
+ return new DefaultDockerComposeCluster(dockerComposeManager, id);
+ }
+ }
+
+ private static class DefaultDockerComposeCluster implements DockerComposeCluster {
+
+ private final DockerComposeManager dockerComposeManager;
+ private final String id;
+
+ public DefaultDockerComposeCluster(DockerComposeManager dockerComposeManager, String id) {
+ this.dockerComposeManager = dockerComposeManager;
+ this.id = id;
+ }
+
+ @Override
+ public DockerComposeRule getRule() {
+ return dockerComposeManager.getRule(id);
+ }
+
+ @Override
+ public void start() {
+ dockerComposeManager.startId(id);
+ }
+
+ @Override
+ public void stop() {
+ dockerComposeManager.stopId(id);
+ }
+ }
+}
diff --git a/spring-cloud-dataflow-completion/src/test/support/boot13/src/main/java/com/acme/boot13/Main.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeInfo.java
similarity index 74%
rename from spring-cloud-dataflow-completion/src/test/support/boot13/src/main/java/com/acme/boot13/Main.java
rename to spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeInfo.java
index 3c146901d9..4df416d24e 100644
--- a/spring-cloud-dataflow-completion/src/test/support/boot13/src/main/java/com/acme/boot13/Main.java
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeInfo.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2016 the original author or authors.
+ * Copyright 2018-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,12 +13,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.acme.boot13;
+package org.springframework.cloud.dataflow.common.test.docker.junit5;
 
-public class Main {
-
- public static void main(String[] args) {
- System.out.println("Hello World");
- }
+public interface DockerComposeInfo {
+
+ DockerComposeCluster id(String id);
 }
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeManager.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeManager.java
new file mode 100644
index 0000000000..1afc75d1e8
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposeManager.java
@@ -0,0 +1,273 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.junit5;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.springframework.cloud.dataflow.common.test.docker.compose.DockerComposeRule;
+import org.springframework.cloud.dataflow.common.test.docker.compose.DockerComposeRule.Builder;
+import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerComposeFiles;
+
+import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.HealthChecks.toHaveAllPortsOpen;
+
+/**
+ * Collects the docker-compose declarations made for a test class and its methods,
+ * builds one {@link DockerComposeRule} per declared cluster and manages their lifecycle.
+ * @author Janne Valkealahti
+ *
+ */
+public class DockerComposeManager {
+
+ // declarations and built rules are keyed by id + "$" + classKey (class level) or id + "$" + classKey + methodKey (method level)
+ private final Map<String, DockerComposeRule> rules = new HashMap<>();
+ private final Map<String, List<DockerComposeData>> classKeys = new HashMap<>();
+ private final Map<String, List<DockerComposeData>> methodKeys = new HashMap<>();
+
+ public DockerComposeManager() {}
+
+ public void addClassDockerComposeData(String classKey, DockerComposeData dockerComposeData) {
+ String key = dockerComposeData.id + "$" + classKey;
+ classKeys.putIfAbsent(key, new ArrayList<>());
+ classKeys.get(key).add(dockerComposeData);
+ }
+
+ public void addMethodDockerComposeData(String classKey, String methodKey, DockerComposeData dockerComposeData) {
+ String key = dockerComposeData.id + "$" + classKey;
+ if (classKeys.containsKey(key)) {
+ classKeys.get(key).add(dockerComposeData);
+ }
+ else {
+ key = dockerComposeData.id + "$" + classKey + methodKey;
+ methodKeys.putIfAbsent(key, new ArrayList<>());
+ methodKeys.get(key).add(dockerComposeData);
+ }
+ }
+
+ public DockerComposeRule getRule(String id) {
+ for (Entry<String, DockerComposeRule> e : rules.entrySet()) {
+ String idMatch = e.getKey().substring(0, e.getKey().indexOf("$"));
+ if (id.equals(idMatch)) {
+ return e.getValue();
+ }
+ }
+ throw new IllegalArgumentException("Id " + id + " not found");
+ }
+
+ public void build(String classKey, String methodKey) {
+
+ ArrayList<OrderingWrapper> toStart = new ArrayList<>();
+
+ // class level
+ for (Entry<String, List<DockerComposeData>> e : classKeys.entrySet()) {
+ String key = e.getKey();
+ ArrayList<String> locations = new ArrayList<>();
+ ArrayList<String> services = new ArrayList<>();
+ boolean start = true;
+ Integer order = Integer.MAX_VALUE;
+ String log = "";
+ for (DockerComposeData dockerComposeData : e.getValue()) {
+ locations.addAll(Arrays.asList(dockerComposeData.getLocations()));
+ services.addAll(Arrays.asList(dockerComposeData.getServices()));
+ if (!dockerComposeData.isStart()) {
+ start = false;
+ }
+ if (dockerComposeData.getOrder() < order) {
+ order = dockerComposeData.getOrder();
+ }
+ if (dockerComposeData.getLog() != null && dockerComposeData.getLog().length() > 0) {
+ log = dockerComposeData.getLog();
+ }
+ }
+ Builder<?> builder = DockerComposeRule.builder();
+ builder.files(DockerComposeFiles.from(locations.toArray(new String[0])));
+ for (String service : services) {
+ builder.waitingForService(service, toHaveAllPortsOpen(), DockerComposeRule.DEFAULT_TIMEOUT);
+ }
+ builder.saveLogsTo("build/test-docker-logs/" + log + classKey + "-" + methodKey);
+ DockerComposeRule rule = builder.build();
+ rules.put(key, rule);
+ if (start) {
+ toStart.add(new OrderingWrapper(order, rule));
+ }
+ }
+
+ // method level
+ for (Entry<String, List<DockerComposeData>> e : methodKeys.entrySet()) {
+ String key = e.getKey();
+ ArrayList<String> locations = new ArrayList<>();
+ ArrayList<String> services = new ArrayList<>();
+ boolean start = true;
+ Integer order = Integer.MAX_VALUE;
+ String log = "";
+ for (DockerComposeData dockerComposeData : e.getValue()) {
+ locations.addAll(Arrays.asList(dockerComposeData.getLocations()));
+ services.addAll(Arrays.asList(dockerComposeData.getServices()));
+ if (!dockerComposeData.isStart()) {
+ start = false;
+ }
+ if (dockerComposeData.getOrder() < order) {
+ order = dockerComposeData.getOrder();
+ }
+ if (dockerComposeData.getLog() != null && dockerComposeData.getLog().length() > 0) {
+ log = dockerComposeData.getLog();
+ }
+ }
+ Builder<?> builder = DockerComposeRule.builder();
+ builder.files(DockerComposeFiles.from(locations.toArray(new String[0])));
+ for (String service : services) {
+ builder.waitingForService(service, toHaveAllPortsOpen(), DockerComposeRule.DEFAULT_TIMEOUT);
+ }
+ builder.saveLogsTo("build/test-docker-logs/" + log + classKey + "-" + methodKey);
+ DockerComposeRule rule = builder.build();
+ rules.put(key, rule);
+ if (start) {
+ toStart.add(new OrderingWrapper(order, rule));
+ }
+ }
+
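+ // bring up the clusters flagged for automatic start, lowest order value first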
+ Collections.sort(toStart);
+ for (OrderingWrapper w : toStart) {
+ try {
+ w.getRule().before();
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ }
+
+ public void stop(String classKey, String methodKey) {
+ ArrayList<String> toRemove = new ArrayList<>();
+ for (Entry<String, DockerComposeRule> e : rules.entrySet()) {
+ String idMatch = e.getKey().substring(e.getKey().indexOf("$") + 1, e.getKey().length());
+ if (idMatch.equals(classKey)) {
+ toRemove.add(e.getKey());
+ }
+ if (idMatch.equals(classKey + methodKey)) {
+ toRemove.add(e.getKey());
+ }
+ }
+ for (String remove : toRemove) {
+ DockerComposeRule rule = rules.remove(remove);
+ if (rule != null) {
+ rule.after();
+ }
+ }
+ // for now, just clear both class and method keys
+ classKeys.clear();
+ methodKeys.clear();
+ }
+
+ public void startId(String id) {
+ DockerComposeRule rule = null;
+ for (Entry<String, DockerComposeRule> e : rules.entrySet()) {
+ String idMatch = e.getKey().substring(0, e.getKey().indexOf("$"));
+ if (id.equals(idMatch)) {
+ rule = e.getValue();
+ }
+ }
+ if (rule != null) {
+ try {
+ rule.before();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ public void stopId(String id) {
+ DockerComposeRule rule = null;
+ for (Entry<String, DockerComposeRule> e : rules.entrySet()) {
+ String idMatch = e.getKey().substring(0, e.getKey().indexOf("$"));
+ if (id.equals(idMatch)) {
+ rule = e.getValue();
+ }
+ }
+ if (rule != null) {
+ rule.after();
+ }
+ }
+
+ public static class DockerComposeData {
+
+ private final String id;
+ private final boolean start;
+ private final String[] locations;
+ private final String[] services;
+ private final String log;
+ private final int order;
+
+ public DockerComposeData(String id, String[] locations, String[] services, String log, boolean start, int order) {
+ this.id = id;
+ this.locations = locations;
+ this.services = services;
+ this.log = log;
+ this.start = start;
+ this.order = order;
+ }
+
+ public String[] getLocations() {
+ return locations;
+ }
+
+ public String[] getServices() {
+ return services;
+ }
+
+ public String getLog() {
+ return log;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public boolean isStart() {
+ return start;
+ }
+
+ public int getOrder() {
+ return order;
+ }
+ }
+
+ private static class OrderingWrapper implements Comparable<OrderingWrapper> {
+ Integer order;
+ DockerComposeRule rule;
+
+ public OrderingWrapper(Integer order, DockerComposeRule rule) {
+ this.order = order;
+ this.rule = rule;
+ }
+
+ public Integer getOrder() {
+ return order;
+ }
+
+ public DockerComposeRule getRule() {
+ return rule;
+ }
+
+ @Override
+ public int compareTo(OrderingWrapper o) {
+ return getOrder().compareTo(o.getOrder());
+ }
+ }
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposes.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposes.java
new file mode 100644
index 0000000000..303eec75de
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerComposes.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.junit5;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Inherited;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
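+/**
+ * Containing annotation for repeated {@link DockerCompose} declarations on the same
+ * test class or test method.
+ */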
+@Documented
+@Inherited
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE, ElementType.METHOD})
+public @interface DockerComposes {
+
+ DockerCompose[] value();
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/LegacyDockerComposeExtension.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/LegacyDockerComposeExtension.java
new file mode 100644
index 0000000000..4b641ad6fc
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/java/org/springframework/cloud/dataflow/common/test/docker/junit5/LegacyDockerComposeExtension.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.junit5;
+
+import java.util.List;
+
+import org.junit.jupiter.api.extension.AfterAllCallback;
+import org.junit.jupiter.api.extension.BeforeAllCallback;
+import org.junit.jupiter.api.extension.ExtensionContext;
+
+import org.springframework.cloud.dataflow.common.test.docker.compose.DockerComposeRule;
+import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerComposeFiles;
+import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ProjectName;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerMachine;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.ClusterWait;
+import org.springframework.cloud.dataflow.common.test.docker.compose.logging.LogCollector;
+
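+/**
+ * JUnit 5 adaptation of {@link DockerComposeRule}: the rule's builder and lifecycle are kept,
+ * but {@code before()} and {@code after()} are driven by the {@code BeforeAllCallback} and
+ * {@code AfterAllCallback} extension callbacks, so an instance can be registered
+ * programmatically, for example with JUnit's {@code @RegisterExtension}.
+ */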
+public class LegacyDockerComposeExtension extends DockerComposeRule implements BeforeAllCallback, AfterAllCallback {
+
+ private LegacyDockerComposeExtension(DockerComposeFiles files, List<ClusterWait> clusterWaits,
+ LogCollector logCollector, DockerMachine machine, boolean pullOnStartup, ProjectName projectName) {
+ super(files, clusterWaits, logCollector, machine, pullOnStartup, projectName);
+ }
+
+ @Override
+ public void afterAll(ExtensionContext context) throws Exception {
+ after();
+ }
+
+ @Override
+ public void beforeAll(ExtensionContext context) throws Exception {
+ before();
+ }
+
+ public static Builder builder() {
+ return new Builder();
+ }
+
+ public static class Builder extends DockerComposeRule.Builder<Builder> {
+
+ @Override
+ public LegacyDockerComposeExtension build() {
+ return new LegacyDockerComposeExtension(files, clusterWaits, logCollector, machine, pullOnStartup,
+ projectName);
+ }
+ }
+}
diff --git a/spring-cloud-dataflow-shell-core/.jdk8 b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/resources/application.properties
similarity index 100%
rename from spring-cloud-dataflow-shell-core/.jdk8
rename to spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/main/resources/application.properties
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose1Tests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose1Tests.java
new file mode 100644
index 0000000000..144dcaf0f5
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose1Tests.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.junit5;
+
+import java.io.IOException;
+
+import org.junit.jupiter.api.Test;
+
+import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerExecutionException;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.catchThrowable;
+
+@DockerCompose(locations = {"src/test/resources/docker-compose-1.yml"})
+class DockerCompose1Tests {
+
+ @Test
+ void compose(DockerComposeInfo dockerComposeInfo) throws IOException, InterruptedException {
+ assertThat(dockerComposeInfo).isNotNull();
+ assertThat(dockerComposeInfo.id("").getRule()).isNotNull();
+ assertThat(dockerComposeInfo.id("").getRule().containers().container("testservice1")).isNotNull();
+
+ Throwable thrown = catchThrowable(() -> {
+ dockerComposeInfo.id("").getRule().containers().container("testservice2").state();
+ });
+ assertThat(thrown)
+ .isInstanceOf(DockerExecutionException.class)
+ .hasNoCause();
+ assertThat(thrown).message()
+ .containsIgnoringCase("No such service: testservice2");
+ }
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose2Tests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose2Tests.java
new file mode 100644
index 0000000000..fe0081fd15
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose2Tests.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.junit5;
+
+import java.io.IOException;
+
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+
+@DockerCompose(id = DockerCompose2Tests.CLUSTER1, locations = {"src/test/resources/docker-compose-1.yml"})
+@DockerCompose(id = DockerCompose2Tests.CLUSTER2, locations = {"src/test/resources/docker-compose-2.yml"}, start = false)
+@ExtendWith(DockerComposeExtension.class)
+public class DockerCompose2Tests {
+
+ public final static String CLUSTER1 = "dc1";
+ public final static String CLUSTER2 = "dc2";
+ public final static String CLUSTER3 = "dc3";
+ public final static String CLUSTER4 = "dc4";
+
+ @Test
+ @DockerCompose(id = DockerCompose2Tests.CLUSTER3, locations = {"src/test/resources/docker-compose-3.yml"})
+ @DockerCompose(id = DockerCompose2Tests.CLUSTER4, locations = {"src/test/resources/docker-compose-4.yml"}, start = false)
+ void compose(DockerComposeInfo dockerComposeInfo) throws IOException, InterruptedException {
+
+ dockerComposeInfo.id(CLUSTER2).start();
+ Thread.sleep(1000);
+ dockerComposeInfo.id(CLUSTER4).start();
+ Thread.sleep(1000);
+ }
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose3Tests.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose3Tests.java
new file mode 100644
index 0000000000..a78fb664da
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/java/org/springframework/cloud/dataflow/common/test/docker/junit5/DockerCompose3Tests.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2020 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.junit5;
+
+import java.io.IOException;
+
+import org.junit.jupiter.api.Test;
+
+import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerExecutionException;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.catchThrowable;
+
+@DockerCompose(locations = {"classpath:org/springframework/cloud/dataflow/common/test/docker/junit5/docker-compose-cp1.yml"})
+class DockerCompose3Tests {
+
+ @Test
+ void compose(DockerComposeInfo dockerComposeInfo) throws IOException, InterruptedException {
+ assertThat(dockerComposeInfo).isNotNull();
+ assertThat(dockerComposeInfo.id("").getRule()).isNotNull();
+ assertThat(dockerComposeInfo.id("").getRule().containers().container("testservice1")).isNotNull();
+
+ Throwable thrown = catchThrowable(() -> {
+ dockerComposeInfo.id("").getRule().containers().container("testservice2").state();
+ });
+ assertThat(thrown).isInstanceOf(DockerExecutionException.class).hasNoCause()
+ .message().containsIgnoringCase("No such service: testservice2");
+ }
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-1.yml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-1.yml
new file mode 100644
index 0000000000..cb8dbff2d9
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-1.yml
@@ -0,0 +1,4 @@
+services:
+ testservice1:
+ image: 'springcloud/openjdk:latest'
+ command: sh -c 'sleep 60'
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-2.yml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-2.yml
new file mode 100644
index 0000000000..4500793c1f
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-2.yml
@@ -0,0 +1,5 @@
+services:
+ testservice2:
+ image: 'springcloud/openjdk:latest'
+ command: sh -c 'sleep 60'
+
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-3.yml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-3.yml
new file mode 100644
index 0000000000..38da37eb91
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-3.yml
@@ -0,0 +1,5 @@
+services:
+ testservice3:
+ image: 'springcloud/openjdk:latest'
+ command: sh -c 'sleep 60'
+
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-4.yml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-4.yml
new file mode 100644
index 0000000000..1605ea0e78
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-4.yml
@@ -0,0 +1,4 @@
+services:
+ testservice4:
+ image: 'springcloud/openjdk:latest'
+ command: sh -c 'sleep 60'
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-5.yml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-5.yml
new file mode 100644
index 0000000000..c7e4357f6c
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-5.yml
@@ -0,0 +1,4 @@
+services:
+ testservice5:
+ image: 'springcloud/openjdk:latest'
+ command: sh -c 'sleep 60'
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-6.yml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-6.yml
new file mode 100644
index 0000000000..682a582af4
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/docker-compose-6.yml
@@ -0,0 +1,5 @@
+services:
+ testservice6:
+ image: 'springcloud/openjdk:latest'
+ command: sh -c 'sleep 60'
+
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/org/springframework/cloud/dataflow/common/test/docker/junit5/docker-compose-cp1.yml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/org/springframework/cloud/dataflow/common/test/docker/junit5/docker-compose-cp1.yml
new file mode 100644
index 0000000000..cb8dbff2d9
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker-junit5/src/test/resources/org/springframework/cloud/dataflow/common/test/docker/junit5/docker-compose-cp1.yml
@@ -0,0 +1,4 @@
+services:
+ testservice1:
+ image: 'springcloud/openjdk:latest'
+ command: sh -c 'sleep 60'
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/pom.xml b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/pom.xml
new file mode 100644
index 0000000000..efdf8e931d
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/pom.xml
@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+
+	<artifactId>spring-cloud-dataflow-common-test-docker</artifactId>
+	<packaging>jar</packaging>
+
+	<name>Spring Cloud Dataflow Common Docker Test Support</name>
+	<description>Utilities to help using docker</description>
+
+	<parent>
+		<groupId>org.springframework.cloud</groupId>
+		<artifactId>spring-cloud-dataflow-common-parent</artifactId>
+		<version>3.0.0-SNAPSHOT</version>
+	</parent>
+
+	<properties>
+		true
+	</properties>
+
+	<dependencies>
+		<dependency>
+			<groupId>org.springframework</groupId>
+			<artifactId>spring-core</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>commons-io</groupId>
+			<artifactId>commons-io</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.commons</groupId>
+			<artifactId>commons-lang3</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-api</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>org.awaitility</groupId>
+			<artifactId>awaitility</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>joda-time</groupId>
+			<artifactId>joda-time</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>com.github.zafarkhaja</groupId>
+			<artifactId>java-semver</artifactId>
+		</dependency>
+		<dependency>
+			<groupId>org.springframework.boot</groupId>
+			<artifactId>spring-boot-starter-test</artifactId>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<scope>test</scope>
+		</dependency>
+	</dependencies>
+</project>
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/DockerComposeRule.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/DockerComposeRule.java
new file mode 100644
index 0000000000..448a116b06
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/DockerComposeRule.java
@@ -0,0 +1,296 @@
+/*
+ * Copyright 2018-2021 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.joda.time.Duration;
+import org.joda.time.ReadableDuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.DockerComposeFiles;
+import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ProjectName;
+import org.springframework.cloud.dataflow.common.test.docker.compose.configuration.ShutdownStrategy;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Cluster;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.Container;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.ContainerCache;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerMachine;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.DockerPort;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.ClusterHealthCheck;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.ClusterWait;
+import org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.HealthCheck;
+import org.springframework.cloud.dataflow.common.test.docker.compose.execution.ConflictingContainerRemovingDockerCompose;
+import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DefaultDockerCompose;
+import org.springframework.cloud.dataflow.common.test.docker.compose.execution.Docker;
+import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerCompose;
+import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecArgument;
+import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecOption;
+import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeExecutable;
+import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeRunArgument;
+import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerComposeRunOption;
+import org.springframework.cloud.dataflow.common.test.docker.compose.execution.DockerExecutable;
+import org.springframework.cloud.dataflow.common.test.docker.compose.execution.RetryingDockerCompose;
+import org.springframework.cloud.dataflow.common.test.docker.compose.logging.DoNothingLogCollector;
+import org.springframework.cloud.dataflow.common.test.docker.compose.logging.FileLogCollector;
+import org.springframework.cloud.dataflow.common.test.docker.compose.logging.LogCollector;
+import org.springframework.cloud.dataflow.common.test.docker.compose.logging.LogDirectory;
+
+import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.ClusterHealthCheck.serviceHealthCheck;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.connection.waiting.ClusterHealthCheck.transformingHealthCheck;
+
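+/**
+ * Entry point for driving a docker-compose cluster from tests: {@link #before()} optionally
+ * pulls and builds the images, brings the cluster up, starts log collection and waits for the
+ * configured health checks; {@link #after()} applies the shutdown strategy and stops log
+ * collection. Instances are assembled through the nested {@link Builder}.
+ */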
+public class DockerComposeRule {
+
+ public static final Duration DEFAULT_TIMEOUT = Duration.standardMinutes(2);
+ public static final int DEFAULT_RETRY_ATTEMPTS = 2;
+ private ProjectName projectName;
+
+ private static final Logger log = LoggerFactory.getLogger(DockerComposeRule.class);
+
+ public DockerPort hostNetworkedPort(int port) {
+ return new DockerPort(machine().getIp(), port, port);
+ }
+
+ private DockerComposeFiles files;
+ private List<ClusterWait> clusterWaits;
+ private LogCollector logCollector;
+ private DockerMachine machine;
+ private boolean pullOnStartup;
+
+ protected DockerComposeRule() {}
+
+ public DockerComposeRule(DockerComposeFiles files, List<ClusterWait> clusterWaits, LogCollector logCollector,
+ DockerMachine machine, boolean pullOnStartup, ProjectName projectName) {
+ super();
+ this.files = files;
+ this.clusterWaits = clusterWaits;
+ this.logCollector = logCollector;
+ this.machine = machine;
+ this.pullOnStartup = pullOnStartup;
+ this.projectName = projectName != null ? projectName : ProjectName.random();
+ }
+
+ public DockerComposeFiles files() {
+ return files;
+ }
+
+ public List<ClusterWait> clusterWaits() {
+ return clusterWaits;
+ }
+
+ public DockerMachine machine() {
+ return machine != null ? machine : DockerMachine.localMachine().build();
+ }
+
+ public ProjectName projectName() {
+ return projectName;
+ }
+
+ public DockerComposeExecutable dockerComposeExecutable() {
+ return DockerComposeExecutable.builder()
+ .dockerComposeFiles(files())
+ .dockerConfiguration(machine())
+ .projectName(projectName())
+ .build();
+ }
+
+ public DockerExecutable dockerExecutable() {
+ return DockerExecutable.builder()
+ .dockerConfiguration(machine())
+ .build();
+ }
+
+ public Docker docker() {
+ return new Docker(dockerExecutable());
+ }
+
+ public ShutdownStrategy shutdownStrategy() {
+ return ShutdownStrategy.KILL_DOWN;
+ }
+
+ public DockerCompose dockerCompose() {
+ DockerCompose dockerCompose = new DefaultDockerCompose(dockerComposeExecutable(), machine());
+ return new RetryingDockerCompose(retryAttempts(), dockerCompose);
+ }
+
+ public Cluster containers() {
+ return Cluster.builder()
+ .ip(machine().getIp())
+ .containerCache(new ContainerCache(docker(), dockerCompose()))
+ .build();
+ }
+
+ protected int retryAttempts() {
+ return DEFAULT_RETRY_ATTEMPTS;
+ }
+
+ protected boolean removeConflictingContainersOnStartup() {
+ return true;
+ }
+
+ protected boolean pullOnStartup() {
+ return pullOnStartup;
+ }
+
+ protected ReadableDuration nativeServiceHealthCheckTimeout() {
+ return DEFAULT_TIMEOUT;
+ }
+
+ protected LogCollector logCollector() {
+ return logCollector != null ? logCollector : new DoNothingLogCollector();
+ }
+
+ public void before() throws IOException, InterruptedException {
+ log.debug("Starting docker-compose cluster");
+ if (pullOnStartup()) {
+ dockerCompose().pull();
+ }
+
+ dockerCompose().build();
+
+ DockerCompose upDockerCompose = dockerCompose();
+ if (removeConflictingContainersOnStartup()) {
+ upDockerCompose = new ConflictingContainerRemovingDockerCompose(upDockerCompose, docker());
+ }
+ upDockerCompose.up();
+
+ logCollector().startCollecting(dockerCompose());
+ log.debug("Waiting for services");
+ new ClusterWait(ClusterHealthCheck.nativeHealthChecks(), nativeServiceHealthCheckTimeout())
+ .waitUntilReady(containers());
+ clusterWaits().forEach(clusterWait -> clusterWait.waitUntilReady(containers()));
+ log.debug("docker-compose cluster started");
+ }
+
+ public void after() {
+ try {
+ shutdownStrategy().shutdown(this.dockerCompose(), this.docker());
+ logCollector().stopCollecting();
+ } catch (IOException | InterruptedException e) {
+ throw new RuntimeException("Error cleaning up docker compose cluster", e);
+ }
+ }
+
+ public String exec(DockerComposeExecOption options, String containerName,
+ DockerComposeExecArgument arguments) throws IOException, InterruptedException {
+ return dockerCompose().exec(options, containerName, arguments);
+ }
+
+ public String run(DockerComposeRunOption options, String containerName,
+ DockerComposeRunArgument arguments) throws IOException, InterruptedException {
+ return dockerCompose().run(options, containerName, arguments);
+ }
+
+ public static Builder<?> builder() {
+ return new Builder<>();
+ }
+
+ public static class Builder<T extends Builder<T>> {
+
+ protected DockerComposeFiles files;
+ protected List<ClusterWait> clusterWaits = new ArrayList<>();
+ protected LogCollector logCollector;
+ protected DockerMachine machine;
+ protected boolean pullOnStartup;
+ protected ProjectName projectName;
+
+ public T files(DockerComposeFiles files) {
+ this.files = files;
+ return self();
+ }
+
+ public T file(String dockerComposeYmlFile) {
+ return files(DockerComposeFiles.from(dockerComposeYmlFile));
+ }
+
+ /**
+ * Save the output of docker logs to files, stored in the path directory.
+ *
+ * See {@link LogDirectory} for some useful utilities, for example:
+ * {@link LogDirectory#circleAwareLogDirectory}.
+ *
+ * @param path directory into which log files should be saved
+ * @return builder for chaining
+ */
+ public T saveLogsTo(String path) {
+ return logCollector(FileLogCollector.fromPath(path));
+ }
+
+ public T logCollector(LogCollector logCollector) {
+ this.logCollector = logCollector;
+ return self();
+ }
+
+ public T waitingForService(String serviceName, HealthCheck<Container> healthCheck, ReadableDuration timeout) {
+ ClusterHealthCheck clusterHealthCheck = serviceHealthCheck(serviceName, healthCheck);
+ return addClusterWait(new ClusterWait(clusterHealthCheck, timeout));
+ }
+
+ private T addClusterWait(ClusterWait clusterWait) {
+ clusterWaits.add(clusterWait);
+ return self();
+ }
+
+ public T waitingForServices(List<String> services, HealthCheck<List<Container>> healthCheck) {
+ return waitingForServices(services, healthCheck, DEFAULT_TIMEOUT);
+ }
+
+ public T waitingForServices(List<String> services, HealthCheck<List<Container>> healthCheck, ReadableDuration timeout) {
+ ClusterHealthCheck clusterHealthCheck = serviceHealthCheck(services, healthCheck);
+ return addClusterWait(new ClusterWait(clusterHealthCheck, timeout));
+ }
+
+ public T waitingForHostNetworkedPort(int port, HealthCheck<DockerPort> healthCheck) {
+ return waitingForHostNetworkedPort(port, healthCheck, DEFAULT_TIMEOUT);
+ }
+
+ public T waitingForHostNetworkedPort(int port, HealthCheck<DockerPort> healthCheck, ReadableDuration timeout) {
+ ClusterHealthCheck clusterHealthCheck = transformingHealthCheck(cluster -> new DockerPort(cluster.ip(), port, port), healthCheck);
+ return addClusterWait(new ClusterWait(clusterHealthCheck, timeout));
+ }
+
+ public T clusterWaits(Iterable<? extends ClusterWait> elements) {
+ elements.forEach(e -> clusterWaits.add(e));
+ return self();
+ }
+
+ public T machine(DockerMachine machine) {
+ this.machine = machine;
+ return self();
+ }
+
+ public T pullOnStartup(boolean pullOnStartup) {
+ this.pullOnStartup = pullOnStartup;
+ return self();
+ }
+
+ public T projectName(ProjectName projectName) {
+ this.projectName = projectName;
+ return self();
+ }
+
+ @SuppressWarnings("unchecked")
+ final T self() {
+ return (T) this;
+ }
+
+ public DockerComposeRule build() {
+ return new DockerComposeRule(files, clusterWaits, logCollector, machine, pullOnStartup, projectName);
+ }
+ }
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/AdditionalEnvironmentValidator.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/AdditionalEnvironmentValidator.java
new file mode 100644
index 0000000000..aad7b918e9
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/AdditionalEnvironmentValidator.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.configuration;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.springframework.util.Assert;
+
+import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_CERT_PATH;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_HOST;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_TLS_VERIFY;
+
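+/**
+ * Rejects additional environment variables that would interfere with the Docker connection
+ * settings ({@code DOCKER_HOST}, {@code DOCKER_TLS_VERIFY} and {@code DOCKER_CERT_PATH}).
+ */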
+public final class AdditionalEnvironmentValidator {
+
+ private static final Set<String> ILLEGAL_VARIABLES = new HashSet<>(Arrays.asList(DOCKER_TLS_VERIFY, DOCKER_HOST, DOCKER_CERT_PATH));
+
+ private AdditionalEnvironmentValidator() {}
+
+ public static Map<String, String> validate(Map<String, String> additionalEnvironment) {
+ HashSet<String> invalidVariables = new HashSet<>(additionalEnvironment.keySet());
+ invalidVariables.retainAll(ILLEGAL_VARIABLES);
+
+ String errorMessage = invalidVariables.stream()
+ .collect(Collectors.joining(", ",
+ "The following variables: ",
+ " cannot exist in your additional environment variable block as they will interfere with Docker."));
+ Assert.state(invalidVariables.isEmpty(), errorMessage);
+ return additionalEnvironment;
+ }
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonEnvironmentValidator.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonEnvironmentValidator.java
new file mode 100644
index 0000000000..084a2c9334
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonEnvironmentValidator.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.configuration;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.springframework.util.Assert;
+
+import static java.util.stream.Collectors.joining;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_CERT_PATH;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_HOST;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_TLS_VERIFY;
+
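+/**
+ * Ensures that none of the remote-connection variables ({@code DOCKER_HOST},
+ * {@code DOCKER_TLS_VERIFY}, {@code DOCKER_CERT_PATH}) are set when the tests talk to a
+ * local Docker daemon.
+ */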
+public class DaemonEnvironmentValidator implements EnvironmentValidator {
+
+ private static final Set<String> ILLEGAL_VARIABLES = new HashSet<>(Arrays.asList(DOCKER_TLS_VERIFY, DOCKER_HOST, DOCKER_CERT_PATH));
+
+ private static final DaemonEnvironmentValidator INSTANCE = new DaemonEnvironmentValidator();
+
+ public static DaemonEnvironmentValidator instance() {
+ return INSTANCE;
+ }
+
+ private DaemonEnvironmentValidator() {}
+
+ @Override
+ public void validateEnvironmentVariables(Map<String, String> dockerEnvironment) {
+ Set<String> invalidVariables = ILLEGAL_VARIABLES.stream()
+ .filter(dockerEnvironment::containsKey)
+ .collect(Collectors.toSet());
+
+ String errorMessage = invalidVariables.stream()
+ .collect(joining(", ",
+ "These variables were set: ",
+ ". They cannot be set when connecting to a local docker daemon."));
+ Assert.state(invalidVariables.isEmpty(), errorMessage);
+ }
+
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonHostIpResolver.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonHostIpResolver.java
new file mode 100644
index 0000000000..2917b70aae
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DaemonHostIpResolver.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.configuration;
+
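+/**
+ * Host IP resolution for a local Docker daemon: containers are always reachable on
+ * {@code 127.0.0.1}, so the {@code DOCKER_HOST} value is ignored.
+ */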
+public class DaemonHostIpResolver implements HostIpResolver {
+
+ public static final String LOCALHOST = "127.0.0.1";
+
+ @Override
+ public String resolveIp(String dockerHost) {
+ return LOCALHOST;
+ }
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerComposeFiles.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerComposeFiles.java
new file mode 100644
index 0000000000..8427cc0b76
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerComposeFiles.java
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2018-2020 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.configuration;
+
+import java.io.File;
+import java.net.URL;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+
+import org.springframework.util.Assert;
+
+import static java.util.stream.Collectors.joining;
+import static java.util.stream.Collectors.toList;
+
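+/**
+ * The docker-compose files backing a cluster. {@link #from(String...)} accepts plain file
+ * system paths as well as {@code classpath:} locations, and
+ * {@link #constructComposeFileCommand()} turns them into the {@code --file} arguments
+ * passed to docker-compose.
+ */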
+public class DockerComposeFiles {
+
+ private final List<File> dockerComposeFiles;
+
+ public DockerComposeFiles(List<File> dockerComposeFiles) {
+ this.dockerComposeFiles = dockerComposeFiles;
+ }
+
+ public static DockerComposeFiles from(String... dockerComposeFilenames) {
+ List<File> dockerComposeFiles = Arrays.asList(dockerComposeFilenames).stream()
+ .map(fileName -> {
+ Path path = null;
+ if (fileName.startsWith("classpath:")) {
+ URL resourceUrl = ClassLoader.getSystemResource(fileName.substring(10));
+ if (resourceUrl == null) {
+ throw new IllegalArgumentException("Can't find resource " + fileName);
+ }
+ try {
+ path = Paths.get(resourceUrl.toURI());
+ } catch (Exception e) {
+ throw new IllegalArgumentException("Can't find resource " + fileName, e);
+ }
+ } else {
+ path = Paths.get(fileName);
+ }
+ return path;
+ })
+ .map(path -> path.toFile())
+ .collect(toList());
+ validateAtLeastOneComposeFileSpecified(dockerComposeFiles);
+ validateComposeFilesExist(dockerComposeFiles);
+ return new DockerComposeFiles(dockerComposeFiles);
+ }
+
+ public static DockerComposeFiles fromxx(String... dockerComposeFilenames) {
+ List<File> dockerComposeFiles = Arrays.asList(dockerComposeFilenames).stream()
+ .map(File::new)
+ .collect(toList());
+ validateAtLeastOneComposeFileSpecified(dockerComposeFiles);
+ validateComposeFilesExist(dockerComposeFiles);
+ return new DockerComposeFiles(dockerComposeFiles);
+ }
+
+ public List<String> constructComposeFileCommand() {
+ return dockerComposeFiles.stream()
+ .map(File::getAbsolutePath)
+ .map(f -> Arrays.asList("--file", f))
+ .flatMap(Collection::stream)
+ .collect(toList());
+ }
+
+ private static void validateAtLeastOneComposeFileSpecified(List<File> dockerComposeFiles) {
+ Assert.state(!dockerComposeFiles.isEmpty(), "A docker compose file must be specified.");
+ }
+
+ private static void validateComposeFilesExist(List<File> dockerComposeFiles) {
+ List<File> missingFiles = dockerComposeFiles.stream()
+ .filter(f -> !f.exists())
+ .collect(toList());
+ String errorMessage = missingFiles.stream()
+ .map(File::getAbsolutePath)
+ .collect(joining(", ", "The following docker-compose files: ", " do not exist."));
+ Assert.state(missingFiles.isEmpty(), errorMessage);
+ }
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerType.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerType.java
new file mode 100644
index 0000000000..92038f3dab
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/DockerType.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.configuration;
+
+import java.util.Map;
+import java.util.Optional;
+
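+/**
+ * The supported ways of reaching a Docker engine from tests: a local {@code DAEMON} or a
+ * {@code REMOTE} engine addressed via {@code DOCKER_HOST}. Each constant bundles the
+ * environment validation and host-IP resolution strategy appropriate for that setup.
+ */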
+public enum DockerType implements HostIpResolver, EnvironmentValidator {
+ DAEMON(DaemonEnvironmentValidator.instance(), new DaemonHostIpResolver()),
+ REMOTE(RemoteEnvironmentValidator.instance(), new RemoteHostIpResolver());
+
+ private final EnvironmentValidator validator;
+ private final HostIpResolver resolver;
+
+ DockerType(EnvironmentValidator validator, HostIpResolver resolver) {
+ this.validator = validator;
+ this.resolver = resolver;
+ }
+
+ @Override
+ public void validateEnvironmentVariables(Map<String, String> dockerEnvironment) {
+ validator.validateEnvironmentVariables(dockerEnvironment);
+ }
+
+ @Override
+ public String resolveIp(String dockerHost) {
+ return resolver.resolveIp(dockerHost);
+ }
+
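+ /**
+ * Returns the first {@link DockerType} whose validator accepts the given environment, or
+ * {@link Optional#empty()} if neither the daemon nor the remote configuration applies.
+ * @param environment the environment variables to inspect
+ * @return the first matching docker type, if any
+ */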
+ public static Optional<DockerType> getFirstValidDockerTypeForEnvironment(Map<String, String> environment) {
+ for (DockerType currType : DockerType.values()) {
+ try {
+ currType.validateEnvironmentVariables(environment);
+ return Optional.of(currType);
+ } catch (IllegalStateException e) {
+ // ignore and try next type
+ }
+ }
+ return Optional.empty();
+ }
+
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/EnvironmentValidator.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/EnvironmentValidator.java
new file mode 100644
index 0000000000..e2b7137e03
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/EnvironmentValidator.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.configuration;
+
+import java.util.Map;
+
+public interface EnvironmentValidator {
+
+ /**
+ * Validates that the entries in the provided map are valid for the current environment.
+ * The provided map represents the environment variables that should be used for the
+ * process, where the keys are the environment variable names and the values are their corresponding values.
+ * If the validator determines the state represented by the map is invalid (either because
+ * required values are missing or forbidden values are present), the method should throw
+ * an exception.
+ *
+ * @param dockerEnvironment A map representing the docker environment
+ */
+ void validateEnvironmentVariables(Map<String, String> dockerEnvironment);
+
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/EnvironmentVariables.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/EnvironmentVariables.java
new file mode 100644
index 0000000000..141a667401
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/EnvironmentVariables.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.configuration;
+
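+/**
+ * Names of the standard Docker client environment variables, plus the {@code tcp://}
+ * protocol prefix, used when validating and interpreting the test environment.
+ */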
+public interface EnvironmentVariables {
+
+ String TCP_PROTOCOL = "tcp://";
+ String DOCKER_CERT_PATH = "DOCKER_CERT_PATH";
+ String DOCKER_HOST = "DOCKER_HOST";
+ String DOCKER_TLS_VERIFY = "DOCKER_TLS_VERIFY";
+
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/HostIpResolver.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/HostIpResolver.java
new file mode 100644
index 0000000000..3136bf4388
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/HostIpResolver.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.configuration;
+
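+/**
+ * Strategy for deriving the IP address on which containers can be reached, given the
+ * configured {@code DOCKER_HOST} value.
+ */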
+public interface HostIpResolver {
+
+ String resolveIp(String dockerHost);
+
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/PackageVisible.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/PackageVisible.java
new file mode 100644
index 0000000000..1541717ea9
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/PackageVisible.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.configuration;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Target;
+
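+/**
+ * Marker annotation documenting that a type or package is intended to be package-visible
+ * rather than part of the public API.
+ */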
+@Target({ElementType.PACKAGE, ElementType.TYPE})
+@interface PackageVisible {}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/ProjectName.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/ProjectName.java
new file mode 100644
index 0000000000..17bd270f03
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/ProjectName.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.configuration;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.UUID;
+import java.util.function.Predicate;
+import java.util.regex.Pattern;
+
+import org.springframework.util.Assert;
+
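+/**
+ * The docker-compose project name used to namespace the containers, networks and volumes of
+ * a test run. Only lowercase letters and digits are accepted, so the value is passed to the
+ * docker-compose CLI without modification.
+ */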
+@PackageVisible
+public class ProjectName {
+
+ private String projectName;
+
+ public ProjectName(String projectName) {
+ this.projectName = projectName;
+ validate();
+ }
+
+ protected String projectName() {
+ return projectName;
+ }
+
+ protected void validate() {
+ Assert.state(projectName().trim().length() > 0, "ProjectName must not be blank.");
+ Assert.state(validCharacters(),
+ "ProjectName '" + projectName() + "' not allowed, please use lowercase letters and numbers only.");
+ }
+
+ // Only allows strings that docker-compose-cli would not modify
+ // https://github.com/docker/compose/blob/85e2fb63b3309280a602f1f76d77d3a82e53b6c2/compose/cli/command.py#L84
+ protected boolean validCharacters() {
+ Predicate<String> illegalCharacters = Pattern.compile("[^a-z0-9]").asPredicate();
+ return !illegalCharacters.test(projectName());
+ }
+
+ public String asString() {
+ return projectName();
+ }
+
+ public List<String> constructComposeFileCommand() {
+ return Arrays.asList("--project-name", projectName());
+ }
+
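+ /**
+ * Creates a project name from the first eight characters of a random UUID, which keeps
+ * concurrent test runs isolated from each other.
+ * @return a random project name
+ */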
+ public static ProjectName random() {
+ return ProjectName.of(UUID.randomUUID().toString().substring(0, 8));
+ }
+
+ /**
+ * A name consisting of lowercase letters and numbers only.
+ *
+ * @param name the name
+ * @return project name
+ */
+ public static ProjectName fromString(String name) {
+ return ProjectName.of(name);
+ }
+
+ private static ProjectName of(String name) {
+ return new ProjectName(name);
+ }
+}
diff --git a/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteEnvironmentValidator.java b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteEnvironmentValidator.java
new file mode 100644
index 0000000000..8013335c06
--- /dev/null
+++ b/spring-cloud-dataflow-common/spring-cloud-dataflow-common-test-docker/src/main/java/org/springframework/cloud/dataflow/common/test/docker/compose/configuration/RemoteEnvironmentValidator.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2018-2019 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.common.test.docker.compose.configuration;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.springframework.util.Assert;
+import org.springframework.util.StringUtils;
+
+import static java.util.stream.Collectors.joining;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_CERT_PATH;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_HOST;
+import static org.springframework.cloud.dataflow.common.test.docker.compose.configuration.EnvironmentVariables.DOCKER_TLS_VERIFY;
+
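+/**
+ * Validates that the environment variables required to reach a remote Docker engine, such as
+ * {@code DOCKER_HOST}, are present, reporting any missing ones with a hint to run
+ * {@code docker-machine env}.
+ */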
+public class RemoteEnvironmentValidator implements EnvironmentValidator {
+
+ private static final Set<String> SECURE_VARIABLES = new HashSet<>(Arrays.asList(DOCKER_TLS_VERIFY, DOCKER_CERT_PATH));
+ private static final RemoteEnvironmentValidator VALIDATOR = new RemoteEnvironmentValidator();
+
+ public static RemoteEnvironmentValidator instance() {
+ return VALIDATOR;
+ }
+
+ private RemoteEnvironmentValidator() {}
+
+ @Override
+ public void validateEnvironmentVariables(Map<String, String> dockerEnvironment) {
+ Collection<String> missingVariables = getMissingEnvVariables(dockerEnvironment);
+ String errorMessage = missingVariables.stream()
+ .collect(joining(", ",
+ "Missing required environment variables: ",
+ ". Please run `docker-machine env