From 1e64fe321048a157c103f7d3ea97d2a166f2a610 Mon Sep 17 00:00:00 2001 From: Corneil du Plessis Date: Fri, 21 Jul 2023 17:07:44 +0200 Subject: [PATCH] Boot 3 changes (#5404) * First pass on TASK and BATCH updates. * Just polishing up some of the code as I was reading it. (#5388) * Now throws NoSuchSchemaTarget instead of NPE when an invalid schema is specified (#5390) * Did some polishing * Just a little more polishing * Updated so that Boot 3 tasks and batch jobs run, CTR launches Boot 2 and Boot 3 jobs, and the shell reports on jobs and job lists. * Updated shell commands for jobs. * Resolved job execution and job instance controllers. Updated documentation. * Updated usage of LaunchResponse and LaunchResponseResource * TaskJobExecution was using jobExecutionId for the taskExecutionId (#5399) The DefaultTaskJobTests (integration) will now pass. * Added tasks/executions/launch to support Boot 2 and Boot 3 tasks, while tasks/executions will only work for Boot 2 workloads. * Updated for launching and backward compatibility. * Fixed retrieval of steps. * Updated CTR properties for task names / ids. * Cleaned up properties for tablePrefix on CTR tasks. * Fixed the request parameters in TaskTemplate.launch * Refactored to resolve warnings. * Fixed deleting of child executions * Update prometheus-rsocket-proxy to 1.5.2 * Task List shell command will now work properly. (#5407) An additional header, schemaType, was added to task definitions; this caused the shell to crash without reporting an exception. Also fixed tests in DeployerPropertiesUtilTests. * Update prometheus-rsocket-proxy to 1.5.2. Fixed test errors relating to the additional arguments. Updated CTR support for END and FAIL nodes. * Fixed StackOverflowError in findChildTasks.
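For reviewers, the client-side flow these commits add can be sketched as follows. This is a minimal sketch, assuming TaskOperations#launch now returns a LaunchResponseResource and that the resource exposes getExecutionId()/getSchemaTarget(); those names are inferred from the commit messages above, not copied from the patch.

import java.net.URI;
import java.util.Collections;

import org.springframework.cloud.dataflow.rest.client.DataFlowTemplate;
import org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource;

public class LaunchFlowSketch {

    public static void main(String[] args) {
        // Connect to a locally running Data Flow server.
        DataFlowTemplate dataFlow = new DataFlowTemplate(URI.create("http://localhost:9393"));

        // POST /tasks/executions/launch supports Boot 2 and Boot 3 tasks;
        // POST /tasks/executions remains Boot 2 only (per the commit message).
        LaunchResponseResource response = dataFlow.taskOperations()
                .launch("my-task", Collections.emptyMap(), Collections.emptyList());

        // The response pairs the execution id with its schema target
        // ("boot2" or "boot3"); follow-up queries must pass both.
        // Accessor names here are assumptions.
        System.out.println("launched execution " + response.getExecutionId()
                + " on schema target " + response.getSchemaTarget());
    }
}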
--------- Co-authored-by: Glenn Renfro --- .github/workflows/ci-pr.yml | 2 +- .github/workflows/ci.yml | 2 +- build-docs.sh | 8 + pom.xml | 3 + spring-cloud-dataflow-aggregate-task/pom.xml | 104 + .../task/AggregateExecutionSupport.java | 57 + .../task/AggregateTaskConfiguration.java | 98 + .../aggregate/task/AggregateTaskExplorer.java | 169 + .../task/DataflowTaskExecutionQueryDao.java | 177 + .../aggregate/task/TaskDefinitionReader.java | 7 + .../aggregate/task/TaskDeploymentReader.java | 11 + .../task/TaskRepositoryContainer.java | 7 + ...ggregateDataFlowTaskExecutionQueryDao.java | 523 ++ .../DefaultAggregateExecutionSupport.java | 136 + .../impl/DefaultAggregateTaskExplorer.java | 279 + .../impl/DefaultTaskRepositoryContainer.java | 72 + .../src/test/resources/logback-test.xml | 6 + spring-cloud-dataflow-autoconfigure/pom.xml | 4 + .../AbstractSchedulerPerPlatformTest.java | 32 +- spring-cloud-dataflow-build/pom.xml | 13 +- .../pom.xml | 4 +- spring-cloud-dataflow-classic-docs/pom.xml | 4 + .../rest/documentation/ApiDocumentation.java | 9 +- .../AppRegistryDocumentation.java | 355 +- .../rest/documentation/BaseDocumentation.java | 102 +- .../JobExecutionsDocumentation.java | 391 +- .../JobInstancesDocumentation.java | 52 +- .../JobStepExecutionsDocumentation.java | 155 +- .../TaskExecutionsDocumentation.java | 300 +- .../documentation/TaskLogsDocumentation.java | 12 +- .../src/test/resources/rest-docs-config.yml | 3 + spring-cloud-dataflow-completion/pom.xml | 4 + .../ComposedBatchConfigurer.java | 88 +- .../ComposedTaskRunnerConfiguration.java | 92 +- .../ComposedTaskRunnerStepFactory.java | 79 +- .../ComposedTaskRunnerTaskListener.java | 23 + .../ComposedTaskStepExecutionListener.java | 61 +- .../TaskExplorerContainer.java | 59 + .../TaskLauncherTasklet.java | 145 +- .../src/main/resources/application.properties | 1 + ...kRunnerConfigurationNoPropertiesTests.java | 18 +- ...unnerConfigurationWithPropertiesTests.java | 1 + .../ComposedTaskRunnerStepFactoryTests.java | 16 +- ...omposedTaskStepExecutionListenerTests.java | 24 +- .../TaskLauncherTaskletTests.java | 96 +- spring-cloud-dataflow-core/pom.xml | 22 +- .../cloud/dataflow/core/AppRegistration.java | 6 +- .../cloud/dataflow/core/LaunchResponse.java | 59 + .../core/database}/support/DatabaseType.java | 10 +- .../database/support/IncrementerType.java | 7 + .../MariaDBSequenceMaxValueIncrementer.java | 18 + .../MultiSchemaIncrementerFactory.java | 74 + ...ultiSchemaTaskExecutionDaoFactoryBean.java | 35 + .../SqlServerSequenceMaxValueIncrementer.java | 18 + spring-cloud-dataflow-docs/pom.xml | 39 +- .../src/main/asciidoc/api-guide.adoc | 171 +- .../main/asciidoc/configuration-carvel.adoc | 46 +- .../src/main/asciidoc/configuration.adoc | 2 +- spring-cloud-dataflow-parent/pom.xml | 16 +- .../pom.xml | 4 + spring-cloud-dataflow-registry/pom.xml | 4 + .../registry/service/AppRegistryService.java | 2 +- .../service/DefaultAppRegistryService.java | 2 +- .../DefaultAppRegistryServiceTests.java | 2 +- .../rest/client/AppRegistryOperations.java | 2 +- .../rest/client/AppRegistryTemplate.java | 2 +- .../rest/client/DataFlowTemplate.java | 55 +- .../dataflow/rest/client/JobOperations.java | 10 +- .../dataflow/rest/client/JobTemplate.java | 72 +- .../dataflow/rest/client/TaskOperations.java | 13 +- .../dataflow/rest/client/TaskTemplate.java | 100 +- .../DataFlowClientAutoConfiguration.java | 5 +- .../dataflow/rest/client/dsl/task/Task.java | 87 +- .../rest/client/DataflowTemplateTests.java | 16 +- .../rest/client/TaskTemplateTests.java | 6 +- 
.../config/DataFlowClientPropertiesTests.java | 2 +- .../dataflow/rest/job/TaskJobExecution.java | 24 +- .../rest/job/TaskJobExecutionRel.java | 8 +- .../resource/AppRegistrationResource.java | 3 +- .../DetailedAppRegistrationResource.java | 2 +- .../rest/resource/JobExecutionResource.java | 17 +- .../resource/JobExecutionThinResource.java | 18 +- .../rest/resource/LaunchResponseResource.java | 32 + .../resource/SchemaVersionTargetResource.java | 4 +- .../SchemaVersionTargetsResource.java | 2 +- .../rest/resource/StepExecutionResource.java | 10 +- .../rest/resource/TaskExecutionResource.java | 21 +- .../rest/util/DeploymentPropertiesUtils.java | 34 +- .../resource/TaskExecutionResourceTests.java | 156 +- .../util/DeploymentPropertiesUtilsTests.java | 4 +- spring-cloud-dataflow-schema-core/pom.xml | 70 + .../schema/AggregateTaskExecution.java | 243 + .../schema}/AppBootSchemaVersion.java | 2 +- .../AppBootSchemaVersionDeserializer.java | 2 +- .../AppBootSchemaVersionSerializer.java | 2 +- .../schema}/AppBootSchemaVersions.java | 2 +- .../schema}/AppBootVersionConverter.java | 2 +- .../dataflow/schema}/SchemaVersionTarget.java | 8 +- .../schema}/SchemaVersionTargets.java | 2 +- .../schema}/AppBootSchemaVersionTests.java | 4 +- spring-cloud-dataflow-schema/pom.xml | 79 + .../schema}/service/SchemaService.java | 8 +- .../service/SchemaServiceConfiguration.java | 49 + .../service/impl/DefaultSchemaService.java | 36 +- .../impl/DefaultSchemaServiceTests.java | 16 +- spring-cloud-dataflow-server-core/pom.xml | 17 +- .../batch/JdbcSearchableJobExecutionDao.java | 24 +- .../dataflow/server/batch/JobService.java | 4 +- .../server/batch/SimpleJobService.java | 92 +- .../batch/SimpleJobServiceFactoryBean.java | 3 +- .../AggregateDataFlowTaskConfiguration.java | 167 + .../DataFlowControllerAutoConfiguration.java | 67 +- .../config/DataFlowServerConfiguration.java | 53 +- .../config/features/TaskConfiguration.java | 219 +- .../server/config/web/WebConfiguration.java | 2 +- .../controller/ApiNotSupportedException.java | 8 + .../controller/AppRegistryController.java | 28 +- .../controller/JobExecutionController.java | 97 +- .../JobExecutionThinController.java | 118 +- .../controller/JobInstanceController.java | 34 +- .../JobStepExecutionController.java | 68 +- .../JobStepExecutionProgressController.java | 89 +- .../NoSuchSchemaTargetException.java | 2 +- .../controller/RestControllerAdvice.java | 18 +- .../server/controller/RootController.java | 7 +- .../controller/RuntimeStreamsController.java | 7 +- .../server/controller/SchemaController.java | 16 +- .../StreamDefinitionController.java | 93 +- .../StreamDeploymentController.java | 12 +- .../controller/TaskDefinitionController.java | 106 +- .../controller/TaskExecutionController.java | 255 +- .../server/controller/TaskLogsController.java | 8 +- .../controller/TaskPlatformController.java | 6 +- .../controller/TaskSchedulerController.java | 30 +- .../controller/TasksInfoController.java | 8 +- .../server/controller/VisibleProperties.java | 18 +- .../DefaultTaskDefinitionAssembler.java | 40 +- ...efaultTaskDefinitionAssemblerProvider.java | 26 +- .../TaskExecutionAwareTaskDefinition.java | 7 +- .../TaskExecutionControllerDeleteAction.java | 4 +- .../AbstractAggregateViewMigration.java | 64 + .../AbstractBoot3InitialSetupMigration.java | 2 +- .../AbstractCaseSensitiveMigration.java | 53 + .../db/migration/PostgreSQLTextToOID.java | 66 + .../V7__Boot3_Add_Task3_Batch5_Schema.java | 271 +- .../migration/db2/V8__AddAggregateViews.java | 7 + 
.../V6__Boot3_Add_Task3_Batch5_Schema.java | 258 +- .../mariadb/V8__RenameLowerCaseTables.java | 44 + .../mariadb/V9__AddAggregateViews.java | 7 + .../V7__Boot3_Add_Task3_Batch5_Schema.java | 367 +- .../mysql/V8__RenameLowerCaseTables.java | 36 + .../mysql/V9__AddAggregateViews.java | 7 + .../V7__Boot3_Add_Task3_Batch5_Schema.java | 195 +- .../oracle/V8__AddAggregateViews.java | 7 + .../V7__Boot3_Add_Task3_Batch5_Schema.java | 199 +- .../postgresql/V8__AddAggregateViews.java | 22 + .../postgresql/V9__ChangeTextTypes.java | 39 + .../V7__Boot3_Add_Task3_Batch5_Schema.java | 202 +- .../sqlserver/V8__AddAggregateViews.java | 22 + .../server/job/TaskExplorerFactoryBean.java | 17 +- .../support/StepExecutionResourceBuilder.java | 28 +- .../repository/AggregateJobQueryDao.java | 57 + .../DataflowJobExecutionDaoContainer.java | 44 + .../DataflowTaskExecutionDaoContainer.java | 44 + .../DataflowTaskExecutionMetadataDao.java | 1 + ...flowTaskExecutionMetadataDaoContainer.java | 47 + .../DefaultTaskDefinitionReader.java | 36 + .../DefaultTaskDeploymentReader.java | 46 + .../repository/JdbcAggregateJobQueryDao.java | 774 +++ .../JdbcDataflowJobExecutionDao.java | 1 + .../JdbcDataflowTaskExecutionDao.java | 104 +- .../JdbcDataflowTaskExecutionMetadataDao.java | 66 +- .../repository/JobExecutionDaoContainer.java | 65 + .../repository/JobRepositoryContainer.java | 59 + .../NoSuchTaskExecutionException.java | 11 +- .../repository/TaskBatchDaoContainer.java | 47 + .../repository/TaskDefinitionRepository.java | 1 + .../repository/TaskDeploymentRepository.java | 4 + .../repository/TaskExecutionDaoContainer.java | 54 + .../support/MariaDBPagingQueryProvider.java | 35 + .../repository/support/SchemaUtilities.java | 15 + .../SqlPagingQueryProviderFactoryBean.java | 25 +- .../server/service/JobExplorerContainer.java | 40 + .../server/service/JobServiceContainer.java | 58 + .../server/service/TaskDeleteService.java | 22 +- .../service/TaskExecutionInfoService.java | 3 + .../server/service/TaskExecutionService.java | 12 +- .../server/service/TaskJobService.java | 58 +- .../impl/DefaultTaskDeleteService.java | 491 +- .../impl/DefaultTaskExecutionInfoService.java | 115 +- ...DefaultTaskExecutionRepositoryService.java | 26 +- .../impl/DefaultTaskExecutionService.java | 530 +- .../service/impl/DefaultTaskJobService.java | 249 +- .../server/service/impl/TaskServiceUtils.java | 6 +- .../main/resources/application-init-db2.yml | 1 + .../resources/application-init-mariadb.yml | 4 +- .../main/resources/application-init-mysql.yml | 4 + .../resources/application-init-oracle.yml | 3 +- .../resources/application-init-postgresql.yml | 2 + .../resources/application-init-sqlserver.yml | 2 + .../db/migration/h2/V1__INITIAL_SETUP.sql | 70 +- .../resources/schemas/db2/V7-dataflow.sql | 11 + .../resources/schemas/db2/V8-dataflow.sql | 29 + .../resources/schemas/mariadb/V6-dataflow.sql | 217 +- .../resources/schemas/mariadb/V8-dataflow.sql | 4 + .../resources/schemas/mariadb/V9-dataflow.sql | 29 + .../resources/schemas/mysql/V1-dataflow.sql | 2 +- .../resources/schemas/mysql/V7-dataflow.sql | 47 +- .../resources/schemas/mysql/V8-dataflow.sql | 4 + .../resources/schemas/mysql/V9-dataflow.sql | 29 + .../resources/schemas/oracle/V7-dataflow.sql | 19 +- .../resources/schemas/oracle/V8-dataflow.sql | 29 + .../schemas/postgresql/V7-dataflow.sql | 11 + .../schemas/postgresql/V8-dataflow.sql | 29 + .../schemas/postgresql/V9-dataflow.sql | 4 + .../schemas/sqlserver/V6-dataflow.sql | 34 +- .../schemas/sqlserver/V7-dataflow.sql | 29 + 
.../schemas/sqlserver/V8-dataflow.sql | 4 + .../DataFlowServerConfigurationTests.java | 35 +- .../config/EmptyDefaultTestApplication.java | 9 +- .../config/H2ServerConfigurationTests.java | 16 +- .../server/config/LocalPlatformTests.java | 2 +- ...licationEnvironmentPostProcessorTests.java | 7 +- .../SpringDocAutoConfigurationTests.java | 8 +- .../server/configuration/JobDependencies.java | 334 +- .../TaskServiceDependencies.java | 274 +- .../configuration/TestDependencies.java | 299 +- .../AppRegistryControllerTests.java | 89 +- .../JobExecutionControllerTests.java | 152 +- .../JobExecutionThinControllerTests.java | 35 +- .../server/controller/JobExecutionUtils.java | 75 +- .../JobInstanceControllerTests.java | 31 +- .../JobStepExecutionControllerTests.java | 41 +- .../controller/RootControllerTests.java | 10 +- .../RuntimeAppsControllerTests.java | 11 +- .../controller/TaskControllerTests.java | 72 +- .../TaskExecutionControllerTests.java | 309 +- .../controller/TaskLogsControllerTests.java | 2 + .../TaskPlatformControllerTests.java | 2 + .../controller/TasksInfoControllerTests.java | 46 +- .../db/migration/PostgreSQLTextToOIDTest.java | 161 + .../JdbcDataflowTaskExecutionDaoTests.java | 59 +- .../TaskExecutionExplorerTests.java | 98 +- .../service/impl/AggregateTaskTests.java | 157 + .../DefaultStreamServiceIntegrationTests.java | 5 +- .../impl/DefaultStreamServiceTests.java | 7 +- .../impl/DefaultTaskDeleteServiceTests.java | 89 +- .../DefaultTaskExecutionServiceTests.java | 810 +-- ...tTaskExecutionServiceTransactionTests.java | 65 +- .../impl/DefaultTaskJobServiceTests.java | 85 +- .../src/test/resources/logback-test.xml | 1 + .../resources/root-controller-result.json | 8 +- spring-cloud-dataflow-server/pom.xml | 31 +- .../single/DataFlowServerApplication.java | 2 + .../dataflow/integration/test/DataFlowIT.java | 4934 +++++++++-------- .../test/db/AbstractDataflowTests.java | 2 - .../db/migration/AbstractSmokeTest.java | 66 + .../server/db/migration/DB2SmokeTest.java | 33 + .../server/db/migration/MariaDBSmokeTest.java | 36 + .../server/db/migration/MySQL57SmokeTest.java | 35 + .../server/db/migration/MySQL8SmokeTest.java | 37 + .../server/db/migration/OracleSmokeTest.java | 36 + .../db/migration/PostgreSQLSmokeTest.java | 38 + .../db/migration/SqlServerSmokeTest.java | 37 + .../server/single/DefaultSchedulerTests.java | 3 - .../src/test/resources/logback-test.xml | 2 + spring-cloud-dataflow-shell-core/pom.xml | 2 + .../shell/command/AppRegistryCommands.java | 2 +- .../shell/command/ConfigCommands.java | 40 +- .../dataflow/shell/command/JobCommands.java | 70 +- .../dataflow/shell/command/TaskCommands.java | 75 +- .../AppBootSchemaVersionConverter.java | 2 +- .../shell/AbstractShellIntegrationTest.java | 2 +- .../command/AppRegistryCommandsTests.java | 2 +- .../shell/command/ConfigCommandTests.java | 16 +- .../shell/command/JobCommandTests.java | 41 +- .../dataflow/tasklauncher/LaunchResponse.java | 58 + .../tasklauncher/TaskLauncherFunction.java | 13 +- .../TaskLauncherFunctionApplicationTests.java | 12 +- .../pom.xml | 2 + .../sink/TaskLauncherSinkTests.java | 54 +- spring-cloud-skipper/pom.xml | 8 +- .../spring-cloud-skipper-docs/pom.xml | 37 +- .../spring-cloud-skipper-server-core/pom.xml | 8 +- .../server/domain/AppDeployerData.java | 2 + .../AbstractAssertReleaseDeployedTest.java | 42 +- ...ipperServerPlatformConfigurationTests.java | 2 +- .../controller/docs/InstallDocumentation.java | 3 +- .../spring-cloud-skipper-server/pom.xml | 65 + .../db/migration/AbstractSmokeTest.java | 
73 + .../server/db/migration/DB2SmokeTest.java | 33 + .../server/db/migration/MariaDBSmokeTest.java | 34 + .../server/db/migration/MySQL57SmokeTest.java | 32 + .../server/db/migration/MySQL8SmokeTest.java | 32 + .../server/db/migration/OracleSmokeTest.java | 33 + .../db/migration/PostgreSQLSmokeTest.java | 34 + .../db/migration/SqlServerSmokeTest.java | 34 + .../skipperapp/LocalTestSkipperServer.java | 46 - .../src/test/resources/logback-test.xml | 8 + .../server/local/security/dataflow.keystore | Bin 2236 -> 0 bytes .../server/local/security/dataflow.truststore | Bin 946 -> 0 bytes .../server/local/security/oauthConfig.yml | 47 - .../support/oauth2TestServerConfig.yml | 28 - .../cloud/skipper/domain/Manifest.java | 2 + .../cloud/skipper/domain/PackageMetadata.java | 6 + .../cloud/skipper/domain/Release.java | 3 + .../cloud/skipper/domain/Repository.java | 3 + .../cloud/skipper/domain/Status.java | 2 + spring-cloud-starter-dataflow-server/pom.xml | 2 + .../single/LocalConfigurationTests.java | 2 +- .../server/single/LocalDataflowResource.java | 18 +- src/carvel/config/values/values.yml | 2 +- src/deploy/README.adoc | 2 +- src/deploy/README.html | 97 +- src/deploy/README.pdf | Bin 220737 -> 221210 bytes src/deploy/carvel/load-images.sh | 78 + src/deploy/carvel/register-apps.sh | 10 +- src/deploy/carvel/start-deploy.sh | 6 +- src/deploy/images/build-ctr-image.sh | 12 +- src/deploy/images/build-dataflow-image.sh | 12 +- src/deploy/images/build-scdf-pro-image.sh | 16 +- src/deploy/images/build-skipper-image.sh | 13 +- src/deploy/images/build-ssb-image.sh | 12 +- .../images/pull-prometheus-rsocket-proxy.sh | 2 + src/deploy/k8s/configure-k8s.sh | 5 +- src/deploy/k8s/delete-k8s-ns.sh | 4 +- src/deploy/k8s/delete-scdf.sh | 32 +- src/deploy/k8s/deploy-scdf.sh | 45 +- src/deploy/k8s/install-scdf.sh | 17 +- src/deploy/k8s/load-images.sh | 6 +- src/deploy/k8s/register-apps.sh | 22 +- src/deploy/k8s/update-scdf.sh | 119 + src/deploy/k8s/use-mk.sh | 6 +- src/deploy/k8s/yaml/kafka-svc.yaml | 17 - src/deploy/k8s/yaml/kafka-zk-svc.yaml | 17 - .../k8s/yaml/server-deployment-pro.yaml | 2 +- src/deploy/k8s/yaml/server-deployment.yaml | 22 +- src/deploy/k8s/yaml/skipper-config-kafka.yaml | 2 +- .../k8s/yaml/skipper-config-rabbit.yaml | 2 +- src/deploy/k8s/yaml/skipper-deployment.yaml | 6 +- src/deploy/shell/build-shell.sh | 12 + src/deploy/shell/deploy-ctr.sh | 8 +- src/deploy/shell/deploy-ctr3a.sh | 9 + src/deploy/shell/deploy-ctr3b.sh | 9 + .../deploy-task-demo-metrics-prometheus.sh | 7 +- src/deploy/shell/deploy-timelogger.sh | 6 +- src/deploy/shell/deploy-timestamp-batch.sh | 6 +- src/deploy/shell/deploy-timestamp-batch3.sh | 9 + src/deploy/shell/deploy-timestamp.sh | 8 + src/deploy/shell/deploy-timestamp3.sh | 9 + src/deploy/shell/run-ctr.sh | 7 + src/deploy/shell/run-ctr3a.sh | 7 + src/deploy/shell/run-ctr3b.sh | 7 + .../shell/run-task-demo-metrics-prometheus.sh | 8 + src/deploy/shell/run-timelogger.sh | 8 + src/deploy/shell/run-timestamp-batch.sh | 8 + src/deploy/shell/run-timestamp-batch3.sh | 8 + src/deploy/shell/run-timestamp.sh | 7 + src/deploy/shell/run-timestamp3.sh | 8 + src/deploy/shell/shell.sh | 9 +- src/deploy/versions.yaml | 4 +- .../docker-compose-prometheus.yml | 2 +- src/kubernetes/grafana/grafana-configmap.yaml | 2 +- src/kubernetes/kafka/kafka-deployment.yaml | 1 - src/kubernetes/kafka/kafka-svc.yaml | 1 - src/kubernetes/kafka/kafka-zk-deployment.yaml | 1 - src/kubernetes/kafka/kafka-zk-svc.yaml | 1 - .../postgresql/postgresql-deployment.yaml | 2 +- 
.../prometheus-proxy-clusterrolebinding.yaml | 1 - .../prometheus-proxy-deployment.yaml | 2 +- .../prometheus-proxy-serviceaccount.yaml | 2 +- .../prometheus-clusterrolebinding.yaml | 2 +- .../prometheus/prometheus-serviceaccount.yaml | 2 +- src/local/launch-dataflow.sh | 47 +- .../docker-compose-prometheus.yml | 2 +- .../prometheus-proxy-deployment.yaml | 2 +- 376 files changed, 16022 insertions(+), 7524 deletions(-) create mode 100755 build-docs.sh create mode 100644 spring-cloud-dataflow-aggregate-task/pom.xml create mode 100644 spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/AggregateExecutionSupport.java create mode 100644 spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/AggregateTaskConfiguration.java create mode 100644 spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/AggregateTaskExplorer.java create mode 100644 spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/DataflowTaskExecutionQueryDao.java create mode 100644 spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/TaskDefinitionReader.java create mode 100644 spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/TaskDeploymentReader.java create mode 100644 spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/TaskRepositoryContainer.java create mode 100644 spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/AggregateDataFlowTaskExecutionQueryDao.java create mode 100644 spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/DefaultAggregateExecutionSupport.java create mode 100644 spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/DefaultAggregateTaskExplorer.java create mode 100644 spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/DefaultTaskRepositoryContainer.java create mode 100644 spring-cloud-dataflow-aggregate-task/src/test/resources/logback-test.xml create mode 100644 spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerTaskListener.java create mode 100644 spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskExplorerContainer.java create mode 100644 spring-cloud-dataflow-composed-task-runner/src/main/resources/application.properties create mode 100644 spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/LaunchResponse.java rename {spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository => spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database}/support/DatabaseType.java (95%) create mode 100644 spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/IncrementerType.java create mode 100644 spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MariaDBSequenceMaxValueIncrementer.java create mode 100644 spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MultiSchemaIncrementerFactory.java create mode 
100644 spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MultiSchemaTaskExecutionDaoFactoryBean.java create mode 100644 spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/SqlServerSequenceMaxValueIncrementer.java create mode 100644 spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/LaunchResponseResource.java create mode 100644 spring-cloud-dataflow-schema-core/pom.xml create mode 100644 spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AggregateTaskExecution.java rename {spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core => spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema}/AppBootSchemaVersion.java (97%) rename {spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core => spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema}/AppBootSchemaVersionDeserializer.java (96%) rename {spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core => spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema}/AppBootSchemaVersionSerializer.java (96%) rename {spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core => spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema}/AppBootSchemaVersions.java (97%) rename {spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core => spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema}/AppBootVersionConverter.java (95%) rename {spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core => spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema}/SchemaVersionTarget.java (89%) rename {spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core => spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema}/SchemaVersionTargets.java (96%) rename {spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core => spring-cloud-dataflow-schema-core/src/test/java/org/springframework/cloud/schema}/AppBootSchemaVersionTests.java (94%) create mode 100644 spring-cloud-dataflow-schema/pom.xml rename {spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server => spring-cloud-dataflow-schema/src/main/java/org/springframework/cloud/dataflow/schema}/service/SchemaService.java (77%) create mode 100644 spring-cloud-dataflow-schema/src/main/java/org/springframework/cloud/dataflow/schema/service/SchemaServiceConfiguration.java rename {spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server => spring-cloud-dataflow-schema/src/main/java/org/springframework/cloud/dataflow/schema}/service/impl/DefaultSchemaService.java (57%) rename {spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server => spring-cloud-dataflow-schema/src/test/java/org/springframework/cloud/schema}/service/impl/DefaultSchemaServiceTests.java (82%) create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/AggregateDataFlowTaskConfiguration.java create mode 100644 
spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/ApiNotSupportedException.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractAggregateViewMigration.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCaseSensitiveMigration.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/PostgreSQLTextToOID.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V8__AddAggregateViews.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V8__RenameLowerCaseTables.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V9__AddAggregateViews.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V8__RenameLowerCaseTables.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V9__AddAggregateViews.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V8__AddAggregateViews.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V8__AddAggregateViews.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V9__ChangeTextTypes.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V8__AddAggregateViews.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/AggregateJobQueryDao.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowJobExecutionDaoContainer.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionDaoContainer.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionMetadataDaoContainer.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DefaultTaskDefinitionReader.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DefaultTaskDeploymentReader.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDao.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JobExecutionDaoContainer.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JobRepositoryContainer.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskBatchDaoContainer.java create mode 
100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskExecutionDaoContainer.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/MariaDBPagingQueryProvider.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SchemaUtilities.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/JobExplorerContainer.java create mode 100644 spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/JobServiceContainer.java create mode 100644 spring-cloud-dataflow-server-core/src/main/resources/schemas/db2/V8-dataflow.sql create mode 100644 spring-cloud-dataflow-server-core/src/main/resources/schemas/mariadb/V8-dataflow.sql create mode 100644 spring-cloud-dataflow-server-core/src/main/resources/schemas/mariadb/V9-dataflow.sql create mode 100644 spring-cloud-dataflow-server-core/src/main/resources/schemas/mysql/V8-dataflow.sql create mode 100644 spring-cloud-dataflow-server-core/src/main/resources/schemas/mysql/V9-dataflow.sql create mode 100644 spring-cloud-dataflow-server-core/src/main/resources/schemas/oracle/V8-dataflow.sql create mode 100644 spring-cloud-dataflow-server-core/src/main/resources/schemas/postgresql/V8-dataflow.sql create mode 100644 spring-cloud-dataflow-server-core/src/main/resources/schemas/postgresql/V9-dataflow.sql create mode 100644 spring-cloud-dataflow-server-core/src/main/resources/schemas/sqlserver/V7-dataflow.sql create mode 100644 spring-cloud-dataflow-server-core/src/main/resources/schemas/sqlserver/V8-dataflow.sql create mode 100644 spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/db/migration/PostgreSQLTextToOIDTest.java create mode 100644 spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/service/impl/AggregateTaskTests.java create mode 100644 spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/AbstractSmokeTest.java create mode 100644 spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/DB2SmokeTest.java create mode 100644 spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/MariaDBSmokeTest.java create mode 100644 spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/MySQL57SmokeTest.java create mode 100644 spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/MySQL8SmokeTest.java create mode 100644 spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/OracleSmokeTest.java create mode 100644 spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/PostgreSQLSmokeTest.java create mode 100644 spring-cloud-dataflow-server/src/test/java/org/springframework/cloud/dataflow/server/db/migration/SqlServerSmokeTest.java create mode 100644 spring-cloud-dataflow-tasklauncher/spring-cloud-dataflow-tasklauncher-function/src/main/java/org/springframework/cloud/dataflow/tasklauncher/LaunchResponse.java create mode 100644 spring-cloud-skipper/spring-cloud-skipper-server/src/test/java/org/springframework/cloud/skipper/server/db/migration/AbstractSmokeTest.java create mode 100644 
spring-cloud-skipper/spring-cloud-skipper-server/src/test/java/org/springframework/cloud/skipper/server/db/migration/DB2SmokeTest.java create mode 100644 spring-cloud-skipper/spring-cloud-skipper-server/src/test/java/org/springframework/cloud/skipper/server/db/migration/MariaDBSmokeTest.java create mode 100644 spring-cloud-skipper/spring-cloud-skipper-server/src/test/java/org/springframework/cloud/skipper/server/db/migration/MySQL57SmokeTest.java create mode 100644 spring-cloud-skipper/spring-cloud-skipper-server/src/test/java/org/springframework/cloud/skipper/server/db/migration/MySQL8SmokeTest.java create mode 100644 spring-cloud-skipper/spring-cloud-skipper-server/src/test/java/org/springframework/cloud/skipper/server/db/migration/OracleSmokeTest.java create mode 100644 spring-cloud-skipper/spring-cloud-skipper-server/src/test/java/org/springframework/cloud/skipper/server/db/migration/PostgreSQLSmokeTest.java create mode 100644 spring-cloud-skipper/spring-cloud-skipper-server/src/test/java/org/springframework/cloud/skipper/server/db/migration/SqlServerSmokeTest.java delete mode 100644 spring-cloud-skipper/spring-cloud-skipper-server/src/test/java/org/springframework/cloud/skipper/server/local/security/skipperapp/LocalTestSkipperServer.java create mode 100644 spring-cloud-skipper/spring-cloud-skipper-server/src/test/resources/logback-test.xml delete mode 100644 spring-cloud-skipper/spring-cloud-skipper-server/src/test/resources/org/springframework/cloud/skipper/server/local/security/dataflow.keystore delete mode 100644 spring-cloud-skipper/spring-cloud-skipper-server/src/test/resources/org/springframework/cloud/skipper/server/local/security/dataflow.truststore delete mode 100644 spring-cloud-skipper/spring-cloud-skipper-server/src/test/resources/org/springframework/cloud/skipper/server/local/security/oauthConfig.yml delete mode 100644 spring-cloud-skipper/spring-cloud-skipper-server/src/test/resources/org/springframework/cloud/skipper/server/local/security/support/oauth2TestServerConfig.yml create mode 100755 src/deploy/carvel/load-images.sh create mode 100755 src/deploy/images/pull-prometheus-rsocket-proxy.sh create mode 100755 src/deploy/k8s/update-scdf.sh delete mode 100644 src/deploy/k8s/yaml/kafka-svc.yaml delete mode 100644 src/deploy/k8s/yaml/kafka-zk-svc.yaml create mode 100755 src/deploy/shell/build-shell.sh create mode 100755 src/deploy/shell/deploy-ctr3a.sh create mode 100755 src/deploy/shell/deploy-ctr3b.sh create mode 100755 src/deploy/shell/deploy-timestamp-batch3.sh create mode 100755 src/deploy/shell/deploy-timestamp.sh create mode 100755 src/deploy/shell/deploy-timestamp3.sh create mode 100755 src/deploy/shell/run-ctr.sh create mode 100755 src/deploy/shell/run-ctr3a.sh create mode 100755 src/deploy/shell/run-ctr3b.sh create mode 100755 src/deploy/shell/run-task-demo-metrics-prometheus.sh create mode 100755 src/deploy/shell/run-timelogger.sh create mode 100755 src/deploy/shell/run-timestamp-batch.sh create mode 100755 src/deploy/shell/run-timestamp-batch3.sh create mode 100755 src/deploy/shell/run-timestamp.sh create mode 100755 src/deploy/shell/run-timestamp3.sh diff --git a/.github/workflows/ci-pr.yml b/.github/workflows/ci-pr.yml index a5650ef779..756e65fea0 100644 --- a/.github/workflows/ci-pr.yml +++ b/.github/workflows/ci-pr.yml @@ -30,7 +30,7 @@ jobs: # build - name: Build run: | - mvn -B -s .github/settings.xml $MAVEN_THREADS clean package + mvn -B -s .github/settings.xml $MAVEN_THREADS clean install # clean m2 cache - name: Clean cache run: | diff --git 
a/.github/workflows/ci.yml b/.github/workflows/ci.yml index abfd7716d4..e878ae752e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -61,7 +61,7 @@ jobs: - name: Build and Publish run: | mvn clean - jfrog rt mvn install -Pfull -B $MAVEN_THREADS + jfrog rt mvn install -Pfull,asciidoctordocs,restdocs -B $MAVEN_THREADS jfrog rt build-publish - name: Capture Test Results if: ${{ always() }} diff --git a/build-docs.sh b/build-docs.sh new file mode 100755 index 0000000000..2f5407c9dd --- /dev/null +++ b/build-docs.sh @@ -0,0 +1,8 @@ +#!/bin/bash +SCDIR=$(dirname "$(readlink -f "${BASH_SOURCE[0]}")") +SCDIR=$(realpath $SCDIR) +pushd "$SCDIR" > /dev/null || exit + ./mvnw install -o -am -pl :spring-cloud-dataflow-classic-docs,:spring-cloud-dataflow-docs,:spring-cloud-skipper-server-core,:spring-cloud-skipper-docs -DskipTests + ./mvnw install -o -Pfull,asciidoctordocs,restdocs -pl :spring-cloud-dataflow-classic-docs,:spring-cloud-dataflow-docs + ./mvnw install -o -Pasciidoctordocs,restdocs -pl :spring-cloud-skipper-server-core,:spring-cloud-skipper-docs +popd > /dev/null || exit diff --git a/pom.xml b/pom.xml index 0c395e5711..5c09f91d5d 100644 --- a/pom.xml +++ b/pom.xml @@ -55,7 +55,10 @@ spring-cloud-dataflow-container-registry spring-cloud-dataflow-configuration-metadata spring-cloud-dataflow-core-dsl + spring-cloud-dataflow-schema-core spring-cloud-dataflow-core + spring-cloud-dataflow-schema + spring-cloud-dataflow-aggregate-task spring-cloud-dataflow-server-core spring-cloud-dataflow-rest-resource spring-cloud-dataflow-audit diff --git a/spring-cloud-dataflow-aggregate-task/pom.xml b/spring-cloud-dataflow-aggregate-task/pom.xml new file mode 100644 index 0000000000..b313e52060 --- /dev/null +++ b/spring-cloud-dataflow-aggregate-task/pom.xml @@ -0,0 +1,104 @@ + + + 4.0.0 + + org.springframework.cloud + spring-cloud-dataflow-parent + 2.11.0-SNAPSHOT + ../spring-cloud-dataflow-parent + + spring-cloud-dataflow-aggregate-task + jar + + true + + + + org.springframework + spring-core + + + org.springframework + spring-context + compile + + + org.springframework.cloud + spring-cloud-task-batch + + + org.springframework.cloud + spring-cloud-dataflow-core + ${project.version} + + + org.springframework.cloud + spring-cloud-dataflow-registry + ${project.version} + + + org.springframework.cloud + spring-cloud-dataflow-schema + ${project.version} + + + org.slf4j + slf4j-api + + + com.fasterxml.jackson.core + jackson-annotations + + + com.fasterxml.jackson.core + jackson-databind + + + org.springframework.boot + spring-boot-starter-test + + + junit + junit + test + + + org.junit.jupiter + junit-jupiter-api + + + org.junit.jupiter + junit-jupiter-engine + test + + + org.assertj + assertj-core + test + + + org.junit.jupiter + junit-jupiter-params + test + + + com.h2database + h2 + test + + + + + + org.apache.maven.plugins + maven-surefire-plugin + 3.0.0 + + 1 + 1 + false + + + + + \ No newline at end of file diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/AggregateExecutionSupport.java b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/AggregateExecutionSupport.java new file mode 100644 index 0000000000..7cfc3ba5d6 --- /dev/null +++ b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/AggregateExecutionSupport.java @@ -0,0 +1,57 @@ +/* + * Copyright 2023 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.aggregate.task; + +import org.springframework.cloud.dataflow.core.AppRegistration; +import org.springframework.cloud.dataflow.core.TaskDefinition; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.task.repository.TaskExecution; + +/** + * Allows users to retrieve task execution and schema version information from either a {@link TaskExecution} or a + * task name. + * @author Corneil du Plessis + */ +public interface AggregateExecutionSupport { + + /** + * Retrieves the {@link AggregateTaskExecution} for the task execution and {@link TaskDefinitionReader} provided. + * @param execution A {@link TaskExecution} that contains the task name that will be used to find the {@link AggregateTaskExecution}. + * @param taskDefinitionReader {@link TaskDefinitionReader} that will be used to find the {@link SchemaVersionTarget} for the task execution. + * @return The {@link AggregateTaskExecution} containing the {@link SchemaVersionTarget} for the TaskExecution. + */ + AggregateTaskExecution from(TaskExecution execution, TaskDefinitionReader taskDefinitionReader, TaskDeploymentReader taskDeploymentReader); + + /** + * Retrieves the {@link SchemaVersionTarget} for the task name. + * @param taskName The name of the {@link org.springframework.cloud.dataflow.core.TaskDefinition} from which the {@link SchemaVersionTarget} will be retrieved. + * @param taskDefinitionReader {@link TaskDefinitionReader} that will be used to find the {@link SchemaVersionTarget} + * @return The {@link SchemaVersionTarget} for the taskName specified. + */ + SchemaVersionTarget findSchemaVersionTarget(String taskName, TaskDefinitionReader taskDefinitionReader); + SchemaVersionTarget findSchemaVersionTarget(String taskName, TaskDefinition taskDefinition); + + /** + * Retrieve the {@link AppRegistration} for the registeredName. + */ + AppRegistration findTaskAppRegistration(String registeredName); + + /** + * Return the {@link AggregateTaskExecution} for the {@link TaskExecution} and schema target name specified. + */ + AggregateTaskExecution from(TaskExecution execution, String schemaTarget, String platformName); +} diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/AggregateTaskConfiguration.java b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/AggregateTaskConfiguration.java new file mode 100644 index 0000000000..6b8b81dd2a --- /dev/null +++ b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/AggregateTaskConfiguration.java @@ -0,0 +1,98 @@ +/* + * Copyright 2017-2021 the original author or authors.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.aggregate.task; + +import javax.annotation.PostConstruct; +import javax.sql.DataSource; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.aggregate.task.impl.AggregateDataFlowTaskExecutionQueryDao; +import org.springframework.cloud.dataflow.aggregate.task.impl.DefaultAggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.impl.DefaultAggregateTaskExplorer; +import org.springframework.cloud.dataflow.aggregate.task.impl.DefaultTaskRepositoryContainer; +import org.springframework.cloud.dataflow.registry.service.AppRegistryService; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.dataflow.schema.service.SchemaServiceConfiguration; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.util.Assert; + +/** + * Configuration for aggregate task related components. + * + * @author Corneil du Plessis + */ +@Configuration +@Import(SchemaServiceConfiguration.class) +public class AggregateTaskConfiguration { + private static final Logger logger = LoggerFactory.getLogger(AggregateTaskConfiguration.class); + + + @Bean + public DataflowTaskExecutionQueryDao dataflowTaskExecutionQueryDao( + DataSource dataSource, + SchemaService schemaService + ) { + return new AggregateDataFlowTaskExecutionQueryDao(dataSource, schemaService); + } + + @Bean + public AggregateExecutionSupport aggregateExecutionSupport( + AppRegistryService registryService, + SchemaService schemaService + ) { + return new DefaultAggregateExecutionSupport(registryService, schemaService); + } + + @Bean + public TaskRepositoryContainer taskRepositoryContainer( + DataSource dataSource, + SchemaService schemaService + ) { + return new DefaultTaskRepositoryContainer(dataSource, schemaService); + } + + @Bean + public AggregateTaskExplorer aggregateTaskExplorer( + DataSource dataSource, + DataflowTaskExecutionQueryDao taskExecutionQueryDao, + SchemaService schemaService, + AggregateExecutionSupport aggregateExecutionSupport, + TaskDefinitionReader taskDefinitionReader, + TaskDeploymentReader taskDeploymentReader + ) { + Assert.notNull(dataSource, "dataSource required"); + Assert.notNull(taskExecutionQueryDao, "taskExecutionQueryDao required"); + Assert.notNull(schemaService, "schemaService required"); + Assert.notNull(aggregateExecutionSupport, "aggregateExecutionSupport required"); + Assert.notNull(taskDefinitionReader, "taskDefinitionReader required"); + Assert.notNull(taskDeploymentReader, "taskDeploymentReader required"); + return new DefaultAggregateTaskExplorer(dataSource, + taskExecutionQueryDao, + schemaService, + aggregateExecutionSupport, + taskDefinitionReader, + taskDeploymentReader); + } + + @PostConstruct + public void setup() { + 
logger.info("created: org.springframework.cloud.dataflow.aggregate.task.AggregateTaskConfiguration"); + } +} diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/AggregateTaskExplorer.java b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/AggregateTaskExplorer.java new file mode 100644 index 0000000000..8d1184ea6f --- /dev/null +++ b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/AggregateTaskExplorer.java @@ -0,0 +1,169 @@ +/* + * Copyright 2023-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.aggregate.task; + +import java.util.Collection; +import java.util.List; +import java.util.Set; + +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; +import org.springframework.cloud.task.repository.TaskExecution; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; + +/** + * Provides for exploring tasks for multiple schema targets. + * + * @author Corneil du Plessis + */ +public interface AggregateTaskExplorer { + /** + * find a task execution given an execution id and schema target. + * + * @param executionId the task execution id + * @param schemaTarget the schema target + * @return the task execution + */ + AggregateTaskExecution getTaskExecution(long executionId, String schemaTarget); + + /** + * find a task execution given an external execution id and platform name. + * + * @param externalExecutionId the external execution id + * @param platform the platform name + * @return the task execution + */ + AggregateTaskExecution getTaskExecutionByExternalExecutionId(String externalExecutionId, String platform); + + List findChildTaskExecutions(long executionId, String schemaTarget); + + List findChildTaskExecutions(Collection parentIds, String schemaTarget); + + /** + * Retrieve a collection of taskExecutions that have the task name provided. + * + * @param taskName the name of the task + * @param pageable the constraints for the search + * @return the set of running executions for tasks with the specified name + */ + Page findRunningTaskExecutions(String taskName, Pageable pageable); + + /** + * Retrieve a list of available task names. + * + * @return the task names that have been executed + */ + List getTaskNames(); + + /** + * Get number of executions for a taskName. + * + * @param taskName the name of the task to be searched + * @return the number of running tasks that have the taskname specified + */ + long getTaskExecutionCountByTaskName(String taskName); + + /** + * Retrieves current number of task executions. + * + * @return current number of task executions. + */ + long getTaskExecutionCount(); + + /** + * Retrieves current number of running task executions. + * + * @return current number of running task executions. 
+ */ + long getRunningTaskExecutionCount(); + + /** + * Get a list of executions for a task by name and completion status. + * + * @param taskName the name of the task to be searched + * @param completed Indicator to find only completed tasks + * @return list of task executions + */ + List findTaskExecutionsByName(String taskName, boolean completed); + + /** + * Get a collection/page of executions. + * + * @param taskName the name of the task to be searched + * @param pageable the constraints for the search + * @return list of task executions + */ + Page findTaskExecutionsByName(String taskName, Pageable pageable); + + /** + * Retrieves all the task executions within the pageable constraints sorted by start + * date descending, taskExecution id descending. + * + * @param pageable the constraints for the search + * @return page containing the results from the search + */ + Page findAll(Pageable pageable); + + /** + * Returns the id of the TaskExecution that the requested Spring Batch job execution + * was executed within the context of. Returns null if none were found. + * + * @param jobExecutionId the id of the JobExecution + * @return the id of the {@link TaskExecution} + */ + Long getTaskExecutionIdByJobExecutionId(long jobExecutionId, String schemaTarget); + + /** + * Returns a Set of JobExecution ids for the jobs that were executed within the scope + * of the requested task. + * + * @param taskExecutionId id of the {@link TaskExecution} + * @return a Set of the ids of the job executions executed within the + * task. + */ + Set getJobExecutionIdsByTaskExecutionId(long taskExecutionId, String schemaTarget); + + /** + * Returns a {@link List} of the latest {@link TaskExecution} for 1 or more task + * names. + * <p>
+ * Latest is defined by the most recent start time. A {@link TaskExecution} does not + * have to be finished (the results may include pending {@link TaskExecution}s). + * <p>
+ * It is theoretically possible for more than one {@link TaskExecution} with the same + * name to have the exact same start time. In that case + * the {@link TaskExecution} with the highest Task Execution ID is returned. + * <p>
+ * This method will not consider end times in its calculations. Thus, when a task + * execution {@code A} starts after task execution {@code B} but finishes BEFORE task + * execution {@code B}, then task execution {@code A} is the one returned. + * + * @param taskNames At least 1 task name must be provided + * @return List of TaskExecutions. May be empty but never null. + */ + List getLatestTaskExecutionsByTaskNames(String... taskNames); + + /** + * Returns the latest task execution for a given task name. Will ultimately apply the + * same algorithm underneath as {@link #getLatestTaskExecutionsByTaskNames(String...)} + * but will only return a single result. + * + * @param taskName Must not be null or empty + * @return The latest Task Execution or null + * @see #getLatestTaskExecutionsByTaskNames(String...) + */ + AggregateTaskExecution getLatestTaskExecutionForTaskName(String taskName); +} diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/DataflowTaskExecutionQueryDao.java b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/DataflowTaskExecutionQueryDao.java new file mode 100644 index 0000000000..61999648d9 --- /dev/null +++ b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/DataflowTaskExecutionQueryDao.java @@ -0,0 +1,177 @@ +/* + * Copyright 2017-2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.aggregate.task; + +import java.util.Collection; +import java.util.List; + +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; +import org.springframework.cloud.task.repository.TaskExecution; +import org.springframework.cloud.task.repository.dao.TaskExecutionDao; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; + +/** + * Repository to access {@link TaskExecution}s. Mirrors the {@link TaskExecutionDao} + * but contains Spring Cloud Data Flow specific operations. This functionality might + * be migrated to Spring Cloud Task itself. + * + * @author Corneil du Plessis + */ +public interface DataflowTaskExecutionQueryDao { + /** + * Retrieves a task execution from the task repository. + * + * @param executionId the id associated with the task execution. + * @return a fully qualified TaskExecution instance. + */ + AggregateTaskExecution getTaskExecution(long executionId, String schemaTarget); + + /** + * Retrieves a list of task executions where the provided execution id and schemaTarget represent the parent of the task executions. + * + * @param executionId parent task execution id + * @param schemaTarget parent task schema target + * @return the task executions + */ + List findChildTaskExecutions(long executionId, String schemaTarget); + + /** + * Retrieves a list of task executions where the provided execution ids and schemaTarget represent the parents of the task executions.
+ * + * @param parentIds parent task execution ids + * @param schemaTarget parent task schema target + * @return the task executions + */ + List findChildTaskExecutions(Collection parentIds, String schemaTarget); + + /** + * Retrieves a subset of task executions by task name and execution status. + * + * @param taskName the name of the task to search for in the repository. + * @param completed indicator to retrieve only completed task executions. + * @return a list that contains task executions. + */ + List findTaskExecutionsByName(String taskName, boolean completed); + + /** + * Retrieves current number of task executions for a taskName. + * + * @param taskName the name of the task to search for in the repository. + * @return current number of task executions for the taskName. + */ + long getTaskExecutionCountByTaskName(String taskName); + + /** + * Retrieves the number of task execution that have completed. + * + * @param taskName the name of the task to search + * @return the number of completed task executions + */ + long getCompletedTaskExecutionCountByTaskName(String taskName); + + /** + * Retrieves current number of task executions for a taskName and with an endTime of + * null. + * + * @param taskName the name of the task to search for in the repository. + * @return current number of task executions for the taskName. + */ + long getRunningTaskExecutionCountByTaskName(String taskName); + + /** + * Retrieves current number of task executions with an endTime of null. + * + * @return current number of task executions. + */ + long getRunningTaskExecutionCount(); + + /** + * Retrieves current number of task executions. + * + * @return current number of task executions. + */ + long getTaskExecutionCount(); + + /** + * Retrieves a set of task executions that are running for a taskName. + * + * @param taskName the name of the task to search for in the repository. + * @param pageable the constraints for the search. + * @return set of running task executions. + */ + Page findRunningTaskExecutions(String taskName, Pageable pageable); + + /** + * Retrieves a subset of task executions by task name, start location and size. + * + * @param taskName the name of the task to search for in the repository. + * @param pageable the constraints for the search. + * @return a list that contains task executions from the query bound by the start + * position and count specified by the user. + */ + Page findTaskExecutionsByName(String taskName, Pageable pageable); + + /** + * Retrieves a sorted list of distinct task names for the task executions. + * + * @return a list of distinct task names from the task repository.. + */ + List getTaskNames(); + + /** + * Retrieves all the task executions within the pageable constraints. + * + * @param pageable the constraints for the search + * @return page containing the results from the search + */ + + Page findAll(Pageable pageable); + + /** + * Returns a {@link List} of the latest {@link TaskExecution} for 1 or more task + * names. + *
+	 * Latest is defined by the most recent start time. A {@link TaskExecution} does not
+	 * have to be finished (the results may include pending {@link TaskExecution}s).
+	 * <p>
+	 * It is theoretically possible for a task with the same name to have more than one
+	 * {@link TaskExecution} for the exact same start time. In that case the
+	 * {@link TaskExecution} with the highest Task Execution ID is returned.
+	 * <p>
+	 * This method will not consider end times in its calculations. Thus, when a task
+	 * execution {@code A} starts after task execution {@code B} but finishes BEFORE task
+	 * execution {@code B}, then task execution {@code A} is still the one returned.
+	 *
+	 * @param taskNames At least 1 task name must be provided
+	 * @return List of TaskExecutions. May be empty but never null.
+	 */
+	List<AggregateTaskExecution> getLatestTaskExecutionsByTaskNames(String... taskNames);
+
+	/**
+	 * Returns the latest task execution for a given task name. Will ultimately apply the
+	 * same algorithm underneath as {@link #getLatestTaskExecutionsByTaskNames(String...)}
+	 * but will only return a single result.
+	 *
+	 * @param taskName Must not be null or empty
+	 * @return The latest Task Execution or null
+	 * @see #getLatestTaskExecutionsByTaskNames(String...)
+	 */
+	AggregateTaskExecution getLatestTaskExecutionForTaskName(String taskName);
+
+	/**
+	 * Returns the task execution identified by the given external execution id and task name.
+	 *
+	 * @param externalExecutionId the external execution id of the task execution
+	 * @param taskName the name of the task
+	 * @return the task execution or null
+	 */
+	AggregateTaskExecution geTaskExecutionByExecutionId(String externalExecutionId, String taskName);
+
+}
diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/TaskDefinitionReader.java b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/TaskDefinitionReader.java
new file mode 100644
index 0000000000..a88434e8b4
--- /dev/null
+++ b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/TaskDefinitionReader.java
@@ -0,0 +1,7 @@
+package org.springframework.cloud.dataflow.aggregate.task;
+
+import org.springframework.cloud.dataflow.core.TaskDefinition;
+
+public interface TaskDefinitionReader {
+	TaskDefinition findTaskDefinition(String taskName);
+}
diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/TaskDeploymentReader.java b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/TaskDeploymentReader.java
new file mode 100644
index 0000000000..768ee84069
--- /dev/null
+++ b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/TaskDeploymentReader.java
@@ -0,0 +1,11 @@
+package org.springframework.cloud.dataflow.aggregate.task;
+
+import java.util.List;
+
+import org.springframework.cloud.dataflow.core.TaskDeployment;
+
+public interface TaskDeploymentReader {
+	TaskDeployment getDeployment(String externalTaskId);
+	TaskDeployment getDeployment(String externalTaskId, String platform);
+	TaskDeployment findByDefinitionName(String definitionName);
+}
diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/TaskRepositoryContainer.java b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/TaskRepositoryContainer.java
new file mode 100644
index 0000000000..77dae057a2
--- /dev/null
+++ b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/TaskRepositoryContainer.java
@@ -0,0 +1,7 @@
+package org.springframework.cloud.dataflow.aggregate.task;
+
+import org.springframework.cloud.task.repository.TaskRepository;
+
+public interface TaskRepositoryContainer {
+	TaskRepository get(String schemaTarget);
+}
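For orientation, a minimal usage sketch of the latest-execution lookup defined above (illustrative only, not part of this patch; `queryDao` is an assumed, wired `DataflowTaskExecutionQueryDao` bean and the task names are hypothetical). Latest is chosen purely by start time, with ties broken by the highest execution id:

	List<AggregateTaskExecution> latest = queryDao.getLatestTaskExecutionsByTaskNames("task-a", "task-b");
	for (AggregateTaskExecution execution : latest) {
		// END_TIME is ignored, so a still-running execution can be the "latest" one.
		System.out.printf("%s: id=%d schemaTarget=%s%n",
				execution.getTaskName(), execution.getExecutionId(), execution.getSchemaTarget());
	}

diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/AggregateDataFlowTaskExecutionQueryDao.java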
b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/AggregateDataFlowTaskExecutionQueryDao.java new file mode 100644 index 0000000000..df50268e09 --- /dev/null +++ b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/AggregateDataFlowTaskExecutionQueryDao.java @@ -0,0 +1,523 @@ +/* + * Copyright 2023-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.aggregate.task.impl; + +import javax.sql.DataSource; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Types; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.batch.item.database.Order; +import org.springframework.cloud.dataflow.aggregate.task.DataflowTaskExecutionQueryDao; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.task.repository.database.PagingQueryProvider; +import org.springframework.cloud.task.repository.database.support.SqlPagingQueryProviderFactoryBean; +import org.springframework.dao.EmptyResultDataAccessException; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.jdbc.core.RowCallbackHandler; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; +import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; +import org.springframework.jdbc.core.namedparam.SqlParameterSource; +import org.springframework.util.Assert; +import org.springframework.util.CollectionUtils; +import org.springframework.util.StringUtils; + +/** + * Provide aggregate data for Boot 3 and Boot <=2 TaskExecutions. + * + * @author Corneil du Plessis + */ + +public class AggregateDataFlowTaskExecutionQueryDao implements DataflowTaskExecutionQueryDao { + private final static Logger logger = LoggerFactory.getLogger(AggregateDataFlowTaskExecutionQueryDao.class); + + /** + * SELECT clause for task execution. + */ + public static final String SELECT_CLAUSE = "TASK_EXECUTION_ID, " + + "START_TIME, END_TIME, TASK_NAME, EXIT_CODE, " + + "EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, " + + "EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID, SCHEMA_TARGET "; + + /** + * FROM clause for task execution. + */ + public static final String FROM_CLAUSE = "AGGREGATE_TASK_EXECUTION"; + + /** + * WHERE clause for running task. 
+ */ + public static final String RUNNING_TASK_WHERE_CLAUSE = "where TASK_NAME = :taskName AND END_TIME IS NULL "; + + /** + * WHERE clause for task name. + */ + public static final String TASK_NAME_WHERE_CLAUSE = "where TASK_NAME = :taskName "; + + private static final String FIND_TASK_ARGUMENTS = "SELECT TASK_EXECUTION_ID, " + + "TASK_PARAM from AGGREGATE_TASK_EXECUTION_PARAMS where TASK_EXECUTION_ID = :taskExecutionId and SCHEMA_TARGET = :schemaTarget"; + + private static final String GET_EXECUTION_BY_ID = "SELECT TASK_EXECUTION_ID," + + "START_TIME, END_TIME, TASK_NAME, EXIT_CODE," + + "EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID," + + "PARENT_EXECUTION_ID, SCHEMA_TARGET" + + " from AGGREGATE_TASK_EXECUTION where TASK_EXECUTION_ID = :taskExecutionId and SCHEMA_TARGET = :schemaTarget"; + + private final static String GET_CHILD_EXECUTION_BY_ID = "SELECT TASK_EXECUTION_ID," + + "START_TIME, END_TIME, TASK_NAME, EXIT_CODE," + + "EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID," + + "PARENT_EXECUTION_ID, SCHEMA_TARGET" + + " from AGGREGATE_TASK_EXECUTION" + + " where PARENT_EXECUTION_ID = :taskExecutionId" + + " and (SELECT COUNT(*) FROM AGGREGATE_TASK_EXECUTION_PARAMS P " + + " WHERE P.TASK_EXECUTION_ID=TASK_EXECUTION_ID " + + " AND P.SCHEMA_TARGET=SCHEMA_TARGET" + + " AND P.TASK_PARAM = :schemaTarget) > 0"; + + private final static String GET_CHILD_EXECUTION_BY_IDS = "SELECT TASK_EXECUTION_ID," + + "START_TIME, END_TIME, TASK_NAME, EXIT_CODE," + + "EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID," + + "PARENT_EXECUTION_ID, SCHEMA_TARGET" + + " from AGGREGATE_TASK_EXECUTION" + + " where PARENT_EXECUTION_ID IN (:taskExecutionIds)" + + " and (SELECT COUNT(*) FROM AGGREGATE_TASK_EXECUTION_PARAMS P " + + " WHERE P.TASK_EXECUTION_ID=TASK_EXECUTION_ID " + + " AND P.SCHEMA_TARGET=SCHEMA_TARGET" + + " AND P.TASK_PARAM = :schemaTarget) > 0"; + + private static final String GET_EXECUTION_BY_EXTERNAL_EXECUTION_ID = "SELECT TASK_EXECUTION_ID," + + "START_TIME, END_TIME, TASK_NAME, EXIT_CODE," + + "EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID," + + "PARENT_EXECUTION_ID, SCHEMA_TARGET" + + " from AGGREGATE_TASK_EXECUTION where EXTERNAL_EXECUTION_ID = :externalExecutionId and TASK_NAME = :taskName"; + + private static final String GET_EXECUTION_BY_NAME_COMPLETED = "SELECT TASK_EXECUTION_ID," + + "START_TIME, END_TIME, TASK_NAME, EXIT_CODE," + + "EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID," + + "PARENT_EXECUTION_ID, SCHEMA_TARGET" + + " from AGGREGATE_TASK_EXECUTION where TASK_NAME = :taskName AND END_TIME IS NOT NULL"; + + private static final String GET_EXECUTION_BY_NAME = "SELECT TASK_EXECUTION_ID," + + "START_TIME, END_TIME, TASK_NAME, EXIT_CODE," + + "EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID," + + "PARENT_EXECUTION_ID, SCHEMA_TARGET" + + " from AGGREGATE_TASK_EXECUTION where TASK_NAME = :taskName"; + + private static final String TASK_EXECUTION_COUNT = "SELECT COUNT(*) FROM " + + "AGGREGATE_TASK_EXECUTION "; + + private static final String TASK_EXECUTION_COUNT_BY_NAME = "SELECT COUNT(*) FROM " + + "AGGREGATE_TASK_EXECUTION where TASK_NAME = :taskName"; + + private static final String COMPLETED_TASK_EXECUTION_COUNT = "SELECT COUNT(*) FROM " + + "AGGREGATE_TASK_EXECUTION WHERE END_TIME IS NOT NULL"; + + private static final String COMPLETED_TASK_EXECUTION_COUNT_BY_NAME = "SELECT COUNT(*) FROM " + + "AGGREGATE_TASK_EXECUTION where TASK_NAME = :taskName AND END_TIME IS NOT NULL "; + + 
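+	// All of the queries in this class read from AGGREGATE_TASK_EXECUTION, which is
+	// assumed to be a cross-schema view that unions the Boot <=2 and Boot 3 task tables
+	// and tags every row with its SCHEMA_TARGET, roughly along these lines (illustrative
+	// sketch only; the real definition lives in the database migrations):
+	//
+	//   CREATE VIEW AGGREGATE_TASK_EXECUTION AS
+	//     SELECT ..., 'boot2' AS SCHEMA_TARGET FROM TASK_EXECUTION
+	//     UNION ALL
+	//     SELECT ..., 'boot3' AS SCHEMA_TARGET FROM BOOT3_TASK_EXECUTION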
+	private static final String RUNNING_TASK_EXECUTION_COUNT_BY_NAME = "SELECT COUNT(*) FROM " +
+			"AGGREGATE_TASK_EXECUTION where TASK_NAME = :taskName AND END_TIME IS NULL ";
+
+	private static final String RUNNING_TASK_EXECUTION_COUNT = "SELECT COUNT(*) FROM " +
+			"AGGREGATE_TASK_EXECUTION where END_TIME IS NULL ";
+
+	private static final String LAST_TASK_EXECUTIONS_BY_TASK_NAMES = "select TE2.* from (" +
+			"select MAX(TE.TASK_EXECUTION_ID) as TASK_EXECUTION_ID, TE.TASK_NAME, TE.START_TIME from (" +
+			"select TASK_NAME, MAX(START_TIME) as START_TIME" +
+			" FROM AGGREGATE_TASK_EXECUTION where TASK_NAME in (:taskNames)" +
+			" GROUP BY TASK_NAME) TE_MAX" +
+			" inner join AGGREGATE_TASK_EXECUTION TE ON TE.TASK_NAME = TE_MAX.TASK_NAME AND TE.START_TIME = TE_MAX.START_TIME" +
+			" group by TE.TASK_NAME, TE.START_TIME" +
+			") TE1" +
+			" inner join AGGREGATE_TASK_EXECUTION TE2 ON TE1.TASK_EXECUTION_ID = TE2.TASK_EXECUTION_ID AND TE1.SCHEMA_TARGET = TE2.SCHEMA_TARGET" +
+			" order by TE2.START_TIME DESC, TE2.TASK_EXECUTION_ID DESC";
+
+	private static final String FIND_TASK_NAMES = "SELECT distinct TASK_NAME from AGGREGATE_TASK_EXECUTION order by TASK_NAME";
+
+	private static final Set<String> validSortColumns = new HashSet<>(10);
+
+	static {
+		validSortColumns.add("TASK_EXECUTION_ID");
+		validSortColumns.add("START_TIME");
+		validSortColumns.add("END_TIME");
+		validSortColumns.add("TASK_NAME");
+		validSortColumns.add("EXIT_CODE");
+		validSortColumns.add("EXIT_MESSAGE");
+		validSortColumns.add("ERROR_MESSAGE");
+		validSortColumns.add("LAST_UPDATED");
+		validSortColumns.add("EXTERNAL_EXECUTION_ID");
+		validSortColumns.add("PARENT_EXECUTION_ID");
+	}
+
+	private final NamedParameterJdbcTemplate jdbcTemplate;
+
+	private final DataSource dataSource;
+
+	private final LinkedHashMap<String, Order> orderMap;
+
+	private final SchemaService schemaService;
+
+	/**
+	 * Initializes the AggregateDataFlowTaskExecutionQueryDao.
+	 *
+	 * @param dataSource used by the dao to execute queries and update the tables.
+	 * @param schemaService used to resolve the schema targets.
+ */ + public AggregateDataFlowTaskExecutionQueryDao(DataSource dataSource, SchemaService schemaService) { + Assert.notNull(dataSource, "The dataSource must not be null."); + this.jdbcTemplate = new NamedParameterJdbcTemplate(dataSource); + this.dataSource = dataSource; + this.schemaService = schemaService; + this.orderMap = new LinkedHashMap<>(); + this.orderMap.put("START_TIME", Order.DESCENDING); + this.orderMap.put("TASK_EXECUTION_ID", Order.DESCENDING); + } + + @Override + public AggregateTaskExecution geTaskExecutionByExecutionId(String externalExecutionId, String taskName) { + final SqlParameterSource queryParameters = new MapSqlParameterSource() + .addValue("externalExecutionId", externalExecutionId) + .addValue("taskName", taskName); + + try { + return this.jdbcTemplate.queryForObject( + GET_EXECUTION_BY_EXTERNAL_EXECUTION_ID, + queryParameters, + new CompositeTaskExecutionRowMapper() + ); + } catch (EmptyResultDataAccessException e) { + return null; + } + } + + @Override + public AggregateTaskExecution getTaskExecution(long executionId, String schemaTarget) { + final SqlParameterSource queryParameters = new MapSqlParameterSource() + .addValue("taskExecutionId", executionId, Types.BIGINT) + .addValue("schemaTarget", schemaTarget); + + try { + return this.jdbcTemplate.queryForObject( + GET_EXECUTION_BY_ID, + queryParameters, + new CompositeTaskExecutionRowMapper() + ); + } catch (EmptyResultDataAccessException e) { + return null; + } + } + + @Override + public List findChildTaskExecutions(long executionId, String schemaTarget) { + final SqlParameterSource queryParameters = new MapSqlParameterSource() + .addValue("taskExecutionId", executionId, Types.BIGINT) + .addValue("schemaTarget", "--spring.cloud.task.parent-schema-target=" + schemaTarget); + + try { + return this.jdbcTemplate.query( + GET_CHILD_EXECUTION_BY_ID, + queryParameters, + new CompositeTaskExecutionRowMapper() + ); + } catch (EmptyResultDataAccessException e) { + return null; + } + } + + @Override + public List findChildTaskExecutions(Collection parentIds, String schemaTarget) { + final SqlParameterSource queryParameters = new MapSqlParameterSource() + .addValue("taskExecutionIds", parentIds) + .addValue("schemaTarget", "--spring.cloud.task.parent-schema-target=" + schemaTarget); + + try { + return this.jdbcTemplate.query( + GET_CHILD_EXECUTION_BY_IDS, + queryParameters, + new CompositeTaskExecutionRowMapper() + ); + } catch (EmptyResultDataAccessException e) { + return null; + } + } + + @Override + public List findTaskExecutionsByName(String taskName, boolean completed) { + final SqlParameterSource queryParameters = new MapSqlParameterSource() + .addValue("taskName", taskName); + String query = completed ? GET_EXECUTION_BY_NAME_COMPLETED : GET_EXECUTION_BY_NAME; + return this.jdbcTemplate.query(query, queryParameters, new CompositeTaskExecutionRowMapper()); + } + + @Override + public long getTaskExecutionCountByTaskName(String taskName) { + Long count; + if (StringUtils.hasText(taskName)) { + final SqlParameterSource queryParameters = new MapSqlParameterSource() + .addValue("taskName", taskName, Types.VARCHAR); + + try { + count = this.jdbcTemplate.queryForObject(TASK_EXECUTION_COUNT_BY_NAME, queryParameters, Long.class); + } catch (EmptyResultDataAccessException e) { + count = 0L; + } + } else { + count = this.jdbcTemplate.queryForObject(TASK_EXECUTION_COUNT, Collections.emptyMap(), Long.class); + } + return count != null ? 
count : 0L; + } + + @Override + public long getCompletedTaskExecutionCountByTaskName(String taskName) { + Long count; + if (StringUtils.hasText(taskName)) { + final SqlParameterSource queryParameters = new MapSqlParameterSource() + .addValue("taskName", taskName, Types.VARCHAR); + + try { + count = this.jdbcTemplate.queryForObject(COMPLETED_TASK_EXECUTION_COUNT_BY_NAME, queryParameters, Long.class); + } catch (EmptyResultDataAccessException e) { + count = 0L; + } + } else { + count = this.jdbcTemplate.queryForObject(COMPLETED_TASK_EXECUTION_COUNT, Collections.emptyMap(), Long.class); + } + return count != null ? count : 0L; + } + + @Override + public long getRunningTaskExecutionCountByTaskName(String taskName) { + Long count; + if (StringUtils.hasText(taskName)) { + final SqlParameterSource queryParameters = new MapSqlParameterSource() + .addValue("taskName", taskName, Types.VARCHAR); + + try { + logger.debug("getRunningTaskExecutionCountByTaskName:{}:sql={}", taskName, RUNNING_TASK_EXECUTION_COUNT_BY_NAME); + count = this.jdbcTemplate.queryForObject(RUNNING_TASK_EXECUTION_COUNT_BY_NAME, queryParameters, Long.class); + } catch (EmptyResultDataAccessException e) { + count = 0L; + } + } else { + logger.debug("getRunningTaskExecutionCountByTaskName:{}:sql={}", taskName, RUNNING_TASK_EXECUTION_COUNT); + count = this.jdbcTemplate.queryForObject(RUNNING_TASK_EXECUTION_COUNT, Collections.emptyMap(), Long.class); + + } + return count != null ? count : 0L; + } + + @Override + public long getRunningTaskExecutionCount() { + try { + final SqlParameterSource queryParameters = new MapSqlParameterSource(); + Long result = this.jdbcTemplate.queryForObject(RUNNING_TASK_EXECUTION_COUNT, queryParameters, Long.class); + return result != null ? result : 0L; + } catch (EmptyResultDataAccessException e) { + return 0; + } + } + + @Override + public List getLatestTaskExecutionsByTaskNames(String... taskNames) { + Assert.notEmpty(taskNames, "At least 1 task name must be provided."); + final List taskNamesAsList = new ArrayList<>(); + + for (String taskName : taskNames) { + if (StringUtils.hasText(taskName)) { + taskNamesAsList.add(taskName); + } + } + + Assert.isTrue(taskNamesAsList.size() == taskNames.length, String.format( + "Task names must not contain any empty elements but %s of %s were empty or null.", + taskNames.length - taskNamesAsList.size(), taskNames.length)); + + try { + final Map> paramMap = Collections + .singletonMap("taskNames", taskNamesAsList); + return this.jdbcTemplate.query(LAST_TASK_EXECUTIONS_BY_TASK_NAMES, paramMap, new CompositeTaskExecutionRowMapper()); + } catch (EmptyResultDataAccessException e) { + return Collections.emptyList(); + } + } + + @Override + public AggregateTaskExecution getLatestTaskExecutionForTaskName(String taskName) { + Assert.hasText(taskName, "The task name must not be empty."); + final List taskExecutions = this + .getLatestTaskExecutionsByTaskNames(taskName); + if (taskExecutions.isEmpty()) { + return null; + } else if (taskExecutions.size() == 1) { + return taskExecutions.get(0); + } else { + throw new IllegalStateException( + "Only expected a single TaskExecution but received " + + taskExecutions.size()); + } + } + + @Override + public long getTaskExecutionCount() { + try { + Long count = this.jdbcTemplate.queryForObject(TASK_EXECUTION_COUNT, new MapSqlParameterSource(), Long.class); + return count != null ? 
count : 0; + } catch (EmptyResultDataAccessException e) { + return 0; + } + } + + @Override + public Page findRunningTaskExecutions(String taskName, Pageable pageable) { + return queryForPageableResults(pageable, SELECT_CLAUSE, FROM_CLAUSE, + RUNNING_TASK_WHERE_CLAUSE, + new MapSqlParameterSource("taskName", taskName), + getRunningTaskExecutionCountByTaskName(taskName)); + } + + @Override + public Page findTaskExecutionsByName(String taskName, Pageable pageable) { + return queryForPageableResults(pageable, SELECT_CLAUSE, FROM_CLAUSE, + TASK_NAME_WHERE_CLAUSE, new MapSqlParameterSource("taskName", taskName), + getTaskExecutionCountByTaskName(taskName)); + } + + @Override + public List getTaskNames() { + return this.jdbcTemplate.queryForList(FIND_TASK_NAMES, + new MapSqlParameterSource(), String.class); + } + + @Override + public Page findAll(Pageable pageable) { + return queryForPageableResults(pageable, SELECT_CLAUSE, FROM_CLAUSE, null, + new MapSqlParameterSource(), getTaskExecutionCount()); + } + + + private Page queryForPageableResults( + Pageable pageable, + String selectClause, + String fromClause, + String whereClause, + MapSqlParameterSource queryParameters, + long totalCount + ) { + SqlPagingQueryProviderFactoryBean factoryBean = new SqlPagingQueryProviderFactoryBean(); + factoryBean.setSelectClause(selectClause); + factoryBean.setFromClause(fromClause); + if (StringUtils.hasText(whereClause)) { + factoryBean.setWhereClause(whereClause); + } + final Sort sort = pageable.getSort(); + final LinkedHashMap sortOrderMap = new LinkedHashMap<>(); + + if (sort != null) { + for (Sort.Order sortOrder : sort) { + if (validSortColumns.contains(sortOrder.getProperty().toUpperCase())) { + sortOrderMap.put(sortOrder.getProperty(), + sortOrder.isAscending() ? 
Order.ASCENDING : Order.DESCENDING);
+				}
+				else {
+					throw new IllegalArgumentException(
+							String.format("Invalid sort option selected: %s", sortOrder.getProperty()));
+				}
+			}
+		}
+
+		if (!CollectionUtils.isEmpty(sortOrderMap)) {
+			factoryBean.setSortKeys(sortOrderMap);
+		}
+		else {
+			factoryBean.setSortKeys(this.orderMap);
+		}
+
+		factoryBean.setDataSource(this.dataSource);
+		PagingQueryProvider pagingQueryProvider;
+		try {
+			pagingQueryProvider = factoryBean.getObject();
+			pagingQueryProvider.init(this.dataSource);
+		}
+		catch (Exception e) {
+			throw new IllegalStateException(e);
+		}
+		String query = pagingQueryProvider.getPageQuery(pageable);
+		List<AggregateTaskExecution> resultList = this.jdbcTemplate.query(query,
+				queryParameters, new CompositeTaskExecutionRowMapper());
+		return new PageImpl<>(resultList, pageable, totalCount);
+	}
+
+	private class CompositeTaskExecutionRowMapper implements RowMapper<AggregateTaskExecution> {
+
+		private CompositeTaskExecutionRowMapper() {
+		}
+
+		@Override
+		public AggregateTaskExecution mapRow(ResultSet rs, int rowNum) throws SQLException {
+			long id = rs.getLong("TASK_EXECUTION_ID");
+			Long parentExecutionId = rs.getLong("PARENT_EXECUTION_ID");
+			if (rs.wasNull()) {
+				parentExecutionId = null;
+			}
+			String schemaTarget = rs.getString("SCHEMA_TARGET");
+			if (schemaTarget != null && schemaService.getTarget(schemaTarget) == null) {
+				logger.warn("Cannot find schemaTarget:{}", schemaTarget);
+			}
+			return new AggregateTaskExecution(id,
+					getNullableExitCode(rs),
+					rs.getString("TASK_NAME"),
+					rs.getTimestamp("START_TIME"),
+					rs.getTimestamp("END_TIME"),
+					rs.getString("EXIT_MESSAGE"),
+					getTaskArguments(id, schemaTarget),
+					rs.getString("ERROR_MESSAGE"),
+					rs.getString("EXTERNAL_EXECUTION_ID"),
+					parentExecutionId,
+					null,
+					schemaTarget
+			);
+		}
+
+		private Integer getNullableExitCode(ResultSet rs) throws SQLException {
+			int exitCode = rs.getInt("EXIT_CODE");
+			return !rs.wasNull() ? exitCode : null;
+		}
+	}
+
+	private List<String> getTaskArguments(long taskExecutionId, String schemaTarget) {
+		final List<String> params = new ArrayList<>();
+		RowCallbackHandler handler = rs -> params.add(rs.getString(2));
+		this.jdbcTemplate.query(
+				FIND_TASK_ARGUMENTS,
+				new MapSqlParameterSource("taskExecutionId", taskExecutionId)
+						.addValue("schemaTarget", schemaTarget),
+				handler);
+		return params;
+	}
+}
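A short sketch of how these pageable queries are meant to be driven (illustrative only; `queryDao` is an assumed `DataflowTaskExecutionQueryDao` variable and the task name is hypothetical). Sort properties are checked against the whitelist of column names above, so unknown properties fail fast:

	Pageable page = PageRequest.of(0, 20, Sort.by(Sort.Direction.DESC, "START_TIME"));
	Page<AggregateTaskExecution> executions = queryDao.findTaskExecutionsByName("my-task", page);
	// Anything outside the whitelisted columns fails fast:
	// PageRequest.of(0, 20, Sort.by("NOT_A_COLUMN")) -> IllegalArgumentException
	executions.forEach(execution -> System.out.println(execution.getTaskName()));

diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/DefaultAggregateExecutionSupport.java b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/DefaultAggregateExecutionSupport.java
new file mode 100644
index 0000000000..0e49f165f4
--- /dev/null
+++ b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/DefaultAggregateExecutionSupport.java
@@ -0,0 +1,136 @@
+/*
+ * Copyright 2023-2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.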
+ */ +package org.springframework.cloud.dataflow.aggregate.task.impl; + +import java.util.List; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; +import org.springframework.cloud.dataflow.aggregate.task.TaskDeploymentReader; +import org.springframework.cloud.dataflow.core.AppRegistration; +import org.springframework.cloud.dataflow.core.ApplicationType; +import org.springframework.cloud.dataflow.core.TaskDefinition; +import org.springframework.cloud.dataflow.core.TaskDeployment; +import org.springframework.cloud.dataflow.registry.service.AppRegistryService; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.task.repository.TaskExecution; +import org.springframework.util.StringUtils; + +/** + * Provides support for access to SchemaVersionTarget information and conversion of execution data to composite executions. + * + * @author Corneil du Plessis + */ + +public class DefaultAggregateExecutionSupport implements AggregateExecutionSupport { + private static Logger logger = LoggerFactory.getLogger(AggregateExecutionSupport.class); + + private final AppRegistryService registryService; + + private final SchemaService schemaService; + + public DefaultAggregateExecutionSupport( + AppRegistryService registryService, + SchemaService schemaService + ) { + this.registryService = registryService; + this.schemaService = schemaService; + } + + @Override + public AggregateTaskExecution from(TaskExecution execution, TaskDefinitionReader taskDefinitionReader, TaskDeploymentReader taskDeploymentReader) { + TaskDefinition taskDefinition = taskDefinitionReader.findTaskDefinition(execution.getTaskName()); + TaskDeployment deployment = null; + if (StringUtils.hasText(execution.getExternalExecutionId())) { + deployment = taskDeploymentReader.getDeployment(execution.getExternalExecutionId()); + } else { + if(taskDefinition == null) { + logger.warn("TaskDefinition not found for " + execution.getTaskName()); + } else { + deployment = taskDeploymentReader.findByDefinitionName(taskDefinition.getName()); + } + } + SchemaVersionTarget versionTarget = findSchemaVersionTarget(execution.getTaskName(), taskDefinition); + return from(execution, versionTarget.getName(), deployment != null ? deployment.getPlatformName() : null); + } + + @Override + public SchemaVersionTarget findSchemaVersionTarget(String taskName, TaskDefinitionReader taskDefinitionReader) { + logger.debug("findSchemaVersionTarget:{}", taskName); + TaskDefinition definition = taskDefinitionReader.findTaskDefinition(taskName); + return findSchemaVersionTarget(taskName, definition); + } + + @Override + public SchemaVersionTarget findSchemaVersionTarget(String taskName, TaskDefinition taskDefinition) { + String registeredName = taskDefinition != null ? 
taskDefinition.getRegisteredAppName() : taskName;
+		AppRegistration registration = findTaskAppRegistration(registeredName);
+		if (registration == null) {
+			logger.warn("Cannot find AppRegistration for {}", taskName);
+			return SchemaVersionTarget.defaultTarget();
+		}
+		final AppRegistration finalRegistration = registration;
+		List<SchemaVersionTarget> versionTargets = schemaService.getTargets().getSchemas()
+				.stream()
+				.filter(target -> target.getSchemaVersion().equals(finalRegistration.getBootVersion()))
+				.collect(Collectors.toList());
+		if (versionTargets.isEmpty()) {
+			logger.warn("Cannot find a SchemaVersionTarget for {}", registration.getBootVersion());
+			return SchemaVersionTarget.defaultTarget();
+		}
+		if (versionTargets.size() > 1) {
+			throw new IllegalStateException("Multiple SchemaVersionTargets for " + registration.getBootVersion());
+		}
+		SchemaVersionTarget schemaVersionTarget = versionTargets.get(0);
+		logger.debug("findSchemaVersionTarget:{}={},{}", taskName, registeredName, schemaVersionTarget);
+		return schemaVersionTarget;
+	}
+
+	@Override
+	public AppRegistration findTaskAppRegistration(String registeredAppName) {
+		AppRegistration registration = registryService.find(registeredAppName, ApplicationType.task);
+		if (registration == null) {
+			registration = registryService.find(registeredAppName, ApplicationType.app);
+		}
+		return registration;
+	}
+
+	@Override
+	public AggregateTaskExecution from(TaskExecution execution, String schemaTarget, String platformName) {
+		if (execution != null) {
+			return new AggregateTaskExecution(
+					execution.getExecutionId(),
+					execution.getExitCode(),
+					execution.getTaskName(),
+					execution.getStartTime(),
+					execution.getEndTime(),
+					execution.getExitMessage(),
+					execution.getArguments(),
+					execution.getErrorMessage(),
+					execution.getExternalExecutionId(),
+					execution.getParentExecutionId(),
+					platformName,
+					schemaTarget);
+		}
+		return null;
+	}
+}
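A short sketch of the resolution flow implemented above (illustrative only; `aggregateExecutionSupport` and `taskDefinitionReader` are assumed beans, and "timestamp-task" an assumed registered task name). The registered app name behind the definition is looked up in the app registry, and the registration's Boot version selects the matching SchemaVersionTarget:

	SchemaVersionTarget target = aggregateExecutionSupport
			.findSchemaVersionTarget("timestamp-task", taskDefinitionReader);
	// The resolved target drives which set of task tables is queried,
	// e.g. via target.getName() and target.getTaskPrefix().
	System.out.println(target.getName() + " -> " + target.getTaskPrefix());

diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/DefaultAggregateTaskExplorer.java b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/DefaultAggregateTaskExplorer.java
new file mode 100644
index 0000000000..b7aa10408b
--- /dev/null
+++ b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/DefaultAggregateTaskExplorer.java
@@ -0,0 +1,279 @@
+/*
+ * Copyright 2023-2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.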
+ */
+package org.springframework.cloud.dataflow.aggregate.task.impl;
+
+import javax.annotation.PostConstruct;
+import javax.sql.DataSource;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport;
+import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer;
+import org.springframework.cloud.dataflow.aggregate.task.DataflowTaskExecutionQueryDao;
+import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader;
+import org.springframework.cloud.dataflow.aggregate.task.TaskDeploymentReader;
+import org.springframework.cloud.dataflow.core.TaskDefinition;
+import org.springframework.cloud.dataflow.core.TaskDeployment;
+import org.springframework.cloud.dataflow.core.database.support.MultiSchemaTaskExecutionDaoFactoryBean;
+import org.springframework.cloud.dataflow.schema.AggregateTaskExecution;
+import org.springframework.cloud.dataflow.schema.SchemaVersionTarget;
+import org.springframework.cloud.dataflow.schema.service.SchemaService;
+import org.springframework.cloud.task.repository.TaskExecution;
+import org.springframework.cloud.task.repository.TaskExplorer;
+import org.springframework.cloud.task.repository.support.SimpleTaskExplorer;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.PageImpl;
+import org.springframework.data.domain.Pageable;
+import org.springframework.util.Assert;
+import org.springframework.util.StringUtils;
+
+/**
+ * Implements {@link AggregateTaskExplorer}. This class is responsible for retrieving
+ * task execution data for all schema targets.
+ * + * @author Corneil du Plessis + */ +public class DefaultAggregateTaskExplorer implements AggregateTaskExplorer { + private final static Logger logger = LoggerFactory.getLogger(DefaultAggregateTaskExplorer.class); + + private final Map taskExplorers; + + private final AggregateExecutionSupport aggregateExecutionSupport; + + private final DataflowTaskExecutionQueryDao taskExecutionQueryDao; + + private final TaskDefinitionReader taskDefinitionReader; + + private final TaskDeploymentReader taskDeploymentReader; + + public DefaultAggregateTaskExplorer( + DataSource dataSource, + DataflowTaskExecutionQueryDao taskExecutionQueryDao, + SchemaService schemaService, + AggregateExecutionSupport aggregateExecutionSupport, + TaskDefinitionReader taskDefinitionReader, + TaskDeploymentReader taskDeploymentReader + ) { + this.taskExecutionQueryDao = taskExecutionQueryDao; + this.aggregateExecutionSupport = aggregateExecutionSupport; + this.taskDefinitionReader = taskDefinitionReader; + this.taskDeploymentReader = taskDeploymentReader; + Map result = new HashMap<>(); + for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + TaskExplorer explorer = new SimpleTaskExplorer(new MultiSchemaTaskExecutionDaoFactoryBean(dataSource, target.getTaskPrefix())); + result.put(target.getName(), explorer); + } + taskExplorers = Collections.unmodifiableMap(result); + } + + @Override + public AggregateTaskExecution getTaskExecution(long executionId, String schemaTarget) { + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + TaskExplorer taskExplorer = taskExplorers.get(schemaTarget); + Assert.notNull(taskExplorer, "Expected taskExplorer for " + schemaTarget); + TaskExecution taskExecution = taskExplorer.getTaskExecution(executionId); + TaskDeployment deployment = null; + if (taskExecution != null) { + if (StringUtils.hasText(taskExecution.getExternalExecutionId())) { + deployment = taskDeploymentReader.getDeployment(taskExecution.getExternalExecutionId()); + } else { + TaskDefinition definition = taskDefinitionReader.findTaskDefinition(taskExecution.getTaskName()); + if (definition == null) { + logger.warn("Cannot find definition for " + taskExecution.getTaskName()); + } else { + deployment = taskDeploymentReader.findByDefinitionName(definition.getName()); + } + } + } + return aggregateExecutionSupport.from(taskExecution, schemaTarget, deployment != null ? 
deployment.getPlatformName() : null); + } + + @Override + public AggregateTaskExecution getTaskExecutionByExternalExecutionId(String externalExecutionId, String platform) { + TaskDeployment deployment = taskDeploymentReader.getDeployment(externalExecutionId, platform); + if (deployment != null) { + return this.taskExecutionQueryDao.geTaskExecutionByExecutionId(externalExecutionId, deployment.getTaskDefinitionName()); + } + return null; + } + + @Override + public List findChildTaskExecutions(long executionId, String schemaTarget) { + return this.taskExecutionQueryDao.findChildTaskExecutions(executionId, schemaTarget); + } + + @Override + public List findChildTaskExecutions(Collection parentIds, String schemaTarget) { + return this.taskExecutionQueryDao.findChildTaskExecutions(parentIds, schemaTarget); + } + + @Override + public Page findRunningTaskExecutions(String taskName, Pageable pageable) { + SchemaVersionTarget target = aggregateExecutionSupport.findSchemaVersionTarget(taskName, taskDefinitionReader); + Assert.notNull(target, "Expected to find SchemaVersionTarget for " + taskName); + TaskExplorer taskExplorer = taskExplorers.get(target.getName()); + Assert.notNull(taskExplorer, "Expected TaskExplorer for " + target.getName()); + TaskDefinition definition = taskDefinitionReader.findTaskDefinition(taskName); + if (definition == null) { + logger.warn("Cannot find TaskDefinition for " + taskName); + } + TaskDeployment deployment = definition != null ? taskDeploymentReader.findByDefinitionName(definition.getName()) : null; + final String platformName = deployment != null ? deployment.getPlatformName() : null; + Page executions = taskExplorer.findRunningTaskExecutions(taskName, pageable); + List taskExecutions = executions.getContent() + .stream() + .map(execution -> aggregateExecutionSupport.from(execution, target.getName(), platformName)) + .collect(Collectors.toList()); + return new PageImpl<>(taskExecutions, executions.getPageable(), executions.getTotalElements()); + } + + @Override + public List getTaskNames() { + List result = new ArrayList<>(); + for (TaskExplorer explorer : taskExplorers.values()) { + result.addAll(explorer.getTaskNames()); + } + return result; + } + + @Override + public long getTaskExecutionCountByTaskName(String taskName) { + long result = 0; + for (TaskExplorer explorer : taskExplorers.values()) { + result += explorer.getTaskExecutionCountByTaskName(taskName); + } + return result; + } + + @Override + public long getTaskExecutionCount() { + long result = 0; + for (TaskExplorer explorer : taskExplorers.values()) { + result += explorer.getTaskExecutionCount(); + } + return result; + } + + @Override + public long getRunningTaskExecutionCount() { + long result = 0; + for (TaskExplorer explorer : taskExplorers.values()) { + result += explorer.getRunningTaskExecutionCount(); + } + return result; + } + + @Override + public List findTaskExecutionsByName(String taskName, boolean completed) { + return this.taskExecutionQueryDao.findTaskExecutionsByName(taskName, completed); + } + + @Override + public Page findTaskExecutionsByName(String taskName, Pageable pageable) { + + String platformName = getPlatformName(taskName); + SchemaVersionTarget target = aggregateExecutionSupport.findSchemaVersionTarget(taskName, taskDefinitionReader); + Assert.notNull(target, "Expected to find SchemaVersionTarget for " + taskName); + TaskExplorer taskExplorer = taskExplorers.get(target.getName()); + Assert.notNull(taskExplorer, "Expected TaskExplorer for " + target.getName()); + Page executions 
= taskExplorer.findTaskExecutionsByName(taskName, pageable); + List taskExecutions = executions.getContent() + .stream() + .map(execution -> aggregateExecutionSupport.from(execution, target.getName(), platformName)) + .collect(Collectors.toList()); + return new PageImpl<>(taskExecutions, executions.getPageable(), executions.getTotalElements()); + } + + private String getPlatformName(String taskName) { + String platformName = null; + TaskDefinition taskDefinition = taskDefinitionReader.findTaskDefinition(taskName); + if (taskDefinition != null) { + TaskDeployment taskDeployment = taskDeploymentReader.findByDefinitionName(taskDefinition.getName()); + platformName = taskDeployment != null ? taskDeployment.getPlatformName() : null; + } else { + logger.warn("TaskDefinition not found for " + taskName); + } + return platformName; + } + + @Override + public Page findAll(Pageable pageable) { + return taskExecutionQueryDao.findAll(pageable); + } + + @Override + public Long getTaskExecutionIdByJobExecutionId(long jobExecutionId, String schemaTarget) { + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + TaskExplorer taskExplorer = taskExplorers.get(schemaTarget); + Assert.notNull(taskExplorer, "Expected TaskExplorer for " + schemaTarget); + return taskExplorer.getTaskExecutionIdByJobExecutionId(jobExecutionId); + } + + @Override + public Set getJobExecutionIdsByTaskExecutionId(long taskExecutionId, String schemaTarget) { + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + TaskExplorer taskExplorer = taskExplorers.get(schemaTarget); + Assert.notNull(taskExplorer, "Expected TaskExplorer for " + schemaTarget); + return taskExplorer.getJobExecutionIdsByTaskExecutionId(taskExecutionId); + } + + @Override + public List getLatestTaskExecutionsByTaskNames(String... 
taskNames) {
+		List<AggregateTaskExecution> result = new ArrayList<>();
+		for (String taskName : taskNames) {
+			SchemaVersionTarget target = aggregateExecutionSupport.findSchemaVersionTarget(taskName, taskDefinitionReader);
+			String platformName = getPlatformName(taskName);
+			Assert.notNull(target, "Expected to find SchemaVersionTarget for " + taskName);
+			TaskExplorer taskExplorer = taskExplorers.get(target.getName());
+			Assert.notNull(taskExplorer, "Expected TaskExplorer for " + target.getName());
+			// Query only the current task name against its own schema target; passing all
+			// names here would fetch every task's executions from every explorer and
+			// mislabel them with this task's target and platform.
+			List<AggregateTaskExecution> taskExecutions = taskExplorer.getLatestTaskExecutionsByTaskNames(taskName)
+					.stream()
+					.map(execution -> aggregateExecutionSupport.from(execution, target.getName(), platformName))
+					.collect(Collectors.toList());
+			result.addAll(taskExecutions);
+		}
+		return result;
+	}
+
+	@Override
+	public AggregateTaskExecution getLatestTaskExecutionForTaskName(String taskName) {
+		SchemaVersionTarget target = aggregateExecutionSupport.findSchemaVersionTarget(taskName, taskDefinitionReader);
+		Assert.notNull(target, "Expected to find SchemaVersionTarget for " + taskName);
+		TaskExplorer taskExplorer = taskExplorers.get(target.getName());
+		Assert.notNull(taskExplorer, "Expected TaskExplorer for " + target.getName());
+		return aggregateExecutionSupport.from(taskExplorer.getLatestTaskExecutionForTaskName(taskName), target.getName(), getPlatformName(taskName));
+	}
+
+	@PostConstruct
+	public void setup() {
+		logger.info("created: org.springframework.cloud.dataflow.aggregate.task.impl.DefaultAggregateTaskExplorer");
+	}
+}
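A short usage sketch of the aggregation above (illustrative only; `aggregateTaskExplorer` is an assumed `AggregateTaskExplorer` bean and the task names are hypothetical registrations, one Boot 2 and one Boot 3). Because each task name is resolved to its own schema target, executions from both generations can be listed in a single call:

	List<AggregateTaskExecution> latest =
			aggregateTaskExplorer.getLatestTaskExecutionsByTaskNames("boot2-task", "boot3-task");
	latest.forEach(execution ->
			System.out.println(execution.getTaskName() + " -> " + execution.getSchemaTarget()));

diff --git a/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/DefaultTaskRepositoryContainer.java b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/DefaultTaskRepositoryContainer.java
new file mode 100644
index 0000000000..3db52d91cc
--- /dev/null
+++ b/spring-cloud-dataflow-aggregate-task/src/main/java/org/springframework/cloud/dataflow/aggregate/task/impl/DefaultTaskRepositoryContainer.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.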
+ */ + +package org.springframework.cloud.dataflow.aggregate.task.impl; + +import javax.annotation.PostConstruct; +import javax.sql.DataSource; +import java.util.HashMap; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.aggregate.task.TaskRepositoryContainer; +import org.springframework.cloud.dataflow.core.database.support.MultiSchemaTaskExecutionDaoFactoryBean; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.task.repository.TaskRepository; +import org.springframework.cloud.task.repository.support.SimpleTaskRepository; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * This class manages a collection of TaskRepositories for all schemas. + * In the future there will be a datasource container for all names datasources. + * + * @author Corneil du Plessis + */ +public class DefaultTaskRepositoryContainer implements TaskRepositoryContainer { + private final static Logger logger = LoggerFactory.getLogger(DefaultTaskRepositoryContainer.class); + + private final Map taskRepositories = new HashMap<>(); + + public DefaultTaskRepositoryContainer(DataSource dataSource, SchemaService schemaService) { + for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + MultiSchemaTaskExecutionDaoFactoryBean taskExecutionDaoFactoryBean = new MultiSchemaTaskExecutionDaoFactoryBean(dataSource, target.getTaskPrefix()); + add(target.getName(), new SimpleTaskRepository(taskExecutionDaoFactoryBean)); + } + } + + private void add(String schemaTarget, TaskRepository taskRepository) { + taskRepositories.put(schemaTarget, taskRepository); + } + + @Override + public TaskRepository get(String schemaTarget) { + if(!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + TaskRepository repository = taskRepositories.get(schemaTarget); + Assert.notNull(repository, "Expected TaskRepository for " + schemaTarget); + return repository; + } + + @PostConstruct + public void setup() { + logger.info("created: org.springframework.cloud.dataflow.aggregate.task.impl.DefaultTaskRepositoryContainer"); + } +} diff --git a/spring-cloud-dataflow-aggregate-task/src/test/resources/logback-test.xml b/spring-cloud-dataflow-aggregate-task/src/test/resources/logback-test.xml new file mode 100644 index 0000000000..ef1714cfe1 --- /dev/null +++ b/spring-cloud-dataflow-aggregate-task/src/test/resources/logback-test.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/spring-cloud-dataflow-autoconfigure/pom.xml b/spring-cloud-dataflow-autoconfigure/pom.xml index 0044943a61..fea4fd9b26 100644 --- a/spring-cloud-dataflow-autoconfigure/pom.xml +++ b/spring-cloud-dataflow-autoconfigure/pom.xml @@ -91,6 +91,10 @@ org.apache.maven.plugins maven-surefire-plugin 3.0.0 + + 1 + 1 + org.apache.maven.surefire diff --git a/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/AbstractSchedulerPerPlatformTest.java b/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/AbstractSchedulerPerPlatformTest.java index ac36cc18c5..2f76edafa0 100644 --- a/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/AbstractSchedulerPerPlatformTest.java +++ 
b/spring-cloud-dataflow-autoconfigure/src/test/java/org/springframework/cloud/dataflow/autoconfigure/local/AbstractSchedulerPerPlatformTest.java @@ -19,7 +19,6 @@ import io.pivotal.reactor.scheduler.ReactorSchedulerClient; import org.cloudfoundry.operations.CloudFoundryOperations; import org.junit.runner.RunWith; -import org.mockito.Mockito; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.actuate.autoconfigure.security.servlet.ManagementWebSecurityAutoConfiguration; @@ -30,6 +29,9 @@ import org.springframework.boot.cloud.CloudPlatform; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; +import org.springframework.cloud.dataflow.aggregate.task.TaskDeploymentReader; +import org.springframework.cloud.dataflow.registry.service.AppRegistryService; import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryConnectionProperties; import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryDeployerAutoConfiguration; import org.springframework.cloud.deployer.spi.cloudfoundry.CloudFoundryDeploymentProperties; @@ -41,6 +43,8 @@ import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import static org.mockito.Mockito.mock; + /** * @author Christian Tzolov */ @@ -54,10 +58,24 @@ public abstract class AbstractSchedulerPerPlatformTest { protected ApplicationContext context; @Configuration - @EnableAutoConfiguration(exclude = { LocalDataFlowServerAutoConfiguration.class, + @EnableAutoConfiguration(exclude = {LocalDataFlowServerAutoConfiguration.class, CloudFoundryDeployerAutoConfiguration.class, SecurityAutoConfiguration.class, - SecurityFilterAutoConfiguration.class, ManagementWebSecurityAutoConfiguration.class }) + SecurityFilterAutoConfiguration.class, ManagementWebSecurityAutoConfiguration.class}) public static class AutoConfigurationApplication { + @Bean + public AppRegistryService appRegistryService() { + return mock(AppRegistryService.class); + } + + @Bean + public TaskDefinitionReader taskDefinitionReader() { + return mock(TaskDefinitionReader.class); + } + + @Bean + public TaskDeploymentReader taskDeploymentReader() { + return mock(TaskDeploymentReader.class); + } @Configuration @ConditionalOnCloudPlatform(CloudPlatform.CLOUD_FOUNDRY) @@ -68,25 +86,25 @@ public static class CloudFoundryMockConfig { @Bean @Primary public ReactorSchedulerClient reactorSchedulerClient() { - return Mockito.mock(ReactorSchedulerClient.class); + return mock(ReactorSchedulerClient.class); } @Bean @Primary public CloudFoundryOperations cloudFoundryOperations() { - return Mockito.mock(CloudFoundryOperations.class); + return mock(CloudFoundryOperations.class); } @Bean @Primary public CloudFoundryConnectionProperties cloudFoundryConnectionProperties() { - return Mockito.mock(CloudFoundryConnectionProperties.class); + return mock(CloudFoundryConnectionProperties.class); } @Bean @Primary public CloudFoundryTaskLauncher CloudFoundryTaskLauncher() { - return Mockito.mock(CloudFoundryTaskLauncher.class); + return mock(CloudFoundryTaskLauncher.class); } } } diff --git a/spring-cloud-dataflow-build/pom.xml b/spring-cloud-dataflow-build/pom.xml index 77415ae282..91b5535c13 100644 --- a/spring-cloud-dataflow-build/pom.xml +++ b/spring-cloud-dataflow-build/pom.xml @@ -25,14 +25,13 @@ 2.7.13 ${project.build.directory}/build-docs 
${project.build.directory}/refdocs/ - 0.1.0.RELEASE 0.1.3.RELEASE - 1.5.0-alpha.18 - 1.5.18 + 2.3.7 + 2.2.9 ${project.version} deploy ${project.version} - 1.5.8 + 2.5.7 9.2.7.0 @@ -61,7 +60,7 @@ 3.0.0-M2 1.6 3.1.1 - 1.5.8 + 2.2.4 3.0.0 2.2.1 1.20 @@ -237,6 +236,8 @@ maven-surefire-plugin ${maven-surefire-plugin.version} + 1 + 1 **/*Tests.java **/*Test.java @@ -653,6 +654,8 @@ maven-surefire-plugin ${maven-surefire-plugin.version} + 1 + 1 listener diff --git a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-dependencies/pom.xml b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-dependencies/pom.xml index 196bd47339..7e017f9b99 100644 --- a/spring-cloud-dataflow-build/spring-cloud-dataflow-build-dependencies/pom.xml +++ b/spring-cloud-dataflow-build/spring-cloud-dataflow-build-dependencies/pom.xml @@ -21,13 +21,13 @@ UTF-8 2.7.13 - 2021.0.7 + 2021.0.8 2.1.5 4.13.1 2.7 1.10.0 42.4.3 - 1.5.1 + 1.5.2 2.3.0 3.5.4 5.12.4 diff --git a/spring-cloud-dataflow-classic-docs/pom.xml b/spring-cloud-dataflow-classic-docs/pom.xml index 2476dbce02..6e9c7aa6ee 100644 --- a/spring-cloud-dataflow-classic-docs/pom.xml +++ b/spring-cloud-dataflow-classic-docs/pom.xml @@ -127,6 +127,10 @@ + + + 1 + 1 true false diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/ApiDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/ApiDocumentation.java index 56b007b6a8..2019b3a59a 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/ApiDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/ApiDocumentation.java @@ -112,7 +112,9 @@ public void index() throws Exception { linkWithRel("tasks/definitions").description("Provides the task definition resource"), linkWithRel("tasks/definitions/definition").description("Provides details for a specific task definition"), linkWithRel("tasks/validation").description("Provides the validation for a task definition"), - linkWithRel("tasks/executions").description("Returns Task executions and allows launching of tasks"), + linkWithRel("tasks/executions").description("Returns Task executions"), + linkWithRel("tasks/executions/launch").description("Provides for launching a Task execution"), + linkWithRel("tasks/executions/external").description("Returns Task execution by external id"), linkWithRel("tasks/executions/current").description("Provides the current count of running tasks"), linkWithRel("tasks/info/executions").description("Provides the task executions info"), linkWithRel("tasks/schedules").description("Provides schedule information of tasks"), @@ -213,11 +215,16 @@ public void index() throws Exception { fieldWithPath("_links.tasks/definitions/definition.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/definitions/definition is templated"), fieldWithPath("_links.tasks/executions.href").description("Link to the tasks/executions"), + fieldWithPath("_links.tasks/executions/launch.href").description("Link to tasks/executions/launch"), + fieldWithPath("_links.tasks/executions/launch.templated").type(JsonFieldType.BOOLEAN).optional().description("Indicates that Link tasks/executions/launch is templated"), fieldWithPath("_links.tasks/executions/name.href").description("Link to the tasks/executions/name"), 
fieldWithPath("_links.tasks/executions/name.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/executions/name is templated"), fieldWithPath("_links.tasks/executions/current.href").description("Link to the tasks/executions/current"), fieldWithPath("_links.tasks/executions/execution.href").description("Link to the tasks/executions/execution"), fieldWithPath("_links.tasks/executions/execution.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/executions/execution is templated"), + fieldWithPath("_links.tasks/executions/external.href").description("Link to the tasks/executions/external"), + fieldWithPath("_links.tasks/executions/external.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/executions/external is templated"), + fieldWithPath("_links.tasks/info/executions.href").description("Link to the tasks/info/executions"), fieldWithPath("_links.tasks/info/executions.templated").type(JsonFieldType.BOOLEAN).optional().description("Link tasks/info is templated"), diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AppRegistryDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AppRegistryDocumentation.java index a2939e0a19..287689fed9 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AppRegistryDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/AppRegistryDocumentation.java @@ -45,196 +45,203 @@ */ public class AppRegistryDocumentation extends BaseDocumentation { - @Test - public void appDefault() throws Exception { - registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); - registerApp(ApplicationType.source, "http", "1.3.0.RELEASE"); - - this.mockMvc.perform(RestDocumentationRequestBuilders - .put("/apps/{type}/{name}/{version:.+}", ApplicationType.source, "http", "1.2.0.RELEASE").accept(MediaType.APPLICATION_JSON)) - .andExpect(status().isAccepted()) - .andDo( - this.documentationHandler.document( - pathParameters( - parameterWithName("type").description("The type of application. One of " + Arrays.asList(ApplicationType.values())), - parameterWithName("name").description("The name of the application"), - parameterWithName("version").description("The version of the application") - ) - ) - ); - unregisterApp(ApplicationType.source, "http", "1.2.0.RELEASE"); - unregisterApp(ApplicationType.source, "http", "1.3.0.RELEASE"); - } - - @Test - public void registeringAnApplicationVersion() throws Exception { - this.mockMvc.perform( - post("/apps/{type}/{name}/{version:.+}", ApplicationType.source, "http", "1.1.0.RELEASE") - .param("uri", "maven://org.springframework.cloud.stream.app:http-source-rabbit:1.1.0.RELEASE")) - .andExpect(status().isCreated()) - .andDo( - this.documentationHandler.document( - pathParameters( - parameterWithName("type") - .description("The type of application to register. 
One of " + Arrays.asList(ApplicationType.values()) + " (optional)"), - parameterWithName("name").description("The name of the application to register"), - parameterWithName("version").description("The version of the application to register") - ), - requestParameters( - parameterWithName("uri").description("URI where the application bits reside"), - parameterWithName("metadata-uri").optional() - .description("URI where the application metadata jar can be found"), - parameterWithName("force").optional() - .description("Must be true if a registration with the same name and type already exists, otherwise an error will occur") - ) - ) - ); - - unregisterApp(ApplicationType.source, "http", "1.1.0.RELEASE"); - } + @Test + public void appDefault() throws Exception { + registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); + registerApp(ApplicationType.source, "http", "1.3.0.RELEASE"); + + this.mockMvc.perform(RestDocumentationRequestBuilders + .put("/apps/{type}/{name}/{version:.+}", ApplicationType.source, "http", "1.2.0.RELEASE") + .accept(MediaType.APPLICATION_JSON)) + .andExpect(status().isAccepted()) + .andDo( + this.documentationHandler.document( + pathParameters( + parameterWithName("type").description("The type of application. One of " + Arrays.asList(ApplicationType.values())), + parameterWithName("name").description("The name of the application"), + parameterWithName("version").description("The version of the application") + ) + ) + ); + unregisterApp(ApplicationType.source, "http", "1.2.0.RELEASE"); + unregisterApp(ApplicationType.source, "http", "1.3.0.RELEASE"); + } + + @Test + public void registeringAnApplicationVersion() throws Exception { + this.mockMvc.perform( + post("/apps/{type}/{name}/{version:.+}", ApplicationType.source, "http", "1.1.0.RELEASE") + .param("uri", "maven://org.springframework.cloud.stream.app:http-source-rabbit:1.1.0.RELEASE") + .queryParam("bootVersion", "2")) + .andExpect(status().isCreated()) + .andDo( + this.documentationHandler.document( + pathParameters( + parameterWithName("type") + .description("The type of application to register. One of " + Arrays.asList(ApplicationType.values()) + " (optional)"), + parameterWithName("name").description("The name of the application to register"), + parameterWithName("version").description("The version of the application to register") + ), + requestParameters( + parameterWithName("uri").description("URI where the application bits reside"), + parameterWithName("metadata-uri").optional() + .description("URI where the application metadata jar can be found"), + parameterWithName("force").optional() + .description("Must be true if a registration with the same name and type already exists, otherwise an error will occur"), + parameterWithName("bootVersion").optional() + .description("Spring Boot version. Value of 2 or 3. Must be supplied of greater than 2.") + ) + ) + ); + + unregisterApp(ApplicationType.source, "http", "1.1.0.RELEASE"); + } @Test public void bulkRegisteringApps() throws Exception { this.mockMvc.perform( - post("/apps") - .param("apps", "source.http=maven://org.springframework.cloud.stream.app:http-source-rabbit:1.1.0.RELEASE") - .param("force", "false")) - .andExpect(status().isCreated()) - .andDo( - this.documentationHandler.document( - requestParameters( - parameterWithName("uri").optional().description("URI where a properties file containing registrations can be fetched. Exclusive with `apps`."), - parameterWithName("apps").optional().description("Inline set of registrations. 
Exclusive with `uri`."), - parameterWithName("force").optional().description("Must be true if a registration with the same name and type already exists, otherwise an error will occur") - ) - ) - ); + post("/apps") + .param("apps", "source.http=maven://org.springframework.cloud.stream.app:http-source-rabbit:1.1.0.RELEASE") + .param("force", "false")) + .andExpect(status().isCreated()) + .andDo( + this.documentationHandler.document( + requestParameters( + parameterWithName("uri").optional().description("URI where a properties file containing registrations can be fetched. Exclusive with `apps`."), + parameterWithName("apps").optional().description("Inline set of registrations. Exclusive with `uri`."), + parameterWithName("force").optional().description("Must be true if a registration with the same name and type already exists, otherwise an error will occur") + ) + ) + ); unregisterApp(ApplicationType.source, "http"); } - @Test - public void getApplicationsFiltered() throws Exception { - registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); - registerApp(ApplicationType.source, "time", "1.2.0.RELEASE"); - this.mockMvc.perform( - get("/apps") - .param("search", "") - .param("type", "source").accept(MediaType.APPLICATION_JSON) - .param("defaultVersion", "true") - .param("page", "0") - .param("size", "10") - .param("sort", "name,ASC") - ) - .andExpect(status().isOk()) - .andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("search").description("The search string performed on the name (optional)"), - parameterWithName("type") - .description("Restrict the returned apps to the type of the app. One of " + Arrays.asList(ApplicationType.values())), - parameterWithName("defaultVersion").description("The boolean flag to set to retrieve only the apps of the default versions (optional)"), - parameterWithName("page").description("The zero-based page number (optional)"), - parameterWithName("sort").description("The sort on the list (optional)"), - parameterWithName("size").description("The requested page size (optional)") - ), - responseFields( - subsectionWithPath("_embedded.appRegistrationResourceList") - .description("Contains a collection of application"), - subsectionWithPath("_links.self").description("Link to the applications resource"), - subsectionWithPath("page").description("Pagination properties") - ) - )); - - unregisterApp(ApplicationType.source, "http"); - unregisterApp(ApplicationType.source, "time"); - } + @Test + public void getApplicationsFiltered() throws Exception { + registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); + registerApp(ApplicationType.source, "time", "1.2.0.RELEASE"); + this.mockMvc.perform( + get("/apps") + .param("search", "") + .param("type", "source").accept(MediaType.APPLICATION_JSON) + .param("defaultVersion", "true") + .param("page", "0") + .param("size", "10") + .param("sort", "name,ASC") + ) + .andExpect(status().isOk()) + .andDo(this.documentationHandler.document( + requestParameters( + parameterWithName("search").description("The search string performed on the name (optional)"), + parameterWithName("type") + .description("Restrict the returned apps to the type of the app. 
One of " + Arrays.asList(ApplicationType.values())), + parameterWithName("defaultVersion").description("The boolean flag to set to retrieve only the apps of the default versions (optional)"), + parameterWithName("page").description("The zero-based page number (optional)"), + parameterWithName("sort").description("The sort on the list (optional)"), + parameterWithName("size").description("The requested page size (optional)") + ), + responseFields( + subsectionWithPath("_embedded.appRegistrationResourceList") + .description("Contains a collection of application"), + subsectionWithPath("_links.self").description("Link to the applications resource"), + subsectionWithPath("page").description("Pagination properties") + ) + )); + + unregisterApp(ApplicationType.source, "http"); + unregisterApp(ApplicationType.source, "time"); + } @Test public void getSingleApplication() throws Exception { registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); this.mockMvc.perform( - get("/apps/{type}/{name}", ApplicationType.source, "http").accept(MediaType.APPLICATION_JSON) - .param("exhaustive", "false")) - .andExpect(status().isOk()) - .andDo( - this.documentationHandler.document( - pathParameters( - parameterWithName("type").description("The type of application to query. One of " + Arrays.asList(ApplicationType.values())), - parameterWithName("name").description("The name of the application to query") - ), - requestParameters( - parameterWithName("exhaustive").optional() - .description("Return all application properties, including common Spring Boot properties") - ), - responseFields( - fieldWithPath("name").description("The name of the application"), - fieldWithPath("label").description("The label name of the application"), - fieldWithPath("type").description("The type of the application. One of " + Arrays.asList(ApplicationType.values())), - fieldWithPath("uri").description("The uri of the application"), - fieldWithPath("version").description("The version of the application"), - fieldWithPath("versions").description("All the registered versions of the application"), - fieldWithPath("defaultVersion").description("If true, the application is the default version"), - fieldWithPath("bootVersion").description("The version of Spring Boot the application targets (2, 3)"), - subsectionWithPath("options").description("The options of the application (Array)"), - fieldWithPath("shortDescription").description("The description of the application"), - fieldWithPath("inboundPortNames").description("Inbound port names of the application"), - fieldWithPath("outboundPortNames").description("Outbound port names of the application"), - fieldWithPath("optionGroups").description("Option groups of the application") - ) - ) - ); + get("/apps/{type}/{name}", ApplicationType.source, "http").accept(MediaType.APPLICATION_JSON) + .param("exhaustive", "false")) + .andExpect(status().isOk()) + .andDo( + this.documentationHandler.document( + pathParameters( + parameterWithName("type").description("The type of application to query. 
One of " + Arrays.asList(ApplicationType.values())), + parameterWithName("name").description("The name of the application to query") + ), + requestParameters( + parameterWithName("exhaustive").optional() + .description("Return all application properties, including common Spring Boot properties") + ), + responseFields( + fieldWithPath("name").description("The name of the application"), + fieldWithPath("label").description("The label name of the application"), + fieldWithPath("type").description("The type of the application. One of " + Arrays.asList(ApplicationType.values())), + fieldWithPath("uri").description("The uri of the application"), + fieldWithPath("version").description("The version of the application"), + fieldWithPath("versions").description("All the registered versions of the application"), + fieldWithPath("defaultVersion").description("If true, the application is the default version"), + fieldWithPath("bootVersion").description("The version of Spring Boot the application targets (2, 3)"), + subsectionWithPath("options").description("The options of the application (Array)"), + fieldWithPath("shortDescription").description("The description of the application"), + fieldWithPath("inboundPortNames").description("Inbound port names of the application"), + fieldWithPath("outboundPortNames").description("Outbound port names of the application"), + fieldWithPath("optionGroups").description("Option groups of the application") + ) + ) + ); unregisterApp(ApplicationType.source, "http"); } - @Test - public void registeringAnApplication() throws Exception { - this.mockMvc.perform( - post("/apps/{type}/{name}", ApplicationType.source, "http") - .param("uri", "maven://org.springframework.cloud.stream.app:http-source-rabbit:1.1.0.RELEASE")) - .andExpect(status().isCreated()) - .andDo( - this.documentationHandler.document( - pathParameters( - parameterWithName("type").description("The type of application to register. One of " + Arrays.asList(ApplicationType.values())), - parameterWithName("name").description("The name of the application to register") - ), - requestParameters( - parameterWithName("uri").description("URI where the application bits reside"), - parameterWithName("metadata-uri").optional().description("URI where the application metadata jar can be found"), - parameterWithName("force").optional().description("Must be true if a registration with the same name and type already exists, otherwise an error will occur") - ) - ) - ); - - unregisterApp(ApplicationType.source, "http"); - } - - @Test - public void unregisteringAnApplication() throws Exception { - registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); - - this.mockMvc.perform( - delete("/apps/{type}/{name}/{version}", ApplicationType.source, "http", "1.2.0.RELEASE")) - .andExpect(status().isOk()) - .andDo( - this.documentationHandler.document( - pathParameters( - parameterWithName("type").description("The type of application to unregister. 
One of " + Arrays.asList(ApplicationType.values())), - parameterWithName("name").description("The name of the application to unregister"), - parameterWithName("version").description("The version of the application to unregister (optional)") - ) + @Test + public void registeringAnApplication() throws Exception { + this.mockMvc.perform( + post("/apps/{type}/{name}", ApplicationType.source, "http") + .param("uri", "maven://org.springframework.cloud.stream.app:http-source-rabbit:1.1.0.RELEASE") + .queryParam("bootVersion", "2") ) - ); - } - - @Test - public void unregisteringAllApplications() throws Exception { - registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); - registerApp(ApplicationType.source, "http", "1.3.0.RELEASE"); - this.mockMvc.perform( - delete("/apps")) - .andExpect(status().isOk() - ); - } + .andExpect(status().isCreated()) + .andDo( + this.documentationHandler.document( + pathParameters( + parameterWithName("type").description("The type of application to register. One of " + Arrays.asList(ApplicationType.values())), + parameterWithName("name").description("The name of the application to register") + ), + requestParameters( + parameterWithName("uri").description("URI where the application bits reside"), + parameterWithName("metadata-uri").optional().description("URI where the application metadata jar can be found"), + parameterWithName("bootVersion").optional().description("The Spring Boot version of the application.Default is 2"), + parameterWithName("force").optional().description("Must be true if a registration with the same name and type already exists, otherwise an error will occur") + ) + ) + ); + + unregisterApp(ApplicationType.source, "http"); + } + + @Test + public void unregisteringAnApplication() throws Exception { + registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); + + this.mockMvc.perform( + delete("/apps/{type}/{name}/{version}", ApplicationType.source, "http", "1.2.0.RELEASE")) + .andExpect(status().isOk()) + .andDo( + this.documentationHandler.document( + pathParameters( + parameterWithName("type").description("The type of application to unregister. 
One of " + Arrays.asList(ApplicationType.values())), + parameterWithName("name").description("The name of the application to unregister"), + parameterWithName("version").description("The version of the application to unregister (optional)") + ) + ) + ); + } + + @Test + public void unregisteringAllApplications() throws Exception { + registerApp(ApplicationType.source, "http", "1.2.0.RELEASE"); + registerApp(ApplicationType.source, "http", "1.3.0.RELEASE"); + this.mockMvc.perform( + delete("/apps")) + .andExpect(status().isOk() + ); + } } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/BaseDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/BaseDocumentation.java index 15771c1262..7a8c1bdc0d 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/BaseDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/BaseDocumentation.java @@ -16,6 +16,7 @@ package org.springframework.cloud.dataflow.server.rest.documentation; +import javax.sql.DataSource; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -23,8 +24,6 @@ import java.util.Map; import java.util.concurrent.Callable; -import javax.sql.DataSource; - import org.junit.Before; import org.junit.ClassRule; import org.junit.Rule; @@ -33,7 +32,14 @@ import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.cloud.dataflow.core.Launcher; import org.springframework.cloud.dataflow.core.TaskPlatform; +import org.springframework.cloud.dataflow.core.database.support.MultiSchemaIncrementerFactory; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; import org.springframework.cloud.dataflow.server.controller.TaskSchedulerController; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDaoContainer; +import org.springframework.cloud.dataflow.server.repository.JdbcDataflowTaskExecutionMetadataDao; +import org.springframework.cloud.dataflow.server.repository.support.SchemaUtilities; import org.springframework.cloud.dataflow.server.service.SchedulerService; import org.springframework.cloud.dataflow.server.single.LocalDataflowResource; import org.springframework.cloud.deployer.spi.app.ActuatorOperations; @@ -48,8 +54,11 @@ import org.springframework.cloud.skipper.domain.Status; import org.springframework.cloud.skipper.domain.StatusCode; import org.springframework.cloud.skipper.domain.VersionInfo; +import org.springframework.cloud.task.repository.support.DatabaseType; +import org.springframework.context.ApplicationContext; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; +import org.springframework.jdbc.support.MetaDataAccessException; import org.springframework.restdocs.JUnitRestDocumentation; import org.springframework.restdocs.mockmvc.RestDocumentationResultHandler; import org.springframework.test.util.ReflectionTestUtils; @@ -81,9 +90,10 @@ public abstract class BaseDocumentation { @ClassRule public final static LocalDataflowResource springDataflowServer = new LocalDataflowResource( - "classpath:rest-docs-config.yml", true, true, true, 
true, skipperServerPort); + "classpath:rest-docs-config.yml", true, true, true, true, skipperServerPort); + @Before - public void setupMocks() throws Exception{ + public void setupMocks() throws Exception { reset(springDataflowServer.getSkipperClient()); AboutResource about = new AboutResource(); @@ -120,13 +130,16 @@ public void setupMocks() throws Exception{ protected DataSource dataSource; - protected void prepareDocumentationTests(WebApplicationContext context) throws Exception{ + protected ApplicationContext context; + + protected void prepareDocumentationTests(WebApplicationContext context) throws Exception { + this.context = context; this.documentationHandler = document("{class-name}/{method-name}", preprocessResponse(prettyPrint())); this.documentation = new ToggleableResultHandler(documentationHandler); this.mockMvc = MockMvcBuilders.webAppContextSetup(context) .apply(documentationConfiguration(this.restDocumentation).uris().withPort(9393)) - .alwaysDo((ToggleableResultHandler)this.documentation).build(); + .alwaysDo((ToggleableResultHandler) this.documentation).build(); this.dataSource = springDataflowServer.getWebApplicationContext().getBean(DataSource.class); TaskSchedulerController controller = this.springDataflowServer.getWebApplicationContext().getBean(TaskSchedulerController.class); @@ -138,8 +151,9 @@ protected void prepareDocumentationTests(WebApplicationContext context) throws E /** * Can be used by subclasses to easily register dummy apps, as most endpoints require apps to be effective - * @param type the type of app to register - * @param name the name of the app to register + * + * @param type the type of app to register + * @param name the name of the app to register * @param version the version to register */ void registerApp(ApplicationType type, String name, String version) throws Exception { @@ -148,49 +162,71 @@ void registerApp(ApplicationType type, String name, String version) throws Excep documentation.dontDocument( () -> this.mockMvc.perform( - post(String.format("/apps/%s/%s/%s", type, name, version)) - .param("uri", String.format("maven://%s:%s-%s%s:%s", group, name, type, binder, version))) + post(String.format("/apps/%s/%s/%s", type, name, version)) + .param("uri", String.format("maven://%s:%s-%s%s:%s", group, name, type, binder, version))) .andExpect(status().isCreated()) ); } void unregisterApp(ApplicationType type, String name) throws Exception { documentation.dontDocument( - () -> this.mockMvc.perform( - delete(String.format("/apps/%s/%s", type, name)) - ) - .andExpect(status().isOk()) + () -> this.mockMvc.perform( + delete(String.format("/apps/%s/%s", type, name)) + ) + .andExpect(status().isOk()) ); } void unregisterApp(ApplicationType type, String name, String version) throws Exception { documentation.dontDocument( - () -> this.mockMvc.perform( - delete(String.format("/apps/%s/%s/%s", type, name, version)) - ) - .andExpect(status().isOk()) + () -> this.mockMvc.perform( + delete(String.format("/apps/%s/%s/%s", type, name, version)) + ) + .andExpect(status().isOk()) ); } - void createStream(String name, String definition, boolean deploy) throws Exception{ + void createStream(String name, String definition, boolean deploy) throws Exception { documentation.dontDocument( - () -> this.mockMvc.perform( - post("/streams/definitions") - .param("name", name) - .param("definition", definition) - .param("deploy", String.valueOf(deploy))) - .andExpect(status().isCreated()) + () -> this.mockMvc.perform( + post("/streams/definitions") + .param("name", name) + 
.param("definition", definition) + .param("deploy", String.valueOf(deploy))) + .andExpect(status().isCreated()) ); } - void destroyStream(String name) throws Exception{ + void destroyStream(String name) throws Exception { documentation.dontDocument( - () -> this.mockMvc.perform( - delete("/streams/definitions/{name}", name)) - .andExpect(status().isOk()) + () -> this.mockMvc.perform( + delete("/streams/definitions/{name}", name)) + .andExpect(status().isOk()) ); } + protected DataflowTaskExecutionMetadataDaoContainer createDataFlowTaskExecutionMetadataDaoContainer(SchemaService schemaService) { + DataflowTaskExecutionMetadataDaoContainer result = new DataflowTaskExecutionMetadataDaoContainer(); + MultiSchemaIncrementerFactory incrementerFactory = new MultiSchemaIncrementerFactory(dataSource); + String databaseType; + try { + databaseType = DatabaseType.fromMetaData(dataSource).name(); + } catch (MetaDataAccessException e) { + throw new IllegalStateException(e); + } + for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + DataflowTaskExecutionMetadataDao dao = new JdbcDataflowTaskExecutionMetadataDao( + dataSource, + incrementerFactory.getIncrementer(databaseType, + SchemaUtilities.getQuery("%PREFIX%EXECUTION_METADATA_SEQ", target.getTaskPrefix()) + ), + target.getTaskPrefix() + ); + result.add(target.getName(), dao); + } + return result; + } + /** * A {@link ResultHandler} that can be turned off and on. * @@ -241,8 +277,8 @@ public SchedulerService schedulerService() { return new SchedulerService() { @Override public void schedule(String scheduleName, String taskDefinitionName, - Map taskProperties, List commandLineArgs, - String platformName) { + Map taskProperties, List commandLineArgs, + String platformName) { } @Override @@ -265,7 +301,7 @@ public void unscheduleForTaskDefinition(String taskDefinitionName) { @Override public List list(Pageable pageable, String taskDefinitionName, - String platformName) { + String platformName) { return null; } @@ -346,4 +382,4 @@ public List list() { } }; } - } +} diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobExecutionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobExecutionsDocumentation.java index ee3257a325..5042bf4596 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobExecutionsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobExecutionsDocumentation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2020 the original author or authors. + * Copyright 2017-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -30,26 +30,27 @@ import org.springframework.batch.core.JobParameter; import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean; -import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory; -import org.springframework.batch.item.database.support.DefaultDataFieldMaxValueIncrementerFactory; import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.cloud.dataflow.core.TaskManifest; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao; -import org.springframework.cloud.dataflow.server.repository.JdbcDataflowTaskExecutionMetadataDao; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDaoContainer; +import org.springframework.cloud.dataflow.server.repository.JobRepositoryContainer; +import org.springframework.cloud.dataflow.server.repository.TaskBatchDaoContainer; +import org.springframework.cloud.dataflow.server.repository.TaskExecutionDaoContainer; import org.springframework.cloud.task.batch.listener.TaskBatchDao; -import org.springframework.cloud.task.batch.listener.support.JdbcTaskBatchDao; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.dao.TaskExecutionDao; -import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean; import org.springframework.http.MediaType; import org.springframework.jdbc.core.JdbcTemplate; -import org.springframework.jdbc.datasource.DataSourceTransactionManager; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.junit4.SpringRunner; +import static org.assertj.core.api.Assertions.assertThat; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.put; @@ -62,24 +63,36 @@ import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + /** * Documentation for the /jobs/executions endpoint. 
* * @author Glenn Renfro + * @author Corneil du Plessis */ @RunWith(SpringRunner.class) -@SpringBootTest(classes = { EmbeddedDataSourceConfiguration.class }) +@SpringBootTest(classes = {EmbeddedDataSourceConfiguration.class}) @DirtiesContext public class JobExecutionsDocumentation extends BaseDocumentation { private final static String JOB_NAME = "DOCJOB"; private static boolean initialized; - private JobRepository jobRepository; - private TaskExecutionDao dao; - private TaskBatchDao taskBatchDao; + + private JobRepositoryContainer jobRepositoryContainer; + + private TaskExecutionDaoContainer daoContainer; + + private TaskBatchDaoContainer taskBatchDaoContainer; + private JdbcTemplate jdbcTemplate; + private DataflowTaskExecutionMetadataDaoContainer dataflowTaskExecutionMetadataDaoContainer; + + private AggregateExecutionSupport aggregateExecutionSupport; + + private TaskDefinitionReader taskDefinitionReader; + @Before public void setup() throws Exception { @@ -98,9 +111,9 @@ public void setup() throws Exception { 1, 1, "2", JOB_NAME + "_1", "default", new Date()); documentation.dontDocument(() -> this.mockMvc.perform( - post("/tasks/definitions") - .param("name", "DOCJOB1") - .param("definition", "timestamp --format='YYYY MM DD'")) + post("/tasks/definitions") + .param("name", "DOCJOB1") + .param("definition", "timestamp --format='YYYY MM DD'")) .andExpect(status().isOk())); initialized = true; @@ -110,251 +123,273 @@ public void setup() throws Exception { @Test public void listJobExecutions() throws Exception { this.mockMvc.perform( - get("/jobs/executions") - .param("page", "0") - .param("size", "10")) + get("/jobs/executions") + .param("page", "0") + .param("size", "10")) .andDo(print()) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)")), - responseFields( - subsectionWithPath("_embedded.jobExecutionResourceList") - .description("Contains a collection of Job Executions/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - subsectionWithPath("page").description("Pagination properties") + requestParameters( + parameterWithName("page") + .description("The zero-based page number (optional)"), + parameterWithName("size") + .description("The requested page size (optional)")), + responseFields( + subsectionWithPath("_embedded.jobExecutionResourceList") + .description("Contains a collection of Job Executions/"), + subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties") ))); } @Test public void listThinJobExecutions() throws Exception { this.mockMvc.perform( - get("/jobs/thinexecutions") - .param("page", "0") - .param("size", "10")) + get("/jobs/thinexecutions") + .param("page", "0") + .param("size", "10")) .andDo(print()) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)")), - responseFields( - subsectionWithPath("_embedded.jobExecutionThinResourceList") - .description("Contains a collection of Job Executions without step executions included/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - 
subsectionWithPath("page").description("Pagination properties") - ))); + requestParameters( + parameterWithName("page") + .description("The zero-based page number (optional)"), + parameterWithName("size") + .description("The requested page size (optional)")), + responseFields( + subsectionWithPath("_embedded.jobExecutionThinResourceList") + .description("Contains a collection of Job Executions without step executions included/"), + subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties") + ))); } @Test public void listThinJobExecutionsByJobInstanceId() throws Exception { this.mockMvc.perform( - get("/jobs/thinexecutions") - .param("page", "0") - .param("size", "10") - .param("jobInstanceId", "1")) + get("/jobs/thinexecutions") + .param("page", "0") + .param("size", "10") + .param("jobInstanceId", "1")) .andDo(print()) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)"), - parameterWithName("jobInstanceId") - .description("Filter result by the job instance id")), - responseFields( - subsectionWithPath("_embedded.jobExecutionThinResourceList") - .description("Contains a collection of Job Executions without step executions included/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - subsectionWithPath("page").description("Pagination properties") - ))); + requestParameters( + parameterWithName("page") + .description("The zero-based page number (optional)"), + parameterWithName("size") + .description("The requested page size (optional)"), + parameterWithName("jobInstanceId") + .description("Filter result by the job instance id")), + responseFields( + subsectionWithPath("_embedded.jobExecutionThinResourceList") + .description("Contains a collection of Job Executions without step executions included/"), + subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties") + ))); } @Test public void listThinJobExecutionsByTaskExecutionId() throws Exception { this.mockMvc.perform( - get("/jobs/thinexecutions") - .param("page", "0") - .param("size", "10") - .param("taskExecutionId", "1")) + get("/jobs/thinexecutions") + .param("page", "0") + .param("size", "10") + .param("taskExecutionId", "1")) .andDo(print()) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)"), - parameterWithName("taskExecutionId") - .description("Filter result by the task execution id")), - responseFields( - subsectionWithPath("_embedded.jobExecutionThinResourceList") - .description("Contains a collection of Job Executions without step executions included/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - subsectionWithPath("page").description("Pagination properties") - ))); + requestParameters( + parameterWithName("page") + .description("The zero-based page number (optional)"), + parameterWithName("size") + .description("The requested page size (optional)"), + parameterWithName("taskExecutionId") + .description("Filter result by the task execution id")), + 
responseFields( + subsectionWithPath("_embedded.jobExecutionThinResourceList") + .description("Contains a collection of Job Executions without step executions included/"), + subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties") + ))); } @Test public void listThinJobExecutionsByDate() throws Exception { this.mockMvc.perform( - get("/jobs/thinexecutions") - .param("page", "0") - .param("size", "10") - .param("fromDate", "2000-09-24T17:00:45,000") - .param("toDate", "2050-09-24T18:00:45,000")) + get("/jobs/thinexecutions") + .param("page", "0") + .param("size", "10") + .param("fromDate", "2000-09-24T17:00:45,000") + .param("toDate", "2050-09-24T18:00:45,000")) .andDo(print()) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)"), - parameterWithName("fromDate") - .description("Filter result from a starting date in the format 'yyyy-MM-dd'T'HH:mm:ss,SSS'"), - parameterWithName("toDate") - .description("Filter result up to the `to` date in the format 'yyyy-MM-dd'T'HH:mm:ss,SSS'")), - responseFields( - subsectionWithPath("_embedded.jobExecutionThinResourceList") - .description("Contains a collection of Job Executions without step executions included/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - subsectionWithPath("page").description("Pagination properties") - ))); + requestParameters( + parameterWithName("page") + .description("The zero-based page number (optional)"), + parameterWithName("size") + .description("The requested page size (optional)"), + parameterWithName("fromDate") + .description("Filter result from a starting date in the format 'yyyy-MM-dd'T'HH:mm:ss,SSS'"), + parameterWithName("toDate") + .description("Filter result up to the `to` date in the format 'yyyy-MM-dd'T'HH:mm:ss,SSS'")), + responseFields( + subsectionWithPath("_embedded.jobExecutionThinResourceList") + .description("Contains a collection of Job Executions without step executions included/"), + subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties") + ))); } @Test public void listJobExecutionsByName() throws Exception { this.mockMvc.perform( - get("/jobs/executions") - .param("name", JOB_NAME) - .param("page", "0") - .param("size", "10")) + get("/jobs/executions") + .param("name", JOB_NAME) + .param("page", "0") + .param("size", "10")) .andDo(print()) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)"), - parameterWithName("name") - .description("The name associated with the job execution")), - responseFields( - subsectionWithPath("_embedded.jobExecutionResourceList") - .description("Contains a collection of Job Executions/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - subsectionWithPath("page").description("Pagination properties") + requestParameters( + parameterWithName("page") + .description("The zero-based page number (optional)"), + parameterWithName("size") + .description("The requested page size (optional)"), + 
parameterWithName("name") + .description("The name associated with the job execution")), + responseFields( + subsectionWithPath("_embedded.jobExecutionResourceList") + .description("Contains a collection of Job Executions/"), + subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties") ))); } @Test public void listThinJobExecutionsByName() throws Exception { this.mockMvc.perform( - get("/jobs/thinexecutions") - .param("name", JOB_NAME) - .param("page", "0") - .param("size", "10")) + get("/jobs/thinexecutions") + .param("name", JOB_NAME) + .param("page", "0") + .param("size", "10")) .andDo(print()) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)"), - parameterWithName("name") - .description("The name associated with the job execution")), - responseFields( - subsectionWithPath("_embedded.jobExecutionThinResourceList") - .description("Contains a collection of Job Executions without step executions included/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - subsectionWithPath("page").description("Pagination properties") - ))); + requestParameters( + parameterWithName("page") + .description("The zero-based page number (optional)"), + parameterWithName("size") + .description("The requested page size (optional)"), + parameterWithName("name") + .description("The name associated with the job execution")), + responseFields( + subsectionWithPath("_embedded.jobExecutionThinResourceList") + .description("Contains a collection of Job Executions without step executions included/"), + subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties") + ))); } @Test public void jobDisplayDetail() throws Exception { this.mockMvc.perform( - get("/jobs/executions/{id}", "2")) - .andDo(print()) - .andExpect(status().isOk()) - .andDo(this.documentationHandler.document( - pathParameters( - parameterWithName("id").description("The id of an existing job execution (required)") - ), - responseFields( - fieldWithPath("executionId").description("The execution ID of the job execution"), - fieldWithPath("stepExecutionCount").description("the number of step of the job execution"), - fieldWithPath("jobId").description("The job ID of the job execution"), - fieldWithPath("taskExecutionId").description("The task execution ID of the job execution"), - fieldWithPath("name").description("The name of the job execution"), - fieldWithPath("startDate").description("The start date of the job execution"), - fieldWithPath("startTime").description("The start time of the job execution"), - fieldWithPath("duration").description("The duration of the job execution"), - fieldWithPath("jobParameters").description("The parameters of the job execution"), - fieldWithPath("jobParametersString").description("The parameters string of the job execution"), - fieldWithPath("restartable").description("The status restartable of the job execution"), - fieldWithPath("abandonable").description("The status abandonable of the job execution"), - fieldWithPath("stoppable").description("The status stoppable of the job execution"), - fieldWithPath("defined").description("The status defined of the job execution"), - 
fieldWithPath("timeZone").description("The time zone of the job execution"), - subsectionWithPath("jobExecution").description("The details of the job execution"), - subsectionWithPath("jobParameters").description("The job parameters associated with the job execution"), - subsectionWithPath("_links.self").description("Link to the stream definition resource") + get("/jobs/executions/{id}", "2") + .queryParam("schemaTarget", "boot2") ) - )); + .andDo(print()) + .andExpect(status().isOk()) + .andDo(this.documentationHandler.document( + pathParameters( + parameterWithName("id").description("The id of an existing job execution (required)") + ), + requestParameters( + parameterWithName("schemaTarget").description("Schema Target to the Job.").optional() + ), + responseFields( + fieldWithPath("executionId").description("The execution ID of the job execution"), + fieldWithPath("stepExecutionCount").description("the number of step of the job execution"), + fieldWithPath("jobId").description("The job ID of the job execution"), + fieldWithPath("taskExecutionId").description("The task execution ID of the job execution"), + fieldWithPath("name").description("The name of the job execution"), + fieldWithPath("startDate").description("The start date of the job execution"), + fieldWithPath("startTime").description("The start time of the job execution"), + fieldWithPath("duration").description("The duration of the job execution"), + fieldWithPath("jobParameters").description("The parameters of the job execution"), + fieldWithPath("jobParametersString").description("The parameters string of the job execution"), + fieldWithPath("restartable").description("The status restartable of the job execution"), + fieldWithPath("abandonable").description("The status abandonable of the job execution"), + fieldWithPath("stoppable").description("The status stoppable of the job execution"), + fieldWithPath("defined").description("The status defined of the job execution"), + fieldWithPath("timeZone").description("The time zone of the job execution"), + fieldWithPath("schemaTarget").description("The schema target of the job execution"), + subsectionWithPath("jobExecution").description("The details of the job execution"), + subsectionWithPath("jobParameters").description("The job parameters associated with the job execution"), + subsectionWithPath("_links.self").description("Link to the stream definition resource"), + subsectionWithPath("_links.stop").description("Link to stopping the job"), + subsectionWithPath("_links.restart").description("Link to restarting the job") + ) + )); } @Test public void jobStop() throws Exception { - this.mockMvc.perform(put("/jobs/executions/{id}", "1").accept(MediaType.APPLICATION_JSON).param("stop", "true")) + this.mockMvc.perform(put("/jobs/executions/{id}", "1") + .param("stop", "true") + .queryParam("schemaTarget", "boot2") + ) .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters(parameterWithName("id") .description("The id of an existing job execution (required)")) , requestParameters( + parameterWithName("schemaTarget").description("The schema target of the job execution").optional(), parameterWithName("stop") .description("Sends signal to stop the job if set to true")))); } @Test public void jobRestart() throws Exception { - this.mockMvc.perform(put("/jobs/executions/{id}", "2").accept(MediaType.APPLICATION_JSON).param("restart", "true")) + this.mockMvc.perform(put("/jobs/executions/{id}", "2") + .param("restart", "true") + 
.queryParam("schemaTarget", "boot2") + ) .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( - pathParameters(parameterWithName("id") - .description("The id of an existing job execution (required)")) - , requestParameters( - parameterWithName("restart") - .description("Sends signal to restart the job if set to true")))); + pathParameters(parameterWithName("id") + .description("The id of an existing job execution (required)")) + , requestParameters( + parameterWithName("schemaTarget").description("The schema target of the job execution").optional(), + parameterWithName("restart") + .description("Sends signal to restart the job if set to true") + ) + ) + ); } private void initialize() throws Exception { - JobRepositoryFactoryBean repositoryFactoryBean = new JobRepositoryFactoryBean(); - repositoryFactoryBean.setDataSource(this.dataSource); - repositoryFactoryBean.setTransactionManager(new DataSourceTransactionManager(this.dataSource)); - this.jobRepository = repositoryFactoryBean.getObject(); - this.dao = (new TaskExecutionDaoFactoryBean(this.dataSource)).getObject(); - this.taskBatchDao = new JdbcTaskBatchDao(this.dataSource); + this.daoContainer = context.getBean(TaskExecutionDaoContainer.class); + this.taskBatchDaoContainer = context.getBean(TaskBatchDaoContainer.class); + this.jobRepositoryContainer = context.getBean(JobRepositoryContainer.class); + this.dataflowTaskExecutionMetadataDaoContainer = context.getBean(DataflowTaskExecutionMetadataDaoContainer.class); + this.aggregateExecutionSupport = context.getBean(AggregateExecutionSupport.class); + this.taskDefinitionReader = context.getBean(TaskDefinitionReader.class); + } private void createJobExecution(String name, BatchStatus status) { - TaskExecution taskExecution = this.dao.createTaskExecution(name, new Date(), Collections.singletonList("--spring.cloud.data.flow.platformname=default"), null); + SchemaVersionTarget schemaVersionTarget = this.aggregateExecutionSupport.findSchemaVersionTarget(name, taskDefinitionReader); + TaskExecutionDao dao = this.daoContainer.get(schemaVersionTarget.getName()); + TaskExecution taskExecution = dao.createTaskExecution(name, new Date(), Collections.singletonList("--spring.cloud.data.flow.platformname=default"), null); Map jobParameterMap = new HashMap<>(); JobParameters jobParameters = new JobParameters(jobParameterMap); - JobExecution jobExecution = this.jobRepository.createJobExecution(this.jobRepository.createJobInstance(name, new JobParameters()), jobParameters, null); - this.taskBatchDao.saveRelationship(taskExecution, jobExecution); + JobRepository jobRepository = this.jobRepositoryContainer.get(schemaVersionTarget.getName()); + JobExecution jobExecution = jobRepository.createJobExecution(jobRepository.createJobInstance(name, new JobParameters()), jobParameters, null); + TaskBatchDao taskBatchDao = this.taskBatchDaoContainer.get(schemaVersionTarget.getName()); + taskBatchDao.saveRelationship(taskExecution, jobExecution); jobExecution.setStatus(status); jobExecution.setStartTime(new Date()); - this.jobRepository.update(jobExecution); - TaskManifest manifest = new TaskManifest(); + jobRepository.update(jobExecution); + final TaskManifest manifest = new TaskManifest(); manifest.setPlatformName("default"); - DataFieldMaxValueIncrementerFactory incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory(dataSource); - - DataflowTaskExecutionMetadataDao metadataDao = new JdbcDataflowTaskExecutionMetadataDao( - dataSource, 
incrementerFactory.getIncrementer("h2", "task_execution_metadata_seq")); + DataflowTaskExecutionMetadataDao metadataDao = dataflowTaskExecutionMetadataDaoContainer.get(schemaVersionTarget.getName()); + assertThat(metadataDao).isNotNull(); TaskManifest taskManifest = new TaskManifest(); taskManifest.setPlatformName("default"); metadataDao.save(taskExecution, taskManifest); diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobInstancesDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobInstancesDocumentation.java index c789368bc3..058429b8f1 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobInstancesDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobInstancesDocumentation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -27,16 +27,19 @@ import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean; import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; import org.springframework.cloud.dataflow.core.ApplicationType; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.server.repository.JobRepositoryContainer; +import org.springframework.cloud.dataflow.server.repository.TaskBatchDaoContainer; +import org.springframework.cloud.dataflow.server.repository.TaskExecutionDaoContainer; import org.springframework.cloud.task.batch.listener.TaskBatchDao; -import org.springframework.cloud.task.batch.listener.support.JdbcTaskBatchDao; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.dao.TaskExecutionDao; -import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean; -import org.springframework.jdbc.datasource.DataSourceTransactionManager; + import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.junit4.SpringRunner; @@ -54,6 +57,7 @@ * Documentation for the /jobs/instances endpoint. 
* * @author Glenn Renfro + * @author Corneil du Plessis */ @RunWith(SpringRunner.class) @SpringBootTest(classes = { EmbeddedDataSourceConfiguration.class }) @@ -63,9 +67,11 @@ public class JobInstancesDocumentation extends BaseDocumentation { private final static String JOB_NAME = "DOCJOB"; private static boolean initialized; - private JobRepository jobRepository; - private TaskExecutionDao dao; - private TaskBatchDao taskBatchDao; + private JobRepositoryContainer jobRepositoryContainer; + private TaskExecutionDaoContainer daoContainer; + private TaskBatchDaoContainer taskBatchDaoContainer; + private AggregateExecutionSupport aggregateExecutionSupport; + private TaskDefinitionReader taskDefinitionReader; @Before public void setup() throws Exception { @@ -103,13 +109,16 @@ public void listJobInstances() throws Exception { @Test public void jobDisplayDetail() throws Exception { this.mockMvc.perform( - get("/jobs/instances/{id}", "1")) + get("/jobs/instances/{id}", "1").queryParam("schemaTarget", "boot2")) .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters( parameterWithName("id").description("The id of an existing job instance (required)") ), + requestParameters( + parameterWithName("schemaTarget").description("Schema target").optional() + ), responseFields( fieldWithPath("jobName").description("The name of the job instance"), fieldWithPath("jobInstanceId").description("The ID of the job instance"), @@ -120,21 +129,24 @@ public void jobDisplayDetail() throws Exception { } - private void initialize() throws Exception { - JobRepositoryFactoryBean repositoryFactoryBean = new JobRepositoryFactoryBean(); - repositoryFactoryBean.setDataSource(this.dataSource); - repositoryFactoryBean.setTransactionManager(new DataSourceTransactionManager(this.dataSource)); - this.jobRepository = repositoryFactoryBean.getObject(); - this.dao = (new TaskExecutionDaoFactoryBean(this.dataSource)).getObject(); - this.taskBatchDao = new JdbcTaskBatchDao(this.dataSource); + private void initialize() { + this.taskDefinitionReader = context.getBean(TaskDefinitionReader.class); + this.aggregateExecutionSupport = context.getBean(AggregateExecutionSupport.class); + this.jobRepositoryContainer = context.getBean(JobRepositoryContainer.class); + this.daoContainer = context.getBean(TaskExecutionDaoContainer.class); + this.taskBatchDaoContainer = context.getBean(TaskBatchDaoContainer.class); } private void createJobExecution(String name, BatchStatus status) { - TaskExecution taskExecution = this.dao.createTaskExecution(name, new Date(), new ArrayList<>(), null); - JobExecution jobExecution = this.jobRepository.createJobExecution(this.jobRepository.createJobInstance(name, new JobParameters()), new JobParameters(), null); - this.taskBatchDao.saveRelationship(taskExecution, jobExecution); + SchemaVersionTarget schemaVersionTarget = this.aggregateExecutionSupport.findSchemaVersionTarget(name, taskDefinitionReader); + TaskExecutionDao dao = this.daoContainer.get(schemaVersionTarget.getName()); + TaskExecution taskExecution = dao.createTaskExecution(name, new Date(), new ArrayList<>(), null); + JobRepository jobRepository = this.jobRepositoryContainer.get(schemaVersionTarget.getName()); + JobExecution jobExecution = jobRepository.createJobExecution(jobRepository.createJobInstance(name, new JobParameters()), new JobParameters(), null); + TaskBatchDao taskBatchDao = this.taskBatchDaoContainer.get(schemaVersionTarget.getName()); + taskBatchDao.saveRelationship(taskExecution, jobExecution); 
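+ // Editorial note: the task execution, the job execution, and their relationship are all created
+ // through DAOs resolved from the same SchemaVersionTarget, so the rows land in that one schema
+ // target's tables and the generated ids are only unique within that target, not across the
+ // boot2 and boot3 schemas.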
jobExecution.setStatus(status); jobExecution.setStartTime(new Date()); - this.jobRepository.update(jobExecution); + jobRepository.update(jobExecution); } } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobStepExecutionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobStepExecutionsDocumentation.java index 54b2a37e15..05d6c6c773 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobStepExecutionsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/JobStepExecutionsDocumentation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2020 the original author or authors. + * Copyright 2017-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -28,16 +28,18 @@ import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.StepExecution; import org.springframework.batch.core.repository.JobRepository; -import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean; import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; import org.springframework.cloud.dataflow.core.ApplicationType; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.server.repository.JobRepositoryContainer; +import org.springframework.cloud.dataflow.server.repository.TaskBatchDaoContainer; +import org.springframework.cloud.dataflow.server.repository.TaskExecutionDaoContainer; import org.springframework.cloud.task.batch.listener.TaskBatchDao; -import org.springframework.cloud.task.batch.listener.support.JdbcTaskBatchDao; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.dao.TaskExecutionDao; -import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean; -import org.springframework.jdbc.datasource.DataSourceTransactionManager; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.junit4.SpringRunner; @@ -55,18 +57,26 @@ * Documentation for the /jobs/executions/{id}/steps endpoint. 
* * @author Glenn Renfro + * @author Corneil du Plessis */ @RunWith(SpringRunner.class) -@SpringBootTest(classes = { EmbeddedDataSourceConfiguration.class }) +@SpringBootTest(classes = {EmbeddedDataSourceConfiguration.class}) @DirtiesContext public class JobStepExecutionsDocumentation extends BaseDocumentation { private final static String JOB_NAME = "DOCJOB"; private static boolean initialized; - private JobRepository jobRepository; - private TaskExecutionDao dao; - private TaskBatchDao taskBatchDao; + + private JobRepositoryContainer jobRepositoryContainer; + + private TaskExecutionDaoContainer daoContainer; + + private TaskBatchDaoContainer taskBatchDaoContainer; + + private AggregateExecutionSupport aggregateExecutionSupport; + + private TaskDefinitionReader taskDefinitionReader; @Before public void setup() throws Exception { @@ -76,9 +86,9 @@ public void setup() throws Exception { createJobExecution(JOB_NAME, BatchStatus.STARTED); documentation.dontDocument(() -> this.mockMvc.perform( - post("/tasks/definitions") - .param("name", "DOCJOB1") - .param("definition", "timestamp --format='YYYY MM DD'")) + post("/tasks/definitions") + .param("name", "DOCJOB1") + .param("definition", "timestamp --format='YYYY MM DD'")) .andExpect(status().isOk())); initialized = true; @@ -89,84 +99,93 @@ public void setup() throws Exception { @Test public void listStepExecutionsForJob() throws Exception { this.mockMvc.perform( - get("/jobs/executions/{id}/steps", "1") - .param("page", "0") - .param("size", "10")) + get("/jobs/executions/{id}/steps", "1") + .param("page", "0") + .param("size", "10")) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)")), - pathParameters(parameterWithName("id") - .description("The id of an existing job execution (required)")), - responseFields( - subsectionWithPath("_embedded.stepExecutionResourceList") - .description("Contains a collection of Step Executions/"), - subsectionWithPath("_links.self").description("Link to the job execution resource"), - subsectionWithPath("page").description("Pagination properties")))); + requestParameters( + parameterWithName("page") + .description("The zero-based page number (optional)"), + parameterWithName("size") + .description("The requested page size (optional)")), + pathParameters(parameterWithName("id") + .description("The id of an existing job execution (required)")), + responseFields( + subsectionWithPath("_embedded.stepExecutionResourceList") + .description("Contains a collection of Step Executions"), + subsectionWithPath("_links.self").description("Link to the job execution resource"), + subsectionWithPath("page").description("Pagination properties")))); } @Test public void stepDetail() throws Exception { this.mockMvc.perform( - get("/jobs/executions/{id}/steps/{stepid}", "1", "1")) - .andExpect(status().isOk()).andDo(this.documentationHandler.document( - pathParameters( - parameterWithName("id").description("The id of an existing job execution (required)"), - parameterWithName("stepid") - .description("The id of an existing step execution for a specific job execution (required)") - ), - responseFields( - fieldWithPath("jobExecutionId").description("The ID of the job step execution"), - fieldWithPath("stepType").description("The type of the job step execution"), - subsectionWithPath("stepExecution").description("The step details of 
the job step execution"), - subsectionWithPath("_links.self").description("Link to the job step execution resource") - ) - )); + get("/jobs/executions/{id}/steps/{stepid}", "1", "1").queryParam("schemaTarget", "boot2")) + .andExpect(status().isOk()).andDo(this.documentationHandler.document( + pathParameters( + parameterWithName("id").description("The id of an existing job execution (required)"), + parameterWithName("stepid") + .description("The id of an existing step execution for a specific job execution (required)") + ), + requestParameters( + parameterWithName("schemaTarget").description("Schema target").optional() + ), + responseFields( + fieldWithPath("jobExecutionId").description("The ID of the job step execution"), + fieldWithPath("stepType").description("The type of the job step execution"), + fieldWithPath("schemaTarget").description("The schema target name of the job and task state data"), + subsectionWithPath("stepExecution").description("The step details of the job step execution"), + subsectionWithPath("_links.self").description("Link to the job step execution resource"), + subsectionWithPath("_links.progress").description("Link to retrieve the progress") + ) + )); } @Test public void stepProgress() throws Exception { this.mockMvc.perform( - get("/jobs/executions/{id}/steps/{stepid}/progress", "1", "1")) - .andExpect(status().isOk()).andDo(this.documentationHandler.document( - pathParameters( - parameterWithName("id").description("The id of an existing job execution (required)"), - parameterWithName("stepid") - .description("The id of an existing step execution for a specific job execution (required)") - ), - responseFields( - subsectionWithPath("stepExecution").description("The detailed step details of the job step execution"), - subsectionWithPath("stepExecutionHistory") - .description("The history of the job step execution"), - fieldWithPath("percentageComplete").description("The percentage complete of the job step execution"), - fieldWithPath("finished").description("The status finished of the job step execution"), - fieldWithPath("duration").description("The duration of the job step execution"), - subsectionWithPath("_links.self").description("Link to the job step execution resource") - ) - )); + get("/jobs/executions/{id}/steps/{stepid}/progress", "1", "1")) + .andExpect(status().isOk()).andDo(this.documentationHandler.document( + pathParameters( + parameterWithName("id").description("The id of an existing job execution (required)"), + parameterWithName("stepid") + .description("The id of an existing step execution for a specific job execution (required)") + ), + responseFields( + subsectionWithPath("stepExecution").description("The detailed step details of the job step execution"), + subsectionWithPath("stepExecutionHistory") + .description("The history of the job step execution"), + fieldWithPath("percentageComplete").description("The percentage complete of the job step execution"), + fieldWithPath("finished").description("The status finished of the job step execution"), + fieldWithPath("duration").description("The duration of the job step execution"), + subsectionWithPath("_links.self").description("Link to the job step execution resource"), + subsectionWithPath("_links.progress").description("Link to the job step progress") + ) + )); } private void initialize() throws Exception { - JobRepositoryFactoryBean repositoryFactoryBean = new JobRepositoryFactoryBean(); - repositoryFactoryBean.setDataSource(this.dataSource); - repositoryFactoryBean.setTransactionManager(new 
DataSourceTransactionManager(this.dataSource)); - this.jobRepository = repositoryFactoryBean.getObject(); - this.dao = (new TaskExecutionDaoFactoryBean(this.dataSource)).getObject(); - this.taskBatchDao = new JdbcTaskBatchDao(this.dataSource); + this.aggregateExecutionSupport = context.getBean(AggregateExecutionSupport.class); + this.jobRepositoryContainer = context.getBean(JobRepositoryContainer.class); + this.daoContainer = context.getBean(TaskExecutionDaoContainer.class); + this.taskBatchDaoContainer = context.getBean(TaskBatchDaoContainer.class); + this.taskDefinitionReader = context.getBean(TaskDefinitionReader.class); } private void createJobExecution(String name, BatchStatus status) { - TaskExecution taskExecution = this.dao.createTaskExecution(name, new Date(), new ArrayList<>(), null); - JobExecution jobExecution = this.jobRepository.createJobExecution(this.jobRepository.createJobInstance(name, new JobParameters()), new JobParameters(), null); + SchemaVersionTarget schemaVersionTarget = this.aggregateExecutionSupport.findSchemaVersionTarget(name, taskDefinitionReader); + TaskExecutionDao dao = this.daoContainer.get(schemaVersionTarget.getName()); + TaskExecution taskExecution = dao.createTaskExecution(name, new Date(), new ArrayList<>(), null); + JobRepository jobRepository = this.jobRepositoryContainer.get(schemaVersionTarget.getName()); + JobExecution jobExecution = jobRepository.createJobExecution(jobRepository.createJobInstance(name, new JobParameters()), new JobParameters(), null); StepExecution stepExecution = new StepExecution(name + "_STEP", jobExecution, jobExecution.getId()); stepExecution.setId(null); jobRepository.add(stepExecution); - this.taskBatchDao.saveRelationship(taskExecution, jobExecution); + TaskBatchDao taskBatchDao = taskBatchDaoContainer.get(schemaVersionTarget.getName()); + taskBatchDao.saveRelationship(taskExecution, jobExecution); jobExecution.setStatus(status); jobExecution.setStartTime(new Date()); - this.jobRepository.update(jobExecution); + jobRepository.update(jobExecution); } } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskExecutionsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskExecutionsDocumentation.java index 7fb6426756..8ef1e6e219 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskExecutionsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskExecutionsDocumentation.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2019 the original author or authors. + * Copyright 2017-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
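The tests that follow document the new POST /tasks/executions/launch endpoint, which returns the execution id together with the schema target of its state data. A minimal client-side sketch of that call, assuming a Data Flow server on localhost:9393 and a registered definition named taskA (URL, port, and names are illustrative; the response carries the documented taskId and schemaTarget fields):

import org.springframework.http.ResponseEntity;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

public class LaunchRequestSketch {
	public static void main(String[] args) {
		RestTemplate rest = new RestTemplate();
		MultiValueMap<String, String> form = new LinkedMultiValueMap<>();
		form.add("name", "taskA");                             // the task definition to launch
		form.add("arguments", "--server.port=8080 --foo=bar"); // optional command line arguments
		ResponseEntity<String> response = rest.postForEntity(
				"http://localhost:9393/tasks/executions/launch", form, String.class);
		// expect 201 Created with a body containing taskId and schemaTarget
		System.out.println(response.getStatusCode() + ": " + response.getBody());
	}
}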
@@ -16,6 +16,10 @@ package org.springframework.cloud.dataflow.server.rest.documentation; +import java.util.concurrent.atomic.AtomicReference; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.After; import org.junit.Before; import org.junit.FixMethodOrder; @@ -23,6 +27,7 @@ import org.junit.runners.MethodSorters; import org.springframework.cloud.dataflow.core.ApplicationType; +import org.springframework.test.web.servlet.MvcResult; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.delete; import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get; @@ -43,6 +48,7 @@ * @author Glenn Renfro * @author David Turanski * @author Gunnar Hillert + * @author Corneil du Plessis */ @FixMethodOrder(MethodSorters.NAME_ASCENDING) public class TaskExecutionsDocumentation extends BaseDocumentation { @@ -52,59 +58,98 @@ public void setup() throws Exception { registerApp(ApplicationType.task, "timestamp", "1.2.0.RELEASE"); createTaskDefinition("taskA"); createTaskDefinition("taskB"); - + executeTask("taskA", "timestamp"); + executeTask("taskB", "timestamp"); } + @After public void tearDown() throws Exception { + cleanupTaskExecutions("taskA"); + cleanupTaskExecutions("taskB"); destroyTaskDefinition("taskA"); destroyTaskDefinition("taskB"); unregisterApp(ApplicationType.task, "timestamp"); } + @Test + public void launchTaskBoot3() throws Exception { + this.mockMvc.perform( + post("/tasks/executions/launch") + .param("name", "taskA") + .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") + .param("arguments", "--server.port=8080 --foo=bar") + ) + .andExpect(status().isCreated()) + .andDo(this.documentationHandler.document( + requestParameters( + parameterWithName("name").description("The name of the task definition to launch"), + parameterWithName("properties") + .description("Application and Deployer properties to use while launching. (optional)"), + parameterWithName("arguments") + .description("Command line arguments to pass to the task. 
(optional)")), + responseFields( + fieldWithPath("taskId").description("The id of the task execution"), + fieldWithPath("schemaTarget").description("The schema target of the task state data"), + subsectionWithPath("_links.self").description("Link to the task execution resource"), + subsectionWithPath("_links.tasks/logs").type(fieldWithPath("_links.tasks/logs").ignored().optional()).description("Link to the task execution logs").optional() + ) + ) + ); + } + @Test public void launchTask() throws Exception { this.mockMvc.perform( - post("/tasks/executions") - .param("name", "taskA") - .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") - .param("arguments", "--server.port=8080 --foo=bar")) + post("/tasks/executions") + .param("name", "taskA") + .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") + .param("arguments", "--server.port=8080 --foo=bar") + ) .andExpect(status().isCreated()) .andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("name").description("The name of the task definition to launch"), - parameterWithName("properties").optional() - .description("Application and Deployer properties to use while launching"), - parameterWithName("arguments").optional() - .description("Command line arguments to pass to the task")))); + requestParameters( + parameterWithName("name").description("The name of the task definition to launch"), + parameterWithName("properties") + .description("Application and Deployer properties to use while launching. (optional)"), + parameterWithName("arguments") + .description("Command line arguments to pass to the task. (optional)") + ) + ) + ); } @Test public void getTaskCurrentCount() throws Exception { this.mockMvc.perform( - get("/tasks/executions/current")) + get("/tasks/executions/current") + ) .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( - responseFields( - fieldWithPath("[].name").description("The name of the platform instance (account)"), - fieldWithPath("[].type").description("The platform type"), - fieldWithPath("[].maximumTaskExecutions").description("The number of maximum task execution"), - fieldWithPath("[].runningExecutionCount").description("The number of running executions") - ) + responseFields( + fieldWithPath("[].name").description("The name of the platform instance (account)"), + fieldWithPath("[].type").description("The platform type"), + fieldWithPath("[].maximumTaskExecutions").description("The number of maximum task execution"), + fieldWithPath("[].runningExecutionCount").description("The number of running executions") + ) )); } @Test - public void launchTaskDisplayDetail() throws Exception { + public void getTaskDisplayDetail() throws Exception { this.mockMvc.perform( - get("/tasks/executions/{id}", "1")) + get("/tasks/executions/{id}", "1").queryParam("schemaTarget", "boot2") + ) .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( pathParameters( parameterWithName("id").description("The id of an existing task execution (required)") ), + requestParameters( + parameterWithName("schemaTarget").description("The schemaTarget provided in Task execution detail") + ), responseFields( fieldWithPath("executionId").description("The id of the task execution"), fieldWithPath("exitCode").description("The exit code of the task execution"), @@ -119,6 +164,7 @@ public void launchTaskDisplayDetail() throws Exception { fieldWithPath("taskExecutionStatus").description("The status of the task 
execution"), fieldWithPath("parentExecutionId").description("The id of parent task execution, " + "null if task execution does not have parent"), + fieldWithPath("schemaTarget").description("The schema target of the task state data"), fieldWithPath("resourceUrl").description("The resource URL that defines the task that was executed"), subsectionWithPath("appProperties").description("The application properties of the task execution"), subsectionWithPath("deploymentProperties").description("The deployment properties of the task exectuion"), @@ -130,89 +176,155 @@ public void launchTaskDisplayDetail() throws Exception { )); } + @Test + public void getTaskDisplayDetailByExternalId() throws Exception { + final AtomicReference externalExecutionId = new AtomicReference<>(null); + documentation.dontDocument(() -> { + MvcResult mvcResult = this.mockMvc.perform( + get("/tasks/executions") + .param("page", "0") + .param("size", "20")) + .andDo(print()) + .andExpect(status().isOk()).andReturn(); + ObjectMapper mapper = new ObjectMapper(); + JsonNode node = mapper.readTree(mvcResult.getResponse().getContentAsString()); + JsonNode list = node.get("_embedded").get("taskExecutionResourceList"); + JsonNode first = list.get(0); + externalExecutionId.set(first.get("externalExecutionId").asText()); + return externalExecutionId.get(); + }); + + this.mockMvc.perform( + get("/tasks/executions/external/{externalExecutionId}", externalExecutionId.get()).queryParam("platform", "default") + ) + .andDo(print()) + .andExpect(status().isOk()) + .andDo(this.documentationHandler.document( + pathParameters( + parameterWithName("externalExecutionId").description("The external ExecutionId of an existing task execution (required)") + ), + requestParameters( + parameterWithName("platform").description("The name of the platform.") + ), + responseFields( + fieldWithPath("executionId").description("The id of the task execution"), + fieldWithPath("exitCode").description("The exit code of the task execution"), + fieldWithPath("taskName").description("The task name related to the task execution"), + fieldWithPath("startTime").description("The start time of the task execution"), + fieldWithPath("endTime").description("The end time of the task execution"), + fieldWithPath("exitMessage").description("The exit message of the task execution"), + fieldWithPath("arguments").description("The arguments of the task execution"), + fieldWithPath("jobExecutionIds").description("The job executions ids of the task executions"), + fieldWithPath("errorMessage").description("The error message of the task execution"), + fieldWithPath("externalExecutionId").description("The external id of the task execution"), + fieldWithPath("taskExecutionStatus").description("The status of the task execution"), + fieldWithPath("parentExecutionId").description("The id of parent task execution, " + + "null if task execution does not have parent"), + fieldWithPath("schemaTarget").description("The schema target of the task state data"), + fieldWithPath("resourceUrl").description("The resource URL that defines the task that was executed"), + subsectionWithPath("appProperties").description("The application properties of the task execution"), + subsectionWithPath("deploymentProperties").description("The deployment properties of the task exectuion"), + subsectionWithPath("deploymentProperties").description("The deployment properties of the task execution"), + subsectionWithPath("platformName").description("The platform selected for the task execution"), + 
subsectionWithPath("_links.self").description("Link to the task execution resource"), + subsectionWithPath("_links.tasks/logs").description("Link to the task execution logs") + ) + )); + } @Test public void listTaskExecutions() throws Exception { - documentation.dontDocument( () -> this.mockMvc.perform( - post("/tasks/executions") - .param("name", "taskB") - .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") - .param("arguments", "--server.port=8080 --foo=bar")) + documentation.dontDocument(() -> this.mockMvc.perform( + post("/tasks/executions") + .param("name", "taskB") + .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") + .param("arguments", "--server.port=8080 --foo=bar") + ) .andExpect(status().isCreated())); this.mockMvc.perform( - get("/tasks/executions") - .param("page", "0") - .param("size", "10")) + get("/tasks/executions") + .param("page", "1") + .param("size", "2")) .andDo(print()) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)")), - responseFields( - subsectionWithPath("_embedded.taskExecutionResourceList") - .description("Contains a collection of Task Executions/"), - subsectionWithPath("_links.self").description("Link to the task execution resource"), - subsectionWithPath("page").description("Pagination properties")))); + requestParameters( + parameterWithName("page") + .description("The zero-based page number (optional)"), + parameterWithName("size") + .description("The requested page size (optional)") + ), + responseFields( + subsectionWithPath("_embedded.taskExecutionResourceList") + .description("Contains a collection of Task Executions/"), + subsectionWithPath("_links.self").description("Link to the task execution resource"), + subsectionWithPath("_links.first").description("Link to the first page of task execution resources").optional(), + subsectionWithPath("_links.last").description("Link to the last page of task execution resources").optional(), + subsectionWithPath("_links.next").description("Link to the next page of task execution resources").optional(), + subsectionWithPath("_links.prev").description("Link to the previous page of task execution resources").optional(), + subsectionWithPath("page").description("Pagination properties")))); } @Test public void listTaskExecutionsByName() throws Exception { this.mockMvc.perform( - get("/tasks/executions") - .param("name", "taskB") - .param("page", "0") - .param("size", "10")) + get("/tasks/executions") + .param("name", "taskB") + .param("page", "0") + .param("size", "10") + ) .andDo(print()) .andExpect(status().isOk()).andDo(this.documentationHandler.document( - requestParameters( - parameterWithName("page") - .description("The zero-based page number (optional)"), - parameterWithName("size") - .description("The requested page size (optional)"), - parameterWithName("name") - .description("The name associated with the task execution")), - responseFields( - subsectionWithPath("_embedded.taskExecutionResourceList") - .description("Contains a collection of Task Executions/"), - subsectionWithPath("_links.self").description("Link to the task execution resource"), - subsectionWithPath("page").description("Pagination properties")))); + requestParameters( + parameterWithName("page") + .description("The zero-based page number (optional)"), + parameterWithName("size") + 
.description("The requested page size (optional)"), + parameterWithName("name") + .description("The name associated with the task execution")), + responseFields( + subsectionWithPath("_embedded.taskExecutionResourceList") + .description("Contains a collection of Task Executions/"), + subsectionWithPath("_links.self").description("Link to the task execution resource"), + subsectionWithPath("page").description("Pagination properties")))); } @Test public void stopTask() throws Exception { this.mockMvc.perform( - post("/tasks/executions") - .param("name", "taskA") - .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") - .param("arguments", "--server.port=8080 --foo=bar")) + post("/tasks/executions") + .param("name", "taskA") + .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") + .param("arguments", "--server.port=8080 --foo=bar") + ) .andExpect(status().isCreated()); this.mockMvc.perform( - post("/tasks/executions/{id}", 1) - .param("platform", "default")) + post("/tasks/executions/{id}", 1) + .queryParam("schemaTarget", "boot2") + ) .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( - pathParameters( - parameterWithName("id").description("The ids of an existing task execution (required)") - ), - requestParameters(parameterWithName("platform") - .description("The platform associated with the task execution(optional)")))); + pathParameters( + parameterWithName("id").description("The ids of an existing task execution (required)") + ), + requestParameters( + parameterWithName("schemaTarget").description("The schemaTarget provided in Task execution detail. (optional)")) + ) + ); } @Test public void taskExecutionRemove() throws Exception { - documentation.dontDocument( () -> this.mockMvc.perform( - post("/tasks/executions") - .param("name", "taskB") - .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") - .param("arguments", "--server.port=8080 --foo=bar")) + documentation.dontDocument(() -> this.mockMvc.perform( + post("/tasks/executions") + .param("name", "taskB") + .param("properties", "app.my-task.foo=bar,deployer.my-task.something-else=3") + .param("arguments", "--server.port=8080 --foo=bar")) .andExpect(status().isCreated())); this.mockMvc.perform( - delete("/tasks/executions/{ids}?action=CLEANUP", "1")) + delete("/tasks/executions/{ids}?action=CLEANUP", "1")) .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( @@ -225,28 +337,48 @@ public void taskExecutionRemove() throws Exception { @Test public void taskExecutionRemoveAndTaskDataRemove() throws Exception { this.mockMvc.perform( - delete("/tasks/executions/{ids}?action=CLEANUP,REMOVE_DATA", "1,2")) + delete("/tasks/executions/{ids}?schemaTarget=boot2&action=CLEANUP,REMOVE_DATA", "1,2")) .andDo(print()) .andExpect(status().isOk()) .andDo(this.documentationHandler.document( - requestParameters(parameterWithName("action").description("Using both actions CLEANUP and REMOVE_DATA simultaneously.")), + requestParameters( + parameterWithName("action").description("Using both actions CLEANUP and REMOVE_DATA simultaneously."), + parameterWithName("schemaTarget").description("Schema target for task. 
(optional)") + ), pathParameters(parameterWithName("ids") - .description("Providing 2 comma separated task execution id values.")) + .description("Providing 2 comma separated task execution id values.") + ) )); } - private void createTaskDefinition(String taskName) throws Exception{ - documentation.dontDocument( () -> this.mockMvc.perform( - post("/tasks/definitions") - .param("name", taskName) - .param("definition", "timestamp --format='yyyy MM dd'")) + private void createTaskDefinition(String taskName) throws Exception { + documentation.dontDocument(() -> this.mockMvc.perform( + post("/tasks/definitions") + .param("name", taskName) + .param("definition", "timestamp --format='yyyy MM dd'")) .andExpect(status().isOk())); } - - private void destroyTaskDefinition(String taskName) throws Exception{ - documentation.dontDocument( () -> this.mockMvc.perform( - delete("/tasks/definitions/{name}", taskName)) + private void cleanupTaskExecutions(String taskName) throws Exception { + documentation.dontDocument(() -> this.mockMvc.perform( + delete("/tasks/executions") + .queryParam("name", taskName) + ) .andExpect(status().isOk())); } + private void destroyTaskDefinition(String taskName) throws Exception { + documentation.dontDocument(() -> this.mockMvc.perform( + delete("/tasks/definitions/{name}", taskName)) + .andExpect(status().isOk())); + } + + private void executeTask(String taskName, String registeredName) throws Exception { + documentation.dontDocument(() -> + this.mockMvc.perform( + post("/tasks/executions") + .param("name", taskName) + .param("arguments", "--server.port=8080 --foo=bar") + ).andExpect(status().isCreated()) + ); + } } diff --git a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskLogsDocumentation.java b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskLogsDocumentation.java index f37b6319fb..fad392d0e4 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskLogsDocumentation.java +++ b/spring-cloud-dataflow-classic-docs/src/test/java/org/springframework/cloud/dataflow/server/rest/documentation/TaskLogsDocumentation.java @@ -21,7 +21,10 @@ import org.junit.Test; import org.junit.runners.MethodSorters; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; import org.springframework.cloud.dataflow.core.ApplicationType; +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.dataflow.server.repository.TaskDeploymentRepository; import org.springframework.cloud.dataflow.server.service.TaskExecutionService; @@ -39,6 +42,7 @@ * * @author Ilayaperumal Gopinathan * @author Glenn Renfro + * @author Corneil du Plessis */ @FixMethodOrder(MethodSorters.NAME_ASCENDING) public class TaskLogsDocumentation extends BaseDocumentation { @@ -58,9 +62,13 @@ public void getLogsByTaskId() throws Exception { .andExpect(status().isCreated()); TaskDeploymentRepository taskDeploymentRepository = springDataflowServer.getWebApplicationContext().getBean(TaskDeploymentRepository.class); - TaskExecutionService service = this.springDataflowServer.getWebApplicationContext().getBean(TaskExecutionService.class); + TaskExecutionService service = springDataflowServer.getWebApplicationContext().getBean(TaskExecutionService.class); + AggregateExecutionSupport 
aggregateExecutionSupport = springDataflowServer.getWebApplicationContext().getBean(AggregateExecutionSupport.class); + TaskDefinitionReader taskDefinitionReader = springDataflowServer.getWebApplicationContext().getBean(TaskDefinitionReader.class); + SchemaVersionTarget schemaVersionTarget = aggregateExecutionSupport.findSchemaVersionTarget(taskName, taskDefinitionReader); Awaitility.await().atMost(Duration.ofMillis(30000)).until(() -> service.getLog("default", - taskDeploymentRepository.findTopByTaskDefinitionNameOrderByCreatedOnAsc(taskName).getTaskDeploymentId()).length() > 0); + taskDeploymentRepository.findTopByTaskDefinitionNameOrderByCreatedOnAsc(taskName).getTaskDeploymentId(), + schemaVersionTarget.getName()).length() > 0); this.mockMvc.perform( get("/tasks/logs/"+taskDeploymentRepository.findTopByTaskDefinitionNameOrderByCreatedOnAsc(taskName) .getTaskDeploymentId()).param("platformName", "default")) diff --git a/spring-cloud-dataflow-classic-docs/src/test/resources/rest-docs-config.yml b/spring-cloud-dataflow-classic-docs/src/test/resources/rest-docs-config.yml index d46d1f1c01..cf952a05f8 100644 --- a/spring-cloud-dataflow-classic-docs/src/test/resources/rest-docs-config.yml +++ b/spring-cloud-dataflow-classic-docs/src/test/resources/rest-docs-config.yml @@ -8,6 +8,9 @@ spring: metrics: collector: uri: http://localhost:${fakeMetricsCollector.port} + deployer: + local: + maximumConcurrentTasks: 50 autoconfigure: exclude: >- org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration, diff --git a/spring-cloud-dataflow-completion/pom.xml b/spring-cloud-dataflow-completion/pom.xml index 787e863258..13665e30f3 100644 --- a/spring-cloud-dataflow-completion/pom.xml +++ b/spring-cloud-dataflow-completion/pom.xml @@ -45,6 +45,10 @@ org.apache.maven.plugins maven-surefire-plugin 3.0.0 + + 1 + 1 + org.apache.maven.surefire diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedBatchConfigurer.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedBatchConfigurer.java index 9a3a12a61a..f1611ffffb 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedBatchConfigurer.java +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedBatchConfigurer.java @@ -16,31 +16,22 @@ package org.springframework.cloud.dataflow.composedtaskrunner; -import java.sql.Connection; -import java.sql.DatabaseMetaData; -import java.sql.ResultSet; -import java.sql.SQLException; +import javax.sql.DataSource; import java.util.HashMap; import java.util.Map; -import javax.sql.DataSource; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean; -import org.springframework.batch.item.database.support.DefaultDataFieldMaxValueIncrementerFactory; -import org.springframework.batch.support.DatabaseType; import org.springframework.boot.autoconfigure.batch.BasicBatchConfigurer; import org.springframework.boot.autoconfigure.batch.BatchProperties; import org.springframework.boot.autoconfigure.transaction.TransactionManagerCustomizers; import org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties; import 
org.springframework.cloud.dataflow.composedtaskrunner.support.ComposedTaskException; -import org.springframework.cloud.dataflow.composedtaskrunner.support.SqlServerSequenceMaxValueIncrementer; -import org.springframework.jdbc.support.MetaDataAccessException; +import org.springframework.cloud.dataflow.core.database.support.MultiSchemaIncrementerFactory; import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer; -import org.springframework.util.StringUtils; /** * A BatchConfigurer for CTR that will establish the transaction isolation level to ISOLATION_REPEATABLE_READ by default. @@ -80,13 +71,7 @@ protected JobRepository createJobRepository() { @Override public JobRepository getJobRepository() { JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean(); - DefaultDataFieldMaxValueIncrementerFactory incrementerFactory = - new DefaultDataFieldMaxValueIncrementerFactory(this.incrementerDataSource) { - @Override - public DataFieldMaxValueIncrementer getIncrementer(String incrementerType, String incrementerName) { - return getIncrementerForApp(incrementerName); - } - }; + MultiSchemaIncrementerFactory incrementerFactory = new MultiSchemaIncrementerFactory(this.incrementerDataSource); factory.setIncrementerFactory(incrementerFactory); factory.setDataSource(this.incrementerDataSource); factory.setTransactionManager(this.getTransactionManager()); @@ -99,71 +84,4 @@ public DataFieldMaxValueIncrementer getIncrementer(String incrementerType, Strin throw new ComposedTaskException(exception.getMessage()); } } - - private DataFieldMaxValueIncrementer getIncrementerForApp(String incrementerName) { - - DefaultDataFieldMaxValueIncrementerFactory incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory(this.incrementerDataSource); - DataFieldMaxValueIncrementer incrementer = null; - if(incrementerMap.containsKey(incrementerName)) { - return incrementerMap.get(incrementerName); - } - if (this.incrementerDataSource != null) { - String databaseType; - try { - databaseType = DatabaseType.fromMetaData(this.incrementerDataSource).name(); - } - catch (MetaDataAccessException e) { - throw new IllegalStateException(e); - } - if (StringUtils.hasText(databaseType) && databaseType.equals("SQLSERVER")) { - if (!isSqlServerTableSequenceAvailable(incrementerName)) { - incrementer = new SqlServerSequenceMaxValueIncrementer(this.incrementerDataSource, incrementerName); - incrementerMap.put(incrementerName, incrementer); - } - } - } - if (incrementer == null) { - try { - incrementer = incrementerFactory.getIncrementer(DatabaseType.fromMetaData(this.incrementerDataSource).name(), incrementerName); - incrementerMap.put(incrementerName, incrementer); - } - catch (Exception exception) { - logger.warn(exception.getMessage(), exception); - } - } - return incrementer; - } - - private boolean isSqlServerTableSequenceAvailable(String incrementerName) { - boolean result = false; - DatabaseMetaData metaData; - Connection connection = null; - try { - connection = this.incrementerDataSource.getConnection(); - metaData = connection.getMetaData(); - String[] types = {"TABLE"}; - ResultSet tables = metaData.getTables(null, null, "%", types); - while (tables.next()) { - if (tables.getString("TABLE_NAME").equals(incrementerName)) { - result = true; - break; - } - } - } - catch (SQLException sqe) { - logger.warn(sqe.getMessage(), sqe); - } - finally { - if(connection != null) { - try { - connection.close(); - } - catch (SQLException sqe) { - logger.warn(sqe.getMessage(), sqe); - } - } - } - return 
result; - } - } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfiguration.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfiguration.java index 950f8e2490..3b6c526600 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfiguration.java +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,6 +17,12 @@ package org.springframework.cloud.dataflow.composedtaskrunner; import javax.sql.DataSource; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.batch.core.StepExecutionListener; import org.springframework.batch.core.configuration.annotation.BatchConfigurer; @@ -25,18 +31,26 @@ import org.springframework.boot.autoconfigure.transaction.TransactionManagerCustomizers; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties; +import org.springframework.cloud.dataflow.core.database.support.MultiSchemaTaskExecutionDaoFactoryBean; +import org.springframework.cloud.dataflow.core.dsl.TaskParser; import org.springframework.cloud.task.configuration.EnableTask; +import org.springframework.cloud.task.listener.TaskExecutionListener; import org.springframework.cloud.task.repository.TaskExplorer; +import org.springframework.cloud.task.repository.support.SimpleTaskExplorer; +import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; +import org.springframework.core.env.Environment; import org.springframework.core.task.TaskExecutor; import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; +import org.springframework.util.StringUtils; /** * Configures the Job that will execute the Composed Task Execution. 
* * @author Glenn Renfro + * @author Corneil du Plessis */ @EnableBatchProcessing @EnableTask @@ -44,16 +58,67 @@ @Configuration @Import(org.springframework.cloud.dataflow.composedtaskrunner.StepBeanDefinitionRegistrar.class) public class ComposedTaskRunnerConfiguration { + private final static Logger logger = LoggerFactory.getLogger(ComposedTaskRunnerConfiguration.class); + + @Bean + public TaskExecutionListener taskExecutionListener() { + return new ComposedTaskRunnerTaskListener(); + } @Bean - public StepExecutionListener composedTaskStepExecutionListener(TaskExplorer taskExplorer){ - return new org.springframework.cloud.dataflow.composedtaskrunner.ComposedTaskStepExecutionListener(taskExplorer); + public StepExecutionListener composedTaskStepExecutionListener(TaskExplorerContainer taskExplorerContainer) { + return new org.springframework.cloud.dataflow.composedtaskrunner.ComposedTaskStepExecutionListener(taskExplorerContainer); } @Bean - public org.springframework.cloud.dataflow.composedtaskrunner.ComposedRunnerJobFactory composedTaskJob(ComposedTaskProperties properties) { + TaskExplorerContainer taskExplorerContainer(TaskExplorer taskExplorer, DataSource dataSource, ComposedTaskProperties properties, Environment env) { + Map<String, TaskExplorer> explorers = new HashMap<>(); + String ctrName = env.getProperty("spring.cloud.task.name"); + if (!StringUtils.hasText(ctrName)) { + throw new IllegalStateException("spring.cloud.task.name property must have a value."); + } + TaskParser parser = new TaskParser("ctr", properties.getGraph(), false, true); + StepBeanDefinitionRegistrar.TaskAppsMapCollector collector = new StepBeanDefinitionRegistrar.TaskAppsMapCollector(); + parser.parse().accept(collector); + Set<String> taskNames = collector.getTaskApps().keySet(); + logger.debug("taskExplorerContainer:taskNames:{}", taskNames); + for (String taskName : taskNames) { + addTaskExplorer(dataSource, properties, env, explorers, taskName); + String appName = taskName.replace(ctrName + "-", ""); + addTaskExplorer(dataSource, properties, env, explorers, appName); + if(taskName.length() > ctrName.length()) { + String shortTaskName = taskName.substring(ctrName.length() + 1); + addTaskExplorer(dataSource, properties, env, explorers, shortTaskName); + } + } + return new TaskExplorerContainer(explorers, taskExplorer); + } + + private static void addTaskExplorer( + DataSource dataSource, + ComposedTaskProperties properties, + Environment env, + Map<String, TaskExplorer> explorers, + String taskName + ) { + logger.debug("addTaskExplorer:{}", taskName); + String propertyName = String.format("app.%s.spring.cloud.task.tablePrefix", taskName); + String prefix = properties.getComposedTaskAppProperties().get(propertyName); + if (prefix == null) { + prefix = env.getProperty(propertyName); + } + if (prefix != null) { + TaskExecutionDaoFactoryBean factoryBean = new MultiSchemaTaskExecutionDaoFactoryBean(dataSource, prefix); + logger.debug("taskExplorerContainer:adding:{}:{}", taskName, prefix); + explorers.put(taskName, new SimpleTaskExplorer(factoryBean)); + } else { + logger.warn("Cannot find {} in {} ", propertyName, properties.getComposedTaskAppProperties()); + } + } - return new org.springframework.cloud.dataflow.composedtaskrunner.ComposedRunnerJobFactory(properties); + @Bean + public ComposedRunnerJobFactory composedTaskJob(ComposedTaskProperties properties) { + return new ComposedRunnerJobFactory(properties); } @Bean @@ -71,10 +136,17 @@ public TaskExecutor taskExecutor(ComposedTaskProperties properties) { } @Bean - public BatchConfigurer 
getComposedBatchConfigurer(BatchProperties properties, - DataSource dataSource, TransactionManagerCustomizers transactionManagerCustomizers, - ComposedTaskProperties composedTaskProperties) { - return new org.springframework.cloud.dataflow.composedtaskrunner.ComposedBatchConfigurer(properties, - dataSource, transactionManagerCustomizers, composedTaskProperties); + public BatchConfigurer getComposedBatchConfigurer( + BatchProperties properties, + DataSource dataSource, + TransactionManagerCustomizers transactionManagerCustomizers, + ComposedTaskProperties composedTaskProperties + ) { + return new ComposedBatchConfigurer( + properties, + dataSource, + transactionManagerCustomizers, + composedTaskProperties + ); } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactory.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactory.java index 91662f08f7..c8ceb10f99 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactory.java +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactory.java @@ -23,6 +23,9 @@ import java.util.stream.Collectors; import java.util.stream.Stream; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.datatype.jdk8.Jdk8Module; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -33,8 +36,10 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties; import org.springframework.cloud.dataflow.core.Base64Utils; -import org.springframework.cloud.task.configuration.TaskConfigurer; +import org.springframework.cloud.dataflow.rest.support.jackson.Jackson2DataflowModule; import org.springframework.cloud.task.configuration.TaskProperties; +import org.springframework.core.env.Environment; +import org.springframework.hateoas.mediatype.hal.Jackson2HalModule; import org.springframework.security.oauth2.client.endpoint.OAuth2AccessTokenResponseClient; import org.springframework.security.oauth2.client.endpoint.OAuth2ClientCredentialsGrantRequest; import org.springframework.security.oauth2.client.registration.ClientRegistrationRepository; @@ -49,19 +54,20 @@ * * @author Glenn Renfro * @author Michael Minella + * @author Corneil du Plessis */ public class ComposedTaskRunnerStepFactory implements FactoryBean<Step> { - private final static Logger log = LoggerFactory.getLogger(ComposedTaskRunnerStepFactory.class); + private final static Logger logger = LoggerFactory.getLogger(ComposedTaskRunnerStepFactory.class); @Autowired private ComposedTaskProperties composedTaskProperties; private ComposedTaskProperties composedTaskPropertiesFromEnv; - private String taskName; + private final String taskName; - private String taskNameId; + private final String taskNameId; private Map<String, String> taskSpecificProps = new HashMap<>(); @@ -74,7 +80,7 @@ public class ComposedTaskRunnerStepFactory implements FactoryBean<Step> { private StepExecutionListener composedTaskStepExecutionListener; @Autowired - private TaskConfigurer taskConfigurer; + private TaskExplorerContainer taskExplorerContainer; @Autowired private TaskProperties taskProperties; @@ -85,8 +91,15 @@ public 
class ComposedTaskRunnerStepFactory implements FactoryBean<Step> { @Autowired(required = false) private OAuth2AccessTokenResponseClient<OAuth2ClientCredentialsGrantRequest> clientCredentialsTokenResponseClient; + @Autowired(required = false) + private ObjectMapper mapper; + + @Autowired + private Environment environment; + public ComposedTaskRunnerStepFactory( - ComposedTaskProperties composedTaskPropertiesFromEnv, String taskName, String taskNameId) { + ComposedTaskProperties composedTaskPropertiesFromEnv, String taskName, String taskNameId + ) { Assert.notNull(composedTaskPropertiesFromEnv, "composedTaskProperties must not be null"); Assert.hasText(taskName, "taskName must not be empty nor null"); @@ -97,53 +110,63 @@ public ComposedTaskRunnerStepFactory( } public void setTaskSpecificProps(Map<String, String> taskSpecificProps) { - if(taskSpecificProps != null) { + if (taskSpecificProps != null) { this.taskSpecificProps = taskSpecificProps; } } public void setArguments(List<String> arguments) { - if(arguments != null) { + if (arguments != null) { this.arguments = arguments; } } @Override - public Step getObject() throws Exception { - + public Step getObject() { + if (this.mapper == null) { + this.mapper = new ObjectMapper(); + this.mapper.registerModule(new Jdk8Module()); + this.mapper.registerModule(new Jackson2HalModule()); + this.mapper.registerModule(new JavaTimeModule()); + this.mapper.registerModule(new Jackson2DataflowModule()); + } TaskLauncherTasklet taskLauncherTasklet = new TaskLauncherTasklet( - this.clientRegistrations, this.clientCredentialsTokenResponseClient, taskConfigurer.getTaskExplorer(), - this.composedTaskPropertiesFromEnv, this.taskName, taskProperties); - - List<String> argumentsFromAppProperties = Base64Utils - .decodeMap(this.composedTaskProperties.getComposedTaskAppArguments()).entrySet().stream() - .filter(e -> e.getKey().startsWith("app." + taskNameId) || e.getKey().startsWith("app.*.")) - .map(e -> e.getValue()) - .collect(Collectors.toList()); + this.clientRegistrations, + this.clientCredentialsTokenResponseClient, + this.taskExplorerContainer.get(this.taskNameId), + this.composedTaskPropertiesFromEnv, + this.taskName, + taskProperties, + environment, this.mapper); + + List<String> argumentsFromAppProperties = Base64Utils.decodeMap(this.composedTaskProperties.getComposedTaskAppArguments()) + .entrySet() + .stream() + .filter(e -> e.getKey().startsWith("app." + taskNameId) || e.getKey().startsWith("app.*.")) + .map(Map.Entry::getValue) + .collect(Collectors.toList()); List<String> argumentsToUse = Stream.concat(this.arguments.stream(), argumentsFromAppProperties.stream()) - .collect(Collectors.toList()); + .collect(Collectors.toList()); taskLauncherTasklet.setArguments(argumentsToUse); - log.debug("decoded composed-task-app-properties {}", composedTaskProperties.getComposedTaskAppProperties()); + logger.debug("decoded composed-task-app-properties {}", composedTaskProperties.getComposedTaskAppProperties()); Map<String, String> propertiesFrom = Base64Utils .decodeMap(this.composedTaskProperties.getComposedTaskAppProperties()).entrySet().stream() .filter(e -> e.getKey().startsWith("app." + taskNameId) || e.getKey().startsWith("deployer." 
+ taskNameId) || e.getKey().startsWith("deployer.*")) - .collect(Collectors.toMap(entry -> entry.getKey(), entry -> entry.getValue())); + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); Map<String, String> propertiesToUse = new HashMap<>(); propertiesToUse.putAll(this.taskSpecificProps); propertiesToUse.putAll(propertiesFrom); taskLauncherTasklet.setProperties(propertiesToUse); - log.debug("Properties to use {}", propertiesToUse); - - String stepName = this.taskName; + logger.debug("Properties to use {}", propertiesToUse); - return this.steps.get(stepName) + return this.steps.get(this.taskName) .tasklet(taskLauncherTasklet) .transactionAttribute(getTransactionAttribute()) .listener(this.composedTaskStepExecutionListener) @@ -156,7 +179,7 @@ public Step getObject() throws Exception { * what is in its transaction. By setting isolation to READ_COMMITTED * The task launcher can see latest state of the db. Since the changes * to the task execution are done by the tasks. - + * * @return DefaultTransactionAttribute with isolation set to READ_COMMITTED. */ private TransactionAttribute getTransactionAttribute() { @@ -172,8 +195,4 @@ public Class<?> getObjectType() { return Step.class; } - @Override - public boolean isSingleton() { - return true; - } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerTaskListener.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerTaskListener.java new file mode 100644 index 0000000000..c9f487df05 --- /dev/null +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerTaskListener.java @@ -0,0 +1,23 @@ +package org.springframework.cloud.dataflow.composedtaskrunner; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.task.listener.TaskExecutionListenerSupport; +import org.springframework.cloud.task.repository.TaskExecution; + +public class ComposedTaskRunnerTaskListener extends TaskExecutionListenerSupport { + private final static Logger logger = LoggerFactory.getLogger(ComposedTaskRunnerTaskListener.class); + + private static Long executionId = null; + + @Override + public void onTaskStartup(TaskExecution taskExecution) { + executionId = taskExecution.getExecutionId(); + logger.info("onTaskStartup:executionId={}", executionId); + } + + public static Long getExecutionId() { + return executionId; + } +} diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListener.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListener.java index a321a337e0..d494195569 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListener.java +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListener.java @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 the original author or authors. + * Copyright 2017-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
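The hunk below rewires ComposedTaskStepExecutionListener to resolve a per-task TaskExplorer from the TaskExplorerContainer rather than a single TaskExplorer. A condensed sketch of the resolution order it implements, assuming a populated container and a Spring Batch StepExecution in scope (variable names are illustrative):

// values stored in the step's execution context by TaskLauncherTasklet
Long executionId = (Long) stepExecution.getExecutionContext().get("task-execution-id");
String taskName = stepExecution.getExecutionContext().getString("task-name");
String schemaTarget = stepExecution.getExecutionContext().getString("schema-target");
// prefer the explorer registered under the task app's name; otherwise fall back to the schema target key
String key = container.getKeys().contains(taskName) ? taskName : schemaTarget;
TaskExplorer explorer = container.get(key); // get() itself falls back to the default explorer for unknown keys
TaskExecution result = explorer.getTaskExecution(executionId);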
@@ -16,8 +16,8 @@ package org.springframework.cloud.dataflow.composedtaskrunner; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.StepExecution; @@ -33,16 +33,17 @@ * exit code. * * @author Glenn Renfro + * @author Corneil du Plessis */ public class ComposedTaskStepExecutionListener extends StepExecutionListenerSupport { + private final static Logger logger = LoggerFactory.getLogger(ComposedTaskStepExecutionListener.class); - private TaskExplorer taskExplorer; + private final TaskExplorerContainer taskExplorerContainer; - private static final Log logger = LogFactory.getLog(org.springframework.cloud.dataflow.composedtaskrunner.ComposedTaskStepExecutionListener.class); - - public ComposedTaskStepExecutionListener(TaskExplorer taskExplorer) { - Assert.notNull(taskExplorer, "taskExplorer must not be null."); - this.taskExplorer = taskExplorer; + public ComposedTaskStepExecutionListener(TaskExplorerContainer taskExplorerContainer) { + Assert.notNull(taskExplorerContainer, "taskExplorerContainer must not be null."); + this.taskExplorerContainer = taskExplorerContainer; + logger.info("ComposedTaskStepExecutionListener supporting {}", taskExplorerContainer.getKeys()); } /** @@ -54,34 +55,46 @@ public ComposedTaskStepExecutionListener(TaskExplorer taskExplorer) { * returned. If no exit message is set or * {@link TaskLauncherTasklet#IGNORE_EXIT_MESSAGE_PROPERTY} is set to true as a task property * and the exit code of the task is zero then the ExitStatus of COMPLETED is returned. + * * @param stepExecution The stepExecution that kicked of the Task. * @return ExitStatus of COMPLETED else FAILED. */ @Override public ExitStatus afterStep(StepExecution stepExecution) { + logger.info("AfterStep processing for stepExecution {}:{}", stepExecution.getStepName(), stepExecution.getJobExecutionId()); ExitStatus result = ExitStatus.COMPLETED; - logger.info(String.format("AfterStep processing for stepExecution %s", - stepExecution.getStepName())); - Long executionId = (Long) stepExecution.getExecutionContext().get("task-execution-id"); - Assert.notNull(executionId, "TaskLauncherTasklet did not " + - "return a task-execution-id. Check to see if task " + - "exists."); - - TaskExecution resultExecution = this.taskExplorer.getTaskExecution(executionId); - + Assert.notNull(executionId, "TaskLauncherTasklet for job " + stepExecution.getJobExecutionId() + + " did not return a task-execution-id. Check to see if task exists."); + String schemaTarget = stepExecution.getExecutionContext().getString("schema-target"); + String taskName = stepExecution.getExecutionContext().getString("task-name"); + Assert.notNull(taskName, "TaskLauncherTasklet for job " + stepExecution.getJobExecutionId() + + " did not return a task-name. Check to see if task exists."); + String explorerName = taskName; + if (!this.taskExplorerContainer.getKeys().contains(taskName)) { + Assert.notNull(schemaTarget, "TaskLauncherTasklet for job " + stepExecution.getJobExecutionId() + + " did not return a schema-target. 
Check to see if task exists."); + explorerName = schemaTarget; + } + logger.info("AfterStep for {}:{}:{}:{}:{}", stepExecution.getStepName(), stepExecution.getJobExecutionId(), taskName, executionId, schemaTarget); + TaskExplorer taskExplorer = this.taskExplorerContainer.get(explorerName); + TaskExecution resultExecution = taskExplorer.getTaskExecution(executionId); if (!stepExecution.getExecutionContext().containsKey(TaskLauncherTasklet.IGNORE_EXIT_MESSAGE) && StringUtils.hasText(resultExecution.getExitMessage())) { result = new ExitStatus(resultExecution.getExitMessage()); - } - else if (resultExecution.getExitCode() != 0) { + } else if (resultExecution.getExitCode() != 0) { result = ExitStatus.FAILED; } - - logger.info(String.format("AfterStep processing complete for " + - "stepExecution %s with taskExecution %s", - stepExecution.getStepName(), executionId)); + logger.info("AfterStep processing complete for stepExecution {} with taskExecution {}:{}:{}:{}", stepExecution.getStepName(), stepExecution.getJobExecutionId(), taskName, executionId, schemaTarget); return result; } + @Override + public void beforeStep(StepExecution stepExecution) { + logger.info("beforeStep:{}:{}>>>>", stepExecution.getStepName(), stepExecution.getJobExecutionId()); + super.beforeStep(stepExecution); + logger.debug("beforeStep:{}", stepExecution.getExecutionContext()); + logger.info("beforeStep:{}:{}<<<", stepExecution.getStepName(), stepExecution.getJobExecutionId()); + + } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskExplorerContainer.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskExplorerContainer.java new file mode 100644 index 0000000000..4cd95b1727 --- /dev/null +++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskExplorerContainer.java @@ -0,0 +1,59 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.composedtaskrunner; + +import java.util.Map; +import java.util.Set; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.task.repository.TaskExplorer; + +/** + * A container for the TaskExplorers for each Task by name. 
+ * @author Corneil du Plessis
+ */
+public class TaskExplorerContainer {
+    private static final Logger logger = LoggerFactory.getLogger(TaskExplorerContainer.class);
+
+    private final Map<String, TaskExplorer> taskExplorers;
+
+    private final TaskExplorer defaultTaskExplorer;
+
+    public TaskExplorerContainer(Map<String, TaskExplorer> taskExplorers, TaskExplorer defaultTaskExplorer) {
+        this.taskExplorers = taskExplorers;
+        this.defaultTaskExplorer = defaultTaskExplorer;
+    }
+
+    public TaskExplorer get(String name) {
+        TaskExplorer result = taskExplorers.get(name);
+        if (result == null) {
+            result = taskExplorers.get(SchemaVersionTarget.defaultTarget().getName());
+        }
+        if (result == null) {
+            logger.warn("Cannot find TaskExplorer for {}. Using default", name);
+            result = defaultTaskExplorer;
+        }
+        return result;
+    }
+
+    public Set<String> getKeys() {
+        return taskExplorers.keySet();
+    }
+}
diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTasklet.java b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTasklet.java
index 310d58ec90..e4b23c6cb8 100644
--- a/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTasklet.java
+++ b/spring-cloud-dataflow-composed-task-runner/src/main/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTasklet.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2017-2022 the original author or authors.
+ * Copyright 2017-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -21,8 +21,11 @@
 import java.util.List;
 import java.util.Map;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
+import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 import org.springframework.batch.core.StepContribution;
 import org.springframework.batch.core.UnexpectedJobExecutionException;
@@ -40,10 +43,15 @@
 import org.springframework.cloud.dataflow.rest.client.DataFlowOperations;
 import org.springframework.cloud.dataflow.rest.client.DataFlowTemplate;
 import org.springframework.cloud.dataflow.rest.client.TaskOperations;
+import org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource;
+import org.springframework.cloud.dataflow.rest.support.jackson.Jackson2DataflowModule;
 import org.springframework.cloud.dataflow.rest.util.HttpClientConfigurer;
 import org.springframework.cloud.task.configuration.TaskProperties;
 import org.springframework.cloud.task.repository.TaskExecution;
 import org.springframework.cloud.task.repository.TaskExplorer;
+import org.springframework.core.env.Environment;
+import org.springframework.hateoas.mediatype.hal.Jackson2HalModule;
+import org.springframework.lang.Nullable;
 import org.springframework.security.oauth2.client.endpoint.OAuth2AccessTokenResponseClient;
 import org.springframework.security.oauth2.client.endpoint.OAuth2ClientCredentialsGrantRequest;
 import org.springframework.security.oauth2.client.registration.ClientRegistration;
@@ -56,7 +64,7 @@
 /**
 * Executes task launch request using Spring Cloud Data Flow's Restful API
 * then returns the execution id once the task launched.
- * 
+ *

* Note: This class is not thread-safe and as such should not be used as a singleton. * * @author Glenn Renfro @@ -66,36 +74,52 @@ public class TaskLauncherTasklet implements Tasklet { final static String IGNORE_EXIT_MESSAGE_PROPERTY = "ignore-exit-message"; - private ComposedTaskProperties composedTaskProperties; + private final ComposedTaskProperties composedTaskProperties; - private TaskExplorer taskExplorer; + private final TaskExplorer taskExplorer; private Map properties; private List arguments; - private String taskName; + private final String taskName; - private static final Log logger = LogFactory.getLog(org.springframework.cloud.dataflow.composedtaskrunner.TaskLauncherTasklet.class); + private static final Logger logger = LoggerFactory.getLogger(TaskLauncherTasklet.class); private Long executionId; + private final String ctrSchemaTarget; + private long timeout; - private ClientRegistrationRepository clientRegistrations; + private final ClientRegistrationRepository clientRegistrations; - private OAuth2AccessTokenResponseClient clientCredentialsTokenResponseClient; + private final OAuth2AccessTokenResponseClient clientCredentialsTokenResponseClient; private TaskOperations taskOperations; TaskProperties taskProperties; + private final ObjectMapper mapper; + public TaskLauncherTasklet( ClientRegistrationRepository clientRegistrations, OAuth2AccessTokenResponseClient clientCredentialsTokenResponseClient, TaskExplorer taskExplorer, - ComposedTaskProperties composedTaskProperties, String taskName, - TaskProperties taskProperties) { + ComposedTaskProperties composedTaskProperties, + String taskName, + TaskProperties taskProperties, + Environment environment, + @Nullable ObjectMapper mapper + ) { + if (mapper == null) { + mapper = new ObjectMapper(); + mapper.registerModule(new Jdk8Module()); + mapper.registerModule(new Jackson2HalModule()); + mapper.registerModule(new JavaTimeModule()); + mapper.registerModule(new Jackson2DataflowModule()); + } + this.mapper = mapper; Assert.hasText(taskName, "taskName must not be empty nor null."); Assert.notNull(taskExplorer, "taskExplorer must not be null."); Assert.notNull(composedTaskProperties, @@ -107,22 +131,21 @@ public TaskLauncherTasklet( this.taskProperties = taskProperties; this.clientRegistrations = clientRegistrations; this.clientCredentialsTokenResponseClient = clientCredentialsTokenResponseClient; + this.ctrSchemaTarget = environment.getProperty("spring.cloud.task.schemaTarget"); } public void setProperties(Map properties) { - if(properties != null) { + if (properties != null) { this.properties = properties; - } - else { + } else { this.properties = new HashMap<>(0); } } public void setArguments(List arguments) { - if(arguments != null) { + if (arguments != null) { this.arguments = arguments; - } - else { + } else { this.arguments = new ArrayList<>(0); } } @@ -136,8 +159,7 @@ public void setArguments(List arguments) { * @return Repeat status of FINISHED. 
*/ @Override - public RepeatStatus execute(StepContribution contribution, - ChunkContext chunkContext) { + public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) { TaskOperations taskOperations = taskOperations(); if (this.executionId == null) { this.timeout = System.currentTimeMillis() + @@ -158,51 +180,61 @@ public RepeatStatus execute(StepContribution contribution, args = (List) stepExecutionContext.get("task-arguments"); } List cleansedArgs = new ArrayList<>(); - if(args != null) { - for(String argument : args) { - if(!argument.startsWith("--spring.cloud.task.parent-execution-id=")) { + if (args != null) { + for (String argument : args) { + if (!argument.startsWith("--spring.cloud.task.parent-execution-id=") && !argument.startsWith("--spring.cloud.task.parent-execution-id%")) { cleansedArgs.add(argument); } } args = cleansedArgs; } - String parentTaskExecutionId = getParentTaskExecutionId(contribution); - if(parentTaskExecutionId != null) { + if (args == null) { + args = new ArrayList<>(); + } + Long parentTaskExecutionId = getParentTaskExecutionId(); + if (parentTaskExecutionId != null) { args.add("--spring.cloud.task.parent-execution-id=" + parentTaskExecutionId); + String parentSchemaTarget = StringUtils.hasText(ctrSchemaTarget) ? ctrSchemaTarget : "boot2"; + args.add("--spring.cloud.task.parent-schema-target=" + parentSchemaTarget); + + } else { + logger.error("Cannot find task execution id"); } - if(StringUtils.hasText(this.composedTaskProperties.getPlatformName())) { + if (StringUtils.hasText(this.composedTaskProperties.getPlatformName())) { properties.put("spring.cloud.dataflow.task.platformName", this.composedTaskProperties.getPlatformName()); } - this.executionId = taskOperations.launch(tmpTaskName, - this.properties, args); + logger.debug("execute:{}:{}:{}", tmpTaskName, this.properties, args); + LaunchResponseResource response = taskOperations.launch(tmpTaskName, this.properties, args); + + this.executionId = response.getExecutionId(); + + stepExecutionContext.put("task-execution-id", response.getExecutionId()); + stepExecutionContext.put("schema-target", response.getSchemaTarget()); + stepExecutionContext.put("task-name", tmpTaskName); + if (!args.isEmpty()) { + stepExecutionContext.put("task-arguments", args); + } Boolean ignoreExitMessage = isIgnoreExitMessage(args, this.properties); if (ignoreExitMessage != null) { stepExecutionContext.put(IGNORE_EXIT_MESSAGE, ignoreExitMessage); } - stepExecutionContext.put("task-execution-id", executionId); - stepExecutionContext.put("task-arguments", args); - } - else { + } else { try { Thread.sleep(this.composedTaskProperties.getIntervalTimeBetweenChecks()); - } - catch (InterruptedException e) { + } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new IllegalStateException(e.getMessage(), e); } - TaskExecution taskExecution = - this.taskExplorer.getTaskExecution(this.executionId); + TaskExecution taskExecution = this.taskExplorer.getTaskExecution(this.executionId); if (taskExecution != null && taskExecution.getEndTime() != null) { if (taskExecution.getExitCode() == null) { throw new UnexpectedJobExecutionException("Task returned a null exit code."); - } - else if (taskExecution.getExitCode() != 0) { + } else if (taskExecution.getExitCode() != 0) { throw new UnexpectedJobExecutionException("Task returned a non zero exit code."); - } - else { + } else { return RepeatStatus.FINISHED; } } @@ -216,19 +248,20 @@ else if (taskExecution.getExitCode() != 0) { return 
RepeatStatus.CONTINUABLE; } - public String getParentTaskExecutionId(StepContribution stepContribution) { + public Long getParentTaskExecutionId() { Long result = null; if (this.taskProperties.getExecutionid() != null) { result = this.taskProperties.getExecutionid(); + logger.debug("getParentTaskExecutionId:taskProperties.executionId={}", result); + } else if (ComposedTaskRunnerTaskListener.getExecutionId() != null) { + result = ComposedTaskRunnerTaskListener.getExecutionId(); + logger.debug("getParentTaskExecutionId:ComposedTaskRunnerTaskListener.executionId={}", result); } - else if (stepContribution != null) { - result = this.taskExplorer.getTaskExecutionIdByJobExecutionId(stepContribution.getStepExecution().getJobExecutionId()); - } - return result != null ? String.valueOf(result) : null; + return result; } public TaskOperations taskOperations() { - if(this.taskOperations == null) { + if (this.taskOperations == null) { this.taskOperations = dataFlowOperations().taskOperations(); if (taskOperations == null) { throw new ComposedTaskException("Unable to connect to Data Flow " + @@ -264,18 +297,15 @@ protected DataFlowOperations dataFlowOperations() { final OAuth2AccessTokenResponse res = this.clientCredentialsTokenResponseClient.getTokenResponse(grantRequest); accessTokenValue = res.getAccessToken().getTokenValue(); logger.debug("Configured OAuth2 Client Credentials for accessing the Data Flow Server"); - } - else if (StringUtils.hasText(this.composedTaskProperties.getDataflowServerAccessToken())) { + } else if (StringUtils.hasText(this.composedTaskProperties.getDataflowServerAccessToken())) { accessTokenValue = this.composedTaskProperties.getDataflowServerAccessToken(); logger.debug("Configured OAuth2 Access Token for accessing the Data Flow Server"); - } - else if (StringUtils.hasText(this.composedTaskProperties.getDataflowServerUsername()) + } else if (StringUtils.hasText(this.composedTaskProperties.getDataflowServerUsername()) && StringUtils.hasText(this.composedTaskProperties.getDataflowServerPassword())) { - accessTokenValue = null; - clientHttpRequestFactoryBuilder.basicAuthCredentials(composedTaskProperties.getDataflowServerUsername(), composedTaskProperties.getDataflowServerPassword()); + clientHttpRequestFactoryBuilder.basicAuthCredentials(composedTaskProperties.getDataflowServerUsername(), + composedTaskProperties.getDataflowServerPassword()); logger.debug("Configured basic security for accessing the Data Flow Server"); - } - else { + } else { logger.debug("Not configuring basic security for accessing the Data Flow Server"); } @@ -294,7 +324,7 @@ else if (StringUtils.hasText(this.composedTaskProperties.getDataflowServerUserna restTemplate.setRequestFactory(clientHttpRequestFactoryBuilder.buildClientHttpRequestFactory()); } - return new DataFlowTemplate(this.composedTaskProperties.getDataflowServerUri(), restTemplate); + return new DataFlowTemplate(this.composedTaskProperties.getDataflowServerUri(), restTemplate, mapper); } private void validateUsernamePassword(String userName, String password) { @@ -311,10 +341,9 @@ private Boolean isIgnoreExitMessage(List args, Map prope if (properties != null) { MapConfigurationPropertySource mapConfigurationPropertySource = new MapConfigurationPropertySource(); - properties.entrySet().forEach(entrySet -> { - String key = entrySet.getKey(); + properties.forEach((key, value) -> { key = key.substring(key.lastIndexOf(".") + 1); - mapConfigurationPropertySource.put(key, entrySet.getValue()); + mapConfigurationPropertySource.put(key, value); }); 
result = isIgnoreMessagePresent(mapConfigurationPropertySource); } diff --git a/spring-cloud-dataflow-composed-task-runner/src/main/resources/application.properties b/spring-cloud-dataflow-composed-task-runner/src/main/resources/application.properties new file mode 100644 index 0000000000..4a28840efe --- /dev/null +++ b/spring-cloud-dataflow-composed-task-runner/src/main/resources/application.properties @@ -0,0 +1 @@ +spring.cloud.task.closecontext-enabled=true diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationNoPropertiesTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationNoPropertiesTests.java index aaa6be299d..09d8d2b0af 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationNoPropertiesTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationNoPropertiesTests.java @@ -16,9 +16,8 @@ package org.springframework.cloud.dataflow.composedtaskrunner; +import java.util.Arrays; import java.util.Collections; -import java.util.HashMap; - import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -51,12 +50,12 @@ * @author Glenn Renfro */ @ExtendWith(SpringExtension.class) -@ContextConfiguration(classes={EmbeddedDataSourceConfiguration.class, - DataFlowTestConfiguration.class,StepBeanDefinitionRegistrar.class, +@ContextConfiguration(classes = {EmbeddedDataSourceConfiguration.class, + DataFlowTestConfiguration.class, StepBeanDefinitionRegistrar.class, ComposedTaskRunnerConfiguration.class, StepBeanDefinitionRegistrar.class}) -@TestPropertySource(properties = {"graph=AAA && BBB && CCC","max-wait-time=1000", "spring.cloud.task.name=foo"}) -@EnableAutoConfiguration(exclude = { CommonSecurityAutoConfiguration.class}) +@TestPropertySource(properties = {"graph=AAA && BBB && CCC", "max-wait-time=1000", "spring.cloud.task.name=foo"}) +@EnableAutoConfiguration(exclude = {CommonSecurityAutoConfiguration.class}) public class ComposedTaskRunnerConfigurationNoPropertiesTests { @Autowired @@ -83,6 +82,11 @@ public void testComposedConfiguration() throws Exception { assertThat(composedTaskProperties.getTransactionIsolationLevel()).isEqualTo("ISOLATION_REPEATABLE_READ"); Assert.notNull(job.getJobParametersIncrementer(), "JobParametersIncrementer must not be null."); - verify(taskOperations).launch("AAA", new HashMap<>(0), Collections.singletonList("--spring.cloud.task.parent-execution-id=1")); + + verify(taskOperations).launch( + "AAA", + Collections.emptyMap(), + Arrays.asList("--spring.cloud.task.parent-execution-id=1", "--spring.cloud.task.parent-schema-target=boot2") + ); } } diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesTests.java index 09710a6e46..4b7a0f8c26 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesTests.java +++ 
b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerConfigurationWithPropertiesTests.java @@ -110,6 +110,7 @@ public void testComposedConfiguration() throws Exception { List args = new ArrayList<>(2); args.add("--baz=boo --foo=bar"); args.add("--spring.cloud.task.parent-execution-id=1"); + args.add("--spring.cloud.task.parent-schema-target=boot2"); Assert.notNull(job.getJobParametersIncrementer(), "JobParametersIncrementer must not be null."); verify(taskOperations).launch("ComposedTest-AAA", props, args); } diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactoryTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactoryTests.java index 6ba343310b..3218544f3c 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactoryTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskRunnerStepFactoryTests.java @@ -18,6 +18,8 @@ import javax.sql.DataSource; +import java.util.Collections; + import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -44,17 +46,19 @@ /** * @author Glenn Renfro + * @author Corneil du Plessis */ @ExtendWith(SpringExtension.class) -@ContextConfiguration(classes={org.springframework.cloud.dataflow.composedtaskrunner.ComposedTaskRunnerStepFactoryTests.StepFactoryConfiguration.class}) +@ContextConfiguration(classes = {org.springframework.cloud.dataflow.composedtaskrunner.ComposedTaskRunnerStepFactoryTests.StepFactoryConfiguration.class}) public class ComposedTaskRunnerStepFactoryTests { @Autowired ComposedTaskRunnerStepFactory stepFactory; @Test - public void testStep() throws Exception{ + public void testStep() throws Exception { Step step = stepFactory.getObject(); + assertThat(step).isNotNull(); assertThat(step.getName()).isEqualTo("FOOBAR"); assertThat(step.getStartLimit()).isEqualTo(Integer.MAX_VALUE); } @@ -68,6 +72,12 @@ public static class StepFactoryConfiguration { @MockBean public TaskOperations taskOperations; + @Bean + public TaskExplorerContainer taskExplorerContainer() { + TaskExplorer taskExplorer = mock(TaskExplorer.class); + return new TaskExplorerContainer(Collections.emptyMap(), taskExplorer); + } + @Bean public ComposedTaskProperties composedTaskProperties() { return new ComposedTaskProperties(); @@ -79,7 +89,7 @@ public TaskProperties taskProperties() { } @Bean - public StepBuilderFactory steps(){ + public StepBuilderFactory steps() { return new StepBuilderFactory(mock(JobRepository.class), mock(PlatformTransactionManager.class)); } diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListenerTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListenerTests.java index e80a0074e3..08f31756a0 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListenerTests.java +++ 
b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/ComposedTaskStepExecutionListenerTests.java @@ -16,6 +16,7 @@ package org.springframework.cloud.dataflow.composedtaskrunner; +import java.util.Collections; import java.util.Date; import org.junit.jupiter.api.BeforeEach; @@ -24,6 +25,7 @@ import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.StepExecution; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.test.util.ReflectionTestUtils; @@ -38,6 +40,7 @@ */ public class ComposedTaskStepExecutionListenerTests { + private TaskExplorerContainer taskExplorerContainer; private TaskExplorer taskExplorer; private StepExecution stepExecution; @@ -47,17 +50,16 @@ public class ComposedTaskStepExecutionListenerTests { @BeforeEach public void setup() { this.taskExplorer = mock(TaskExplorer.class); + this.taskExplorerContainer = new TaskExplorerContainer(Collections.emptyMap(), taskExplorer); this.stepExecution = getStepExecution(); - this.taskListener = - new ComposedTaskStepExecutionListener(this.taskExplorer); - ReflectionTestUtils.setField(this.taskListener, "taskExplorer", this.taskExplorer); + this.taskListener = new ComposedTaskStepExecutionListener(this.taskExplorerContainer); } @Test public void testSuccessfulRun() { TaskExecution taskExecution = getDefaultTaskExecution(0, null); when(this.taskExplorer.getTaskExecution(anyLong())).thenReturn(taskExecution); - populateExecutionContext(111L); + populateExecutionContext(taskExecution.getTaskName(),111L, SchemaVersionTarget.defaultTarget().getName()); assertThat(this.taskListener.afterStep(this.stepExecution)).isEqualTo(ExitStatus.COMPLETED); } @@ -67,7 +69,7 @@ public void testExitMessageRunSuccess() { TaskExecution taskExecution = getDefaultTaskExecution(0, expectedTaskStatus.getExitCode()); when(this.taskExplorer.getTaskExecution(anyLong())).thenReturn(taskExecution); - populateExecutionContext(111L); + populateExecutionContext(taskExecution.getTaskName(), 111L, SchemaVersionTarget.defaultTarget().getName()); assertThat(this.taskListener.afterStep(this.stepExecution)).isEqualTo(expectedTaskStatus); } @@ -78,7 +80,7 @@ public void testExitMessageRunFail() { TaskExecution taskExecution = getDefaultTaskExecution(1, expectedTaskStatus.getExitCode()); when(this.taskExplorer.getTaskExecution(anyLong())).thenReturn(taskExecution); - populateExecutionContext(111L); + populateExecutionContext(taskExecution.getTaskName(), 111L, SchemaVersionTarget.defaultTarget().getName()); assertThat(this.taskListener.afterStep(this.stepExecution)).isEqualTo(expectedTaskStatus); } @@ -87,7 +89,7 @@ public void testExitMessageRunFail() { public void testFailedRun() { TaskExecution taskExecution = getDefaultTaskExecution(1, null); when(this.taskExplorer.getTaskExecution(anyLong())).thenReturn(taskExecution); - populateExecutionContext(111L); + populateExecutionContext(taskExecution.getTaskName(), 111L, SchemaVersionTarget.defaultTarget().getName()); assertThat(this.taskListener.afterStep(this.stepExecution)).isEqualTo(ExitStatus.FAILED); } @@ -108,14 +110,16 @@ private StepExecution getStepExecution() { return new StepExecution(STEP_NAME, jobExecution); } - private void populateExecutionContext(Long taskExecutionId) { - 
this.stepExecution.getExecutionContext().put("task-execution-id", - taskExecutionId); + private void populateExecutionContext(String taskName, Long taskExecutionId, String schemaTarget) { + this.stepExecution.getExecutionContext().put("task-name", taskName); + this.stepExecution.getExecutionContext().put("task-execution-id", taskExecutionId); + this.stepExecution.getExecutionContext().put("schema-target", schemaTarget); } private TaskExecution getDefaultTaskExecution (int exitCode, String exitMessage) { TaskExecution taskExecution = new TaskExecution(); + taskExecution.setTaskName("test-ctr"); taskExecution.setExitMessage(exitMessage); taskExecution.setExitCode(exitCode); taskExecution.setEndTime(new Date()); diff --git a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTaskletTests.java b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTaskletTests.java index b24fb7560a..077f6ff375 100644 --- a/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTaskletTests.java +++ b/spring-cloud-dataflow-composed-task-runner/src/test/java/org/springframework/cloud/dataflow/composedtaskrunner/TaskLauncherTaskletTests.java @@ -23,6 +23,9 @@ import javax.sql.DataSource; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.datatype.jdk8.Jdk8Module; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import org.assertj.core.api.Assertions; import org.assertj.core.api.AssertionsForClassTypes; import org.junit.jupiter.api.BeforeEach; @@ -46,9 +49,13 @@ import org.springframework.cloud.dataflow.composedtaskrunner.properties.ComposedTaskProperties; import org.springframework.cloud.dataflow.composedtaskrunner.support.ComposedTaskException; import org.springframework.cloud.dataflow.composedtaskrunner.support.TaskExecutionTimeoutException; +import org.springframework.cloud.dataflow.core.database.support.MultiSchemaTaskExecutionDaoFactoryBean; import org.springframework.cloud.dataflow.rest.client.DataFlowClientException; import org.springframework.cloud.dataflow.rest.client.DataFlowOperations; import org.springframework.cloud.dataflow.rest.client.TaskOperations; +import org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource; +import org.springframework.cloud.dataflow.rest.support.jackson.Jackson2DataflowModule; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.task.batch.listener.support.JdbcTaskBatchDao; import org.springframework.cloud.task.configuration.TaskProperties; import org.springframework.cloud.task.repository.TaskExecution; @@ -62,7 +69,9 @@ import org.springframework.cloud.task.repository.support.TaskRepositoryInitializer; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import org.springframework.core.env.Environment; import org.springframework.hateoas.Link; +import org.springframework.hateoas.mediatype.hal.Jackson2HalModule; import org.springframework.hateoas.mediatype.vnderrors.VndErrors; import org.springframework.security.oauth2.client.endpoint.OAuth2AccessTokenResponseClient; import org.springframework.security.oauth2.client.endpoint.OAuth2ClientCredentialsGrantRequest; @@ -101,21 +110,31 @@ public class TaskLauncherTaskletTests { @Autowired private JdbcTaskExecutionDao taskExecutionDao; + @Autowired + 
private Environment environment; private TaskOperations taskOperations; private TaskRepository taskRepository; private TaskExplorer taskExplorer; + private ObjectMapper mapper; + @BeforeEach public void setup() throws Exception{ + if (this.mapper == null) { + this.mapper = new ObjectMapper(); + this.mapper.registerModule(new Jdk8Module()); + this.mapper.registerModule(new Jackson2HalModule()); + this.mapper.registerModule(new JavaTimeModule()); + this.mapper.registerModule(new Jackson2DataflowModule()); + } this.taskRepositoryInitializer.setDataSource(this.dataSource); - this.taskRepositoryInitializer.afterPropertiesSet(); this.taskOperations = mock(TaskOperations.class); TaskExecutionDaoFactoryBean taskExecutionDaoFactoryBean = - new TaskExecutionDaoFactoryBean(this.dataSource); + new MultiSchemaTaskExecutionDaoFactoryBean(this.dataSource, "TASK_"); this.taskRepository = new SimpleTaskRepository(taskExecutionDaoFactoryBean); this.taskExplorer = new SimpleTaskExplorer(taskExecutionDaoFactoryBean); this.composedTaskProperties.setIntervalTimeBetweenChecks(500); @@ -123,7 +142,7 @@ public void setup() throws Exception{ @Test @DirtiesContext - public void testTaskLauncherTasklet() throws Exception{ + public void testTaskLauncherTasklet() { createCompleteTaskExecution(0); TaskLauncherTasklet taskLauncherTasklet = getTaskExecutionTasklet(); @@ -133,6 +152,9 @@ public void testTaskLauncherTasklet() throws Exception{ assertThat(chunkContext.getStepContext() .getStepExecution().getExecutionContext() .get("task-execution-id")).isEqualTo(1L); + assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .get("schema-target")).isEqualTo(SchemaVersionTarget.defaultTarget().getName()); mockReturnValForTaskExecution(2L); chunkContext = chunkContext(); @@ -142,17 +164,28 @@ public void testTaskLauncherTasklet() throws Exception{ assertThat(chunkContext.getStepContext() .getStepExecution().getExecutionContext() .get("task-execution-id")).isEqualTo(2L); + assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .get("schema-target")).isEqualTo(SchemaVersionTarget.defaultTarget().getName()); } @Test @DirtiesContext public void testInvalidTaskOperations() { - TaskLauncherTasklet taskLauncherTasklet = new TestTaskLauncherTasklet(null, null, - this.taskExplorer, this.composedTaskProperties, - TASK_NAME, new TaskProperties()); - Exception exception = assertThrows(ComposedTaskException.class, () -> { - execute(taskLauncherTasklet, null, chunkContext()); - }); + TaskLauncherTasklet taskLauncherTasklet = new TestTaskLauncherTasklet( + null, + null, + this.taskExplorer, + this.composedTaskProperties, + TASK_NAME, + new TaskProperties(), + environment, + mapper + ); + Exception exception = assertThrows( + ComposedTaskException.class, + () -> execute(taskLauncherTasklet, null, chunkContext()) + ); AssertionsForClassTypes.assertThat(exception.getMessage()).isEqualTo( "Unable to connect to Data Flow Server to execute task operations. 
" + "Verify that Data Flow Server's tasks/definitions endpoint can be accessed."); @@ -172,7 +205,10 @@ public void testTaskLauncherTaskletWithTaskExecutionId() { assertThat(chunkContext.getStepContext() .getStepExecution().getExecutionContext() .get("task-execution-id")).isEqualTo(2L); - assertThat(((List) chunkContext.getStepContext() + assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .get("schema-target")).isEqualTo(SchemaVersionTarget.defaultTarget().getName()); + assertThat(((List) chunkContext.getStepContext() .getStepExecution().getExecutionContext() .get("task-arguments")).get(0)).isEqualTo("--spring.cloud.task.parent-execution-id=88"); } @@ -195,7 +231,10 @@ public void testTaskLauncherTaskletWithoutTaskExecutionId() { assertThat(chunkContext.getStepContext() .getStepExecution().getExecutionContext() .get("task-execution-id")).isEqualTo(2L); - assertThat(((List) chunkContext.getStepContext() + assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .get("schema-target")).isEqualTo(SchemaVersionTarget.defaultTarget().getName()); + assertThat(((List) chunkContext.getStepContext() .getStepExecution().getExecutionContext() .get("task-arguments")).get(0)).isEqualTo("--spring.cloud.task.parent-execution-id=1"); } @@ -220,7 +259,10 @@ public void testTaskLauncherTaskletWithTaskExecutionIdWithPreviousParentID() { assertThat(chunkContext.getStepContext() .getStepExecution().getExecutionContext() .get("task-execution-id")).isEqualTo(2L); - assertThat(((List) chunkContext.getStepContext() + assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .get("schema-target")).isEqualTo(SchemaVersionTarget.defaultTarget().getName()); + assertThat(((List) chunkContext.getStepContext() .getStepExecution().getExecutionContext() .get("task-arguments")).get(0)).isEqualTo("--spring.cloud.task.parent-execution-id=88"); } @@ -252,7 +294,8 @@ public void testInvalidTaskName() { TaskLauncherTasklet taskLauncherTasklet = getTaskExecutionTasklet(); ChunkContext chunkContext = chunkContext(); Throwable exception = assertThrows(DataFlowClientException.class, - () -> taskLauncherTasklet.execute(null, chunkContext)); + () -> taskLauncherTasklet.execute(null, chunkContext) + ); Assertions.assertThat(exception.getMessage()).isEqualTo(ERROR_MESSAGE); } @@ -314,7 +357,7 @@ public void testTaskOperationsConfiguredWithMissingPassword() { TaskLauncherTasklet taskLauncherTasklet = new TaskLauncherTasklet(null, null, this.taskExplorer, composedTaskProperties, - TASK_NAME, new TaskProperties()); + TASK_NAME, new TaskProperties(), environment, mapper); taskLauncherTasklet.taskOperations(); } catch (IllegalArgumentException e) { @@ -338,6 +381,9 @@ public void testTaskLauncherTaskletIgnoreExitMessage() { Assertions.assertThat(chunkContext.getStepContext() .getStepExecution().getExecutionContext() .get("task-execution-id")).isEqualTo(1L); + assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .get("schema-target")).isEqualTo(SchemaVersionTarget.defaultTarget().getName()); Assertions.assertThat(chunkContext.getStepContext() .getStepExecution().getExecutionContext() .containsKey(TaskLauncherTasklet.IGNORE_EXIT_MESSAGE)).isTrue(); @@ -357,6 +403,9 @@ public void testTaskLauncherTaskletIgnoreExitMessageViaProperties() { Assertions.assertThat(chunkContext.getStepContext() .getStepExecution().getExecutionContext() .get("task-execution-id")).isEqualTo(1L); + assertThat(chunkContext.getStepContext() + 
.getStepExecution().getExecutionContext() + .get("schema-target")).isEqualTo(SchemaVersionTarget.defaultTarget().getName()); Assertions.assertThat(chunkContext.getStepContext() .getStepExecution().getExecutionContext() .containsKey(TaskLauncherTasklet.IGNORE_EXIT_MESSAGE)).isTrue(); @@ -377,6 +426,9 @@ public void testTaskLauncherTaskletIgnoreExitMessageViaCommandLineOverride() { Assertions.assertThat(chunkContext.getStepContext() .getStepExecution().getExecutionContext() .get("task-execution-id")).isEqualTo(1L); + assertThat(chunkContext.getStepContext() + .getStepExecution().getExecutionContext() + .get("schema-target")).isEqualTo(SchemaVersionTarget.defaultTarget().getName()); boolean value = chunkContext.getStepContext() .getStepExecution().getExecutionContext() .containsKey(TaskLauncherTasklet.IGNORE_EXIT_MESSAGE); @@ -397,7 +449,7 @@ public void testTaskOperationsConfiguredWithMissingUsername() { TaskLauncherTasklet taskLauncherTasklet = new TaskLauncherTasklet(null, null, this.taskExplorer, composedTaskProperties, - TASK_NAME, new TaskProperties()); + TASK_NAME, new TaskProperties(), environment, mapper); taskLauncherTasklet.taskOperations(); } catch (IllegalArgumentException e) { @@ -433,7 +485,7 @@ private TaskLauncherTasklet getTaskExecutionTasklet() { private TaskLauncherTasklet getTaskExecutionTasklet(TaskProperties taskProperties) { TaskLauncherTasklet taskLauncherTasklet = new TaskLauncherTasklet(null, null, this.taskExplorer, this.composedTaskProperties, - TASK_NAME, taskProperties); + TASK_NAME, taskProperties, environment, mapper); ReflectionTestUtils.setField(taskLauncherTasklet, "taskOperations", this.taskOperations); return taskLauncherTasklet; } @@ -448,9 +500,11 @@ private ChunkContext chunkContext () StepContext stepContext = new StepContext(stepExecution); return new ChunkContext(stepContext); } - private void mockReturnValForTaskExecution(long executionId) { - Mockito.doReturn(executionId) + mockReturnValForTaskExecution(executionId, SchemaVersionTarget.defaultTarget().getName()); + } + private void mockReturnValForTaskExecution(long executionId, String schemaTarget) { + Mockito.doReturn(new LaunchResponseResource(executionId, schemaTarget)) .when(this.taskOperations) .launch(ArgumentMatchers.anyString(), ArgumentMatchers.any(), @@ -480,8 +534,10 @@ public TestTaskLauncherTasklet( OAuth2AccessTokenResponseClient clientCredentialsTokenResponseClient, TaskExplorer taskExplorer, ComposedTaskProperties composedTaskProperties, String taskName, - TaskProperties taskProperties) { - super(clientRegistrations, clientCredentialsTokenResponseClient,taskExplorer,composedTaskProperties,taskName,taskProperties); + TaskProperties taskProperties, + Environment environment, + ObjectMapper mapper) { + super(clientRegistrations, clientCredentialsTokenResponseClient,taskExplorer,composedTaskProperties,taskName,taskProperties, environment, mapper); } @Override diff --git a/spring-cloud-dataflow-core/pom.xml b/spring-cloud-dataflow-core/pom.xml index 5e36a4446a..d955b2cc28 100644 --- a/spring-cloud-dataflow-core/pom.xml +++ b/spring-cloud-dataflow-core/pom.xml @@ -24,11 +24,20 @@ + + org.springframework.cloud + spring-cloud-task-batch + org.springframework.cloud spring-cloud-dataflow-core-dsl ${project.version} + + org.springframework.cloud + spring-cloud-dataflow-schema-core + ${project.version} + org.springframework.cloud spring-cloud-deployer-spi @@ -84,17 +93,4 @@ test - - - - org.apache.maven.plugins - maven-compiler-plugin - 3.11.0 - - 1.8 - 1.8 - - - - diff --git 
a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppRegistration.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppRegistration.java index 53d5378d0e..18532d1408 100644 --- a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppRegistration.java +++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppRegistration.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2020 the original author or authors. + * Copyright 2016-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -27,6 +27,7 @@ import javax.persistence.Table; import javax.persistence.Transient; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; import org.springframework.util.Assert; /** @@ -37,6 +38,7 @@ * @author Christian Tzolov * @author Vinicius Carvalho * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ @Entity @Table(name = "AppRegistration") @@ -228,7 +230,7 @@ public String toString() { return "AppRegistration{" + "name='" + this.getName() + '\'' + ", type='" + this.getType() + '\'' + ", version='" + this.getVersion() + '\'' + ", uri=" + this.getUri() + ", metadataUri=" + this.getMetadataUri() + - ", bootVersion=\'" + this.getBootVersion().getBootVersion() + '}'; + ", bootVersion='" + this.getBootVersion().getBootVersion() + '}'; } @Override diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/LaunchResponse.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/LaunchResponse.java new file mode 100644 index 0000000000..6fd87b79c1 --- /dev/null +++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/LaunchResponse.java @@ -0,0 +1,59 @@ +package org.springframework.cloud.dataflow.core; + +import java.util.Objects; + +public class LaunchResponse { + private long executionId; + + private String schemaTarget; + + public LaunchResponse() { + } + + public LaunchResponse(long executionId, String schemaTarget) { + this.executionId = executionId; + this.schemaTarget = schemaTarget; + } + + public long getExecutionId() { + return executionId; + } + + public void setExecutionId(long executionId) { + this.executionId = executionId; + } + + public String getSchemaTarget() { + return schemaTarget; + } + + public void setSchemaTarget(String schemaTarget) { + this.schemaTarget = schemaTarget; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + LaunchResponse that = (LaunchResponse) o; + + if (executionId != that.executionId) return false; + return Objects.equals(schemaTarget, that.schemaTarget); + } + + @Override + public int hashCode() { + int result = (int) (executionId ^ (executionId >>> 32)); + result = 31 * result + (schemaTarget != null ? 
schemaTarget.hashCode() : 0); + return result; + } + + @Override + public String toString() { + return "LaunchResponse{" + + "taskId=" + executionId + + ", schemaTarget='" + schemaTarget + '\'' + + '}'; + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/DatabaseType.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/DatabaseType.java similarity index 95% rename from spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/DatabaseType.java rename to spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/DatabaseType.java index 40f3b6c05b..2b1aea7b01 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/DatabaseType.java +++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/DatabaseType.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.springframework.cloud.dataflow.server.repository.support; +package org.springframework.cloud.dataflow.core.database.support; import java.util.HashMap; import java.util.Map; @@ -37,7 +37,9 @@ public enum DatabaseType { HSQL("HSQL Database Engine"), H2("H2"), ORACLE("Oracle"), + MARIADB("MariaDB"), MYSQL("MySQL"), + POSTGRES("PostgreSQL"), SQLSERVER("Microsoft SQL Server"), DB2("DB2"); @@ -73,12 +75,10 @@ public static DatabaseType fromMetaData(DataSource dataSource) throws MetaDataAc .toString(); if (!databaseProductVersion.startsWith("SQL")) { databaseProductName = "DB2ZOS"; - } - else { + } else { databaseProductName = JdbcUtils.commonDatabaseName(databaseProductName); } - } - else { + } else if(!databaseProductName.equals("MariaDB")) { databaseProductName = JdbcUtils.commonDatabaseName(databaseProductName); } return fromProductName(databaseProductName); diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/IncrementerType.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/IncrementerType.java new file mode 100644 index 0000000000..87f8f26667 --- /dev/null +++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/IncrementerType.java @@ -0,0 +1,7 @@ +package org.springframework.cloud.dataflow.core.database.support; + +public enum IncrementerType { + DEFAULT, + TABLE, + SEQUENCE +} diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MariaDBSequenceMaxValueIncrementer.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MariaDBSequenceMaxValueIncrementer.java new file mode 100644 index 0000000000..ab6ee1cdc8 --- /dev/null +++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MariaDBSequenceMaxValueIncrementer.java @@ -0,0 +1,18 @@ +package org.springframework.cloud.dataflow.core.database.support; + +import javax.sql.DataSource; + +import org.springframework.jdbc.support.incrementer.AbstractSequenceMaxValueIncrementer; + +public class MariaDBSequenceMaxValueIncrementer extends AbstractSequenceMaxValueIncrementer { + public MariaDBSequenceMaxValueIncrementer() { + } + + public MariaDBSequenceMaxValueIncrementer(DataSource dataSource, String incrementerName) { + super(dataSource, 
incrementerName);
+    }
+
+    protected String getSequenceQuery() {
+        return "select next value for " + this.getIncrementerName();
+    }
+}
diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MultiSchemaIncrementerFactory.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MultiSchemaIncrementerFactory.java
new file mode 100644
index 0000000000..0830574f77
--- /dev/null
+++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MultiSchemaIncrementerFactory.java
@@ -0,0 +1,68 @@
+package org.springframework.cloud.dataflow.core.database.support;
+
+import javax.sql.DataSource;
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.batch.item.database.support.DefaultDataFieldMaxValueIncrementerFactory;
+import org.springframework.jdbc.support.MetaDataAccessException;
+import org.springframework.jdbc.support.incrementer.DataFieldMaxValueIncrementer;
+
+public class MultiSchemaIncrementerFactory extends DefaultDataFieldMaxValueIncrementerFactory {
+    private static final Logger logger = LoggerFactory.getLogger(MultiSchemaIncrementerFactory.class);
+
+    private final DataSource dataSource;
+
+    public MultiSchemaIncrementerFactory(DataSource dataSource) {
+        super(dataSource);
+        this.dataSource = dataSource;
+    }
+
+    @Override
+    public DataFieldMaxValueIncrementer getIncrementer(String incrementerType, String incrementerName) {
+        DatabaseType databaseType;
+        try {
+            databaseType = DatabaseType.fromMetaData(this.dataSource);
+        } catch (MetaDataAccessException e) {
+            throw new IllegalStateException(e);
+        }
+        if (databaseType != null) {
+            IncrementerType type = getIncrementerType(incrementerName);
+            if (type == IncrementerType.SEQUENCE) {
+                switch (databaseType) {
+                    case SQLSERVER:
+                        return new SqlServerSequenceMaxValueIncrementer(this.dataSource, incrementerName);
+                    case MARIADB:
+                        return new MariaDBSequenceMaxValueIncrementer(this.dataSource, incrementerName);
+                }
+            }
+        }
+        return super.getIncrementer(incrementerType, incrementerName);
+    }
+
+    private IncrementerType getIncrementerType(String incrementerName) {
+        try (Connection connection = this.dataSource.getConnection()) {
+            DatabaseMetaData metaData = connection.getMetaData();
+            String[] types = {"TABLE", "SEQUENCE"};
+            ResultSet tables = metaData.getTables(null, null, "%", types);
+            while (tables.next()) {
+                if (tables.getString("TABLE_NAME").equals(incrementerName)) {
+                    String tableType = tables.getString("TABLE_TYPE");
+                    logger.debug("Found Table:{}:{}", incrementerName, tableType);
+                    if (tableType != null && tableType.toUpperCase().contains("SEQUENCE")) {
+                        return IncrementerType.SEQUENCE;
+                    }
+                    return IncrementerType.TABLE;
+                }
+            }
+        } catch (SQLException sqe) {
+            logger.warn(sqe.getMessage(), sqe);
+        }
+        return IncrementerType.DEFAULT;
+    }
+}
diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MultiSchemaTaskExecutionDaoFactoryBean.java
b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MultiSchemaTaskExecutionDaoFactoryBean.java new file mode 100644 index 0000000000..fdcedb1627 --- /dev/null +++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/MultiSchemaTaskExecutionDaoFactoryBean.java @@ -0,0 +1,35 @@ +package org.springframework.cloud.dataflow.core.database.support; + +import javax.sql.DataSource; + +import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory; +import org.springframework.cloud.task.repository.dao.JdbcTaskExecutionDao; +import org.springframework.cloud.task.repository.dao.TaskExecutionDao; +import org.springframework.cloud.task.repository.support.DatabaseType; +import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean; +import org.springframework.jdbc.support.MetaDataAccessException; + +public class MultiSchemaTaskExecutionDaoFactoryBean extends TaskExecutionDaoFactoryBean { + private final DataSource dataSource; + private final String tablePrefix; + public MultiSchemaTaskExecutionDaoFactoryBean(DataSource dataSource, String tablePrefix) { + super(dataSource, tablePrefix); + this.dataSource = dataSource; + this.tablePrefix = tablePrefix; + } + + @Override + public TaskExecutionDao getObject() throws Exception { + DataFieldMaxValueIncrementerFactory incrementerFactory = new MultiSchemaIncrementerFactory(dataSource); + JdbcTaskExecutionDao dao = new JdbcTaskExecutionDao(dataSource, this.tablePrefix); + String databaseType; + try { + databaseType = DatabaseType.fromMetaData(dataSource).name(); + } + catch (MetaDataAccessException e) { + throw new IllegalStateException(e); + } + dao.setTaskIncrementer(incrementerFactory.getIncrementer(databaseType, this.tablePrefix + "SEQ")); + return dao; + } +} diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/SqlServerSequenceMaxValueIncrementer.java b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/SqlServerSequenceMaxValueIncrementer.java new file mode 100644 index 0000000000..e301274a9c --- /dev/null +++ b/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/database/support/SqlServerSequenceMaxValueIncrementer.java @@ -0,0 +1,18 @@ +package org.springframework.cloud.dataflow.core.database.support; + +import javax.sql.DataSource; + +import org.springframework.jdbc.support.incrementer.AbstractSequenceMaxValueIncrementer; + +public class SqlServerSequenceMaxValueIncrementer extends AbstractSequenceMaxValueIncrementer { + public SqlServerSequenceMaxValueIncrementer() { + } + + public SqlServerSequenceMaxValueIncrementer(DataSource dataSource, String incrementerName) { + super(dataSource, incrementerName); + } + + protected String getSequenceQuery() { + return "select next value for " + this.getIncrementerName(); + } +} diff --git a/spring-cloud-dataflow-docs/pom.xml b/spring-cloud-dataflow-docs/pom.xml index 6f4dcb6bd0..edcd7b25d5 100644 --- a/spring-cloud-dataflow-docs/pom.xml +++ b/spring-cloud-dataflow-docs/pom.xml @@ -62,14 +62,6 @@ spring-cloud-dataflow-completion ${project.version} - - io.spring.docresources - spring-doc-resources - ${docs.resources.version} - zip - system - ${rootDir}/lib/spring-doc-resources-0.2.5.zip - @@ -105,26 +97,6 @@ - - org.apache.maven.plugins - maven-dependency-plugin - - - unpack-doc-resources - - unpack-dependencies - - generate-resources - - 
io.spring.docresources - spring-doc-resources - zip - true - ${project.build.directory}/refdocs/ - - - - org.apache.maven.plugins maven-resources-plugin @@ -151,7 +123,14 @@ org.asciidoctor asciidoctor-maven-plugin - 2.2.3 + 2.2.4 + + + io.spring.asciidoctor.backends + spring-asciidoctor-backends + 0.0.5 + + ${project.build.directory}/refdocs/ ${project.build.directory}/generated-docs @@ -184,7 +163,7 @@ process-asciidoc - html5 + spring-html highlight.js book diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/api-guide.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/api-guide.adoc index b53a3b2970..c96804b6da 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/api-guide.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/api-guide.adoc @@ -24,11 +24,13 @@ The API version can be seen at the end of the home page of Spring Cloud Data Flo ==== [source,json] ---- -{"_links": - {"dashboard":{"href":"http://localhost:9393/dashboard"} - ... - }, -"api.revision":14} +{ + "_links": { + "dashboard": { "href" : "http://localhost:9393/dashboard" }, + ... + }, + "api.revision":15 +} ---- ==== The table below shows the SCDF Release version and its current RESTful API version. @@ -36,6 +38,9 @@ The table below shows the SCDF Release version and its current RESTful API versi |=== | SCDF Version | API Version +| 2.11.x +| 14 + | 2.10.x | 14 @@ -141,6 +146,7 @@ The API includes the following resources: * <> * <> +* <> * <> * <> * <> @@ -619,6 +625,67 @@ include::{snippets}/app-registry-documentation/unregistering-all-applications/cu include::{snippets}/app-registry-documentation/unregistering-all-applications/http-response.adoc[] +[[api-guide-resources-schema-info]] +=== Schema Information + +The schema information endpoint provides information about the supported Spring Boot schema versions for Task and Batch applications and the available Schema Targets. + +The following topics provide more details: + +* <> +* <> + +[[api-guide-resources-schema-info-versions]] +==== List All Schema Versions + +The schema endpoint provides for listing supported Spring Boot versions. + +The following topics provide more details: + +* <> +* <> +* <> + +[[api-guide-resources-schema-info-versions-request-structure]] +===== Request Structure + +include::{snippets}/schema-documentation/schema-versions/http-request.adoc[] + +[[api-guide-resources-schema-info-versions-example-request]] +===== Example Request + +include::{snippets}/schema-documentation/schema-versions/curl-request.adoc[] + +[[api-guide-resources-schema-info-versions-response-structure]] +===== Response Structure + +include::{snippets}/schema-documentation/schema-versions/http-response.adoc[] + +[[api-guide-resources-schema-info-targets]] +==== List All Schema Targets + +The schema endpoint provides for listing supported Schema Targets. 
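As a minimal illustration (not part of the generated snippets), the same listing can be fetched from Java. The `/schema/targets` path is an assumption inferred from the snippet names above, so verify it against a running server:

[source,java]
----
import org.springframework.web.client.RestTemplate;

public class SchemaTargetsClient {

	public static void main(String[] args) {
		RestTemplate restTemplate = new RestTemplate();
		// Each schema target names the Boot version it supports (for example
		// "boot2" or "boot3") and the task/batch table prefixes it maps to.
		String body = restTemplate.getForObject("http://localhost:9393/schema/targets", String.class);
		System.out.println(body);
	}
}
----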
+ +The following topics provide more details: + +* <> +* <> +* <> + +[[api-guide-resources-schema-info-targets-request-structure]] +===== Request Structure + +include::{snippets}/schema-documentation/schema-targets/http-request.adoc[] + +[[api-guide-resources-schema-info-targets-example-request]] +===== Example Request + +include::{snippets}/schema-documentation/schema-targets/curl-request.adoc[] + +[[api-guide-resources-schema-info-targets-response-structure]] +===== Response Structure + +include::{snippets}/schema-documentation/schema-targets/http-response.adoc[] [[api-guide-resources-audit-records]] === Audit Records @@ -1802,9 +1869,10 @@ The following topics provide more details: [[api-guide-resources-task-executions-launching]] -==== Launching a Task +==== Launching a Task (Legacy) + +Launching a task is done by requesting the creation of a new task execution. This endpoint will fail if the task is registered as a Spring Boot 3 application. -Launching a task is done by requesting the creation of a new task execution. The following topics provide more details: * <> @@ -1812,8 +1880,6 @@ The following topics provide more details: * <> * <> - - [[api-guide-resources-task-executions-launching-request-structure]] ===== Request Structure @@ -1839,6 +1905,45 @@ include::{snippets}/task-executions-documentation/launch-task/curl-request.adoc[ include::{snippets}/task-executions-documentation/launch-task/http-response.adoc[] +[[api-guide-resources-task-executions-launching-boot3]] +==== Launching a Task + +Launching a task is done by requesting the creation of a new task execution. The response will contain an execution id and a schema target. + +The following topics provide more details: + +* <> +* <> +* <> +* <> + + +[[api-guide-resources-task-executions-launching-boot3-request-structure]] +===== Request Structure + +include::{snippets}/task-executions-documentation/launch-task-boot3/http-request.adoc[] + + + +[[api-guide-resources-task-executions-launching-boot3-request-parameters]] +===== Request Parameters + +include::{snippets}/task-executions-documentation/launch-task-boot3/request-parameters.adoc[] + + + +[[api-guide-resources-task-executions-launching-boot3-example-request]] +===== Example Request + +include::{snippets}/task-executions-documentation/launch-task-boot3/curl-request.adoc[] + + +[[api-guide-resources-task-executions-launching-boot3-response-structure]] +===== Response Structure + +include::{snippets}/task-executions-documentation/launch-task-boot3/http-response.adoc[] + + [[api-guide-resources-task-executions-stopping]] ==== Stopping a Task @@ -1979,9 +2084,9 @@ The following topics provide more details: [[api-guide-resources-task-executions-detail-request-structure]] ===== Request Structure -include::{snippets}/task-executions-documentation/launch-task-display-detail/http-request.adoc[] +include::{snippets}/task-executions-documentation/get-task-display-detail/http-request.adoc[] -include::{snippets}/task-executions-documentation/launch-task-display-detail/path-parameters.adoc[] +include::{snippets}/task-executions-documentation/get-task-display-detail/path-parameters.adoc[] @@ -1995,17 +2100,57 @@ There are no request parameters for this endpoint. 
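Before the example request below, a minimal sketch of how a Java client obtains the execution id and schema target pair that the detail endpoints expect. It uses the `DataFlowTemplate` REST client, whose `launch` method now returns a `LaunchResponseResource`; the server URI and the task name `my-task` are illustrative assumptions:

[source,java]
----
import java.net.URI;
import java.util.Collections;

import org.springframework.cloud.dataflow.rest.client.DataFlowTemplate;
import org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource;

public class LaunchAndInspect {

	public static void main(String[] args) {
		DataFlowTemplate dataFlow = new DataFlowTemplate(URI.create("http://localhost:9393"));
		// Launch with empty deployment properties and no command-line arguments.
		LaunchResponseResource response = dataFlow.taskOperations()
				.launch("my-task", Collections.emptyMap(), Collections.emptyList());
		// Keep both values: Boot 2 and Boot 3 tasks record their executions in
		// different schemas, so later lookups need the schema target as well.
		System.out.println(response.getExecutionId() + ":" + response.getSchemaTarget());
	}
}
----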
[[api-guide-resources-task-executions-detail-example-request]] ===== Example Request -include::{snippets}/task-executions-documentation/launch-task-display-detail/curl-request.adoc[] +include::{snippets}/task-executions-documentation/get-task-display-detail/curl-request.adoc[] [[api-guide-resources-task-executions-detail-response-structure]] ===== Response Structure -include::{snippets}/task-executions-documentation/launch-task-display-detail/http-response.adoc[] +include::{snippets}/task-executions-documentation/get-task-display-detail/http-response.adoc[] + + +[[api-guide-resources-task-executions-detail-by-external-id]] +==== Task Execution Detail by External Id + +The task executions endpoint lets you get the details of a task execution. +The following topics provide more details: + +* <> +* <> +* <> +* <> + + + +[[api-guide-resources-task-executions-detail-by-external-id-request-structure]] +===== Request Structure + +include::{snippets}/task-executions-documentation/get-task-display-detail-by-external-id/http-request.adoc[] + +include::{snippets}/task-executions-documentation/get-task-display-detail-by-external-id/path-parameters.adoc[] + + + +[[api-guide-resources-task-executions-detail-by-external-id-request-parameters]] +===== Request Parameters + +There are no request parameters for this endpoint. +[[api-guide-resources-task-executions-detail-by-external-id-example-request]] +===== Example Request + +include::{snippets}/task-executions-documentation/get-task-display-detail-by-external-id/curl-request.adoc[] + + + +[[api-guide-resources-task-executions-detail-by-external-id-response-structure]] +===== Response Structure + +include::{snippets}/task-executions-documentation/get-task-display-detail-by-external-id/http-response.adoc[] + [[api-guide-resources-task-executions-delete]] ==== Delete Task Execution diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-carvel.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-carvel.adoc index 6525e982f5..39e03fafe0 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-carvel.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration-carvel.adoc @@ -3,7 +3,9 @@ Deployment of a carvel package requires the installation of tools and specific Kubernetes controllers. Then you will add the package repository to the cluster and install the application. -== Required Tools +For a local minikube or kind cluster, see xref:local-k8s-development[Configure Kubernetes for local development or testing] and follow the instructions up to the section _Deploy Spring Cloud Data Flow_. + +=== Required Tools * `kubectl` - Kubernetes CLI (Install with `brew install kubectl`) * `carvel` - Packaging and Deployment tools @@ -37,27 +39,27 @@ These scripts assume you are connected to a Kubernetes cluster and `kubectl` is |=== |Name | Arguments |Descriptions -| carvel/start-deploy.sh +| start-deploy.sh | [scdf-type] [namespace] [release\|snapshot] | Configures the environment variables needed by the rest of the scripts. `BROKER`, `NS` and `SCDF_TYPE` are set. The default `NS` is `scdf`. The namespace will be created by `setup-scdf-repo.sh` if it doesn't exist. The default `SCDF_TYPE` is `oss`. _release\|snapshot_ and _scdf-type_ determine the value of `PACKAGE_VERSION`.
-| carvel/prepare-cluster.sh +| prepare-cluster.sh | N/A | Installs cert-manager, secretgen-controller and kapp-controller -| carvel/carvel-use-template.sh +| carvel-use-template.sh | [scdf-type] (oss, pro) | Creates `scdf-values.yml` in the current directory based on `scdf-pro-values.yml` or `scdf-oss-values.yml` -| carvel/setup-scdf-repo.sh +| setup-scdf-repo.sh | [scdf-type] (oss, pro) | Creates the namespace and installs the relevant Carvel package and credentials. If the optional _scdf-type_ is not provided, the environment variable `SCDF_TYPE` will be used. -| carvel/configure-prometheus-proxy.sh +| configure-prometheus-proxy.sh | [step] | Configures Spring Boot Actuator properties for Data Flow, Skipper, Streams and Tasks. The default `step` is 10s. -| carvel/configure-database.sh +| configure-database.sh | [password/secret-username-key] [secret-password-key] | If only _secret-name_ is provided, then _secret-username-key_ defaults to `username` and _secret-password-key_ defaults to `password`. @@ -68,21 +70,21 @@ The following 3 combinations are allowed after the _url_: * -| carvel/deploy-scdf.sh +| deploy-scdf.sh | [app-name] | Deploys the application using the package and `scdf-values.yml` in the current directory. The default _app-name_ is `scdf-${SCDF_TYPE}`. -| carvel/update-scdf.sh +| update-scdf.sh | [app-name] | Updates the deployed application using a modified values file. The default _app-name_ is `scdf-${SCDF_TYPE}`. -| carvel/export-dataflow-ip.sh +| export-dataflow-ip.sh | N/A | Prints the URL for accessing Data Flow. If you use `source ./export-dataflow-ip.sh`, it will export `DATAFLOW_URL` for use by `register-apps.sh`. -| carvel/register-apps.sh +| register-apps.sh | [stream-application-version] | _broker_ must be one of rabbit or kafka. _stream-application-version_ is optional; if omitted, the latest version (2021.1.2) is installed. @@ -90,7 +92,11 @@ _stream-application-version_ is optional and will install the latest version. Th NOTE: Registration of applications in the _pro_ version can take a few minutes, since it retrieves all version information and metadata upfront. -== Prepare Configuration parameters +=== Preparation +You will need to prepare a values file named `scdf-values.yml`. +The following steps will help you do so. + +==== Prepare Configuration parameters Executing the following script will configure the environment variables needed. @@ -147,7 +153,7 @@ The environmental variables can also be configured manually to override the valu |=== -== Prepare Configuration file +==== Prepare Configuration file Create a file named `scdf-values.yml` by executing: @@ -160,7 +166,7 @@ Edit the file as needed to configure the deployment. The `deploy-local-` scripts _Uses the scdf-type previously selected._ -== Prepare cluster and add repository +=== Prepare cluster and add repository Log in to Docker and, optionally, to registry.pivotal.io for Spring Cloud Data Flow Pro. @@ -190,13 +196,13 @@ Load scdf repo package for the _scdf-type_ ./carvel/setup-scdf-repo.sh .... -== Install supporting services +=== Install supporting services In a production environment you should use supported database and broker services or operators, along with shared observability tools. For local development or demonstration, the following can be used to install a database, a broker, and Prometheus. -=== Deploy local database. +==== Deploy local database. [source,shell] ....
@@ -206,13 +212,13 @@ For local development or demonstration the following can be used to install data NOTE: This script updates `scdf-values.yml` with the correct secret name. -=== Deploy local message broker. +==== Deploy local message broker. [source,shell] .... ./carvel/deploy-local-broker.sh .... -=== Deploy local Prometheus and proxy. +==== Deploy local Prometheus and proxy. [source,shell] .... ./carvel/deploy-local-prometheus.sh .... In the case where an existing prometheus and prometheus proxy is deployed, the proxy can be configured with: [source,shell] .... ./carvel/configure-prometheus-proxy.sh [step] .... -== Deploy Spring Cloud Data Flow +=== Deploy Spring Cloud Data Flow [source,shell] .... ./carvel/deploy-scdf.sh source ./carvel/export-dataflow-ip.sh ./carvel/register-apps.sh .... -== Update deployed application. +=== Update deployed application. You can modify the values file used during installation and then update the deployment using `./carvel/update-scdf.sh`. diff --git a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration.adoc b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration.adoc index 9571138055..d1b6912189 100644 --- a/spring-cloud-dataflow-docs/src/main/asciidoc/configuration.adoc +++ b/spring-cloud-dataflow-docs/src/main/asciidoc/configuration.adoc @@ -897,5 +897,5 @@ By using the `token_format` parameter, you can request the token to be either: include::configuration-local.adoc[] include::configuration-cloudfoundry.adoc[] -include::configuration-carvel.adoc[] include::configuration-kubernetes.adoc[] +include::configuration-carvel.adoc[] diff --git a/spring-cloud-dataflow-parent/pom.xml b/spring-cloud-dataflow-parent/pom.xml index 65d268688a..b7df2a5144 100644 --- a/spring-cloud-dataflow-parent/pom.xml +++ b/spring-cloud-dataflow-parent/pom.xml @@ -12,7 +12,8 @@ -Xdoclint:none 3.3.1 2.7.13 - 3.3.4-SNAPSHOT + 5.3.28 + 3.4.0-SNAPSHOT 2.11.0-SNAPSHOT ${dataflow.version} 2.9.0-SNAPSHOT @@ -20,11 +21,8 @@ 2.9.0-SNAPSHOT 2.9.0-SNAPSHOT 2.11.0-SNAPSHOT - - 2.4.5 - + 2.4.6 ${dataflow.version} - 0.8.8 3.0.2 2.2.0 @@ -38,13 +36,13 @@ 1.11.731 1.17.5 - 3.0.2 - 2.2.0 + 4.0.0-SNAPSHOT + 2.3.4 1.0.7 1.0.7 1.6.6 - 5.7.7 + 5.7.9 @@ -311,6 +309,8 @@ maven-surefire-plugin 3.0.0 + 1 + 1 **/Abstract*.java diff --git a/spring-cloud-dataflow-platform-cloudfoundry/pom.xml b/spring-cloud-dataflow-platform-cloudfoundry/pom.xml index bd30da362b..a1c3f9b7d1 100644 --- a/spring-cloud-dataflow-platform-cloudfoundry/pom.xml +++ b/spring-cloud-dataflow-platform-cloudfoundry/pom.xml @@ -66,6 +66,10 @@ org.apache.maven.plugins maven-surefire-plugin 3.0.0 + + 1 + 1 + org.apache.maven.surefire diff --git a/spring-cloud-dataflow-registry/pom.xml b/spring-cloud-dataflow-registry/pom.xml index 1e9f59a1f7..055a87fe82 100644 --- a/spring-cloud-dataflow-registry/pom.xml +++ b/spring-cloud-dataflow-registry/pom.xml @@ -76,6 +76,10 @@ org.apache.maven.plugins maven-surefire-plugin 3.0.0 + + 1 + 1 + org.apache.maven.surefire diff --git a/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/service/AppRegistryService.java b/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/service/AppRegistryService.java index 010879ab83..b12198adfa 100644 --- a/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/service/AppRegistryService.java +++ b/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/service/AppRegistryService.java @@ -18,10 +18,10 @@ import java.net.URI; import java.util.List;
-import org.springframework.cloud.dataflow.core.AppBootSchemaVersion; import org.springframework.cloud.dataflow.core.AppRegistration; import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.cloud.dataflow.registry.support.NoSuchAppRegistrationException; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; import org.springframework.core.io.Resource; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; diff --git a/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryService.java b/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryService.java index 65ebfb5a6b..292e93fcd2 100644 --- a/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryService.java +++ b/spring-cloud-dataflow-registry/src/main/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryService.java @@ -34,7 +34,6 @@ import org.springframework.cloud.dataflow.audit.service.AuditRecordService; import org.springframework.cloud.dataflow.audit.service.AuditServiceUtils; -import org.springframework.cloud.dataflow.core.AppBootSchemaVersion; import org.springframework.cloud.dataflow.core.AppRegistration; import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.cloud.dataflow.core.AuditActionType; @@ -42,6 +41,7 @@ import org.springframework.cloud.dataflow.registry.repository.AppRegistrationRepository; import org.springframework.cloud.dataflow.registry.support.AppResourceCommon; import org.springframework.cloud.dataflow.registry.support.NoSuchAppRegistrationException; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; import org.springframework.core.io.Resource; import org.springframework.core.io.support.PropertiesLoaderUtils; import org.springframework.data.domain.Page; diff --git a/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryServiceTests.java b/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryServiceTests.java index f42b89d03c..938e4007ee 100644 --- a/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryServiceTests.java +++ b/spring-cloud-dataflow-registry/src/test/java/org/springframework/cloud/dataflow/registry/service/DefaultAppRegistryServiceTests.java @@ -28,11 +28,11 @@ import org.mockito.ArgumentCaptor; import org.springframework.cloud.dataflow.audit.service.DefaultAuditRecordService; -import org.springframework.cloud.dataflow.core.AppBootSchemaVersion; import org.springframework.cloud.dataflow.core.AppRegistration; import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.cloud.dataflow.registry.repository.AppRegistrationRepository; import org.springframework.cloud.dataflow.registry.support.AppResourceCommon; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; import org.springframework.cloud.deployer.resource.maven.MavenProperties; import org.springframework.core.io.ClassPathResource; import org.springframework.core.io.DefaultResourceLoader; diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryOperations.java 
b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryOperations.java index 69fd0fb71d..5ff1f161b2 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryOperations.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryOperations.java @@ -18,10 +18,10 @@ import java.util.Properties; -import org.springframework.cloud.dataflow.core.AppBootSchemaVersion; import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.cloud.dataflow.rest.resource.AppRegistrationResource; import org.springframework.cloud.dataflow.rest.resource.DetailedAppRegistrationResource; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; import org.springframework.hateoas.PagedModel; /** diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryTemplate.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryTemplate.java index 685eb56a2c..de8c788491 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryTemplate.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/AppRegistryTemplate.java @@ -18,10 +18,10 @@ import java.util.Properties; -import org.springframework.cloud.dataflow.core.AppBootSchemaVersion; import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.cloud.dataflow.rest.resource.AppRegistrationResource; import org.springframework.cloud.dataflow.rest.resource.DetailedAppRegistrationResource; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; import org.springframework.hateoas.Link; import org.springframework.hateoas.PagedModel; import org.springframework.hateoas.RepresentationModel; diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/DataFlowTemplate.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/DataFlowTemplate.java index 394f12b27c..6f330cb030 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/DataFlowTemplate.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/DataFlowTemplate.java @@ -21,6 +21,8 @@ import java.util.Map; import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.datatype.jdk8.Jdk8Module; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import org.springframework.cloud.dataflow.rest.Version; import org.springframework.cloud.dataflow.rest.resource.RootResource; @@ -65,7 +67,7 @@ public class DataFlowTemplate implements DataFlowOperations { /** * Holds discovered URLs of the API. */ - protected final Map resources = new HashMap(); + protected final Map resources = new HashMap<>(); /** * REST client for stream operations. 
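To make the constructor changes that follow concrete, here is a hypothetical usage sketch. The module list mirrors the fallback the template registers when the `mapper` argument is `null`; the factory class and `serverUri` value are assumptions for illustration:

[source,java]
----
import java.net.URI;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;

import org.springframework.cloud.dataflow.rest.client.DataFlowTemplate;
import org.springframework.cloud.dataflow.rest.support.jackson.Jackson2DataflowModule;
import org.springframework.hateoas.mediatype.hal.Jackson2HalModule;

public class DataFlowTemplateFactory {

	// Builds a template with an explicitly configured mapper instead of the null fallback.
	static DataFlowTemplate create(String serverUri) throws Exception {
		ObjectMapper mapper = new ObjectMapper();
		mapper.registerModule(new Jdk8Module());
		mapper.registerModule(new Jackson2HalModule());
		mapper.registerModule(new JavaTimeModule());
		mapper.registerModule(new Jackson2DataflowModule());
		return new DataFlowTemplate(new URI(serverUri), mapper);
	}
}
----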
@@ -126,8 +128,8 @@ public class DataFlowTemplate implements DataFlowOperations { * * @param baseURI Must not be null */ - public DataFlowTemplate(URI baseURI) { - this(baseURI, getDefaultDataflowRestTemplate()); + public DataFlowTemplate(URI baseURI, ObjectMapper mapper) { + this(baseURI, getDefaultDataflowRestTemplate(), mapper); } /** @@ -135,10 +137,10 @@ public DataFlowTemplate(URI baseURI) { * missing Mixins for Jackson will be added implicitly. For more information, please * see {@link #prepareRestTemplate(RestTemplate)}. * - * @param baseURI Must not be null + * @param baseURI Must not be null * @param restTemplate Must not be null */ - public DataFlowTemplate(URI baseURI, RestTemplate restTemplate) { + public DataFlowTemplate(URI baseURI, RestTemplate restTemplate, ObjectMapper mapper) { Assert.notNull(baseURI, "The provided baseURI must not be null."); Assert.notNull(restTemplate, "The provided restTemplate must not be null."); @@ -170,31 +172,37 @@ public DataFlowTemplate(URI baseURI, RestTemplate restTemplate) { if (resourceSupport.hasLink(StreamTemplate.DEFINITIONS_REL)) { this.streamOperations = new StreamTemplate(restTemplate, resourceSupport, getVersion()); this.runtimeOperations = new RuntimeTemplate(restTemplate, resourceSupport); - } - else { + } else { this.streamOperations = null; this.runtimeOperations = null; } if (resourceSupport.hasLink(TaskTemplate.DEFINITIONS_RELATION)) { + if (mapper == null) { + mapper = new ObjectMapper(); + mapper.registerModule(new Jdk8Module()); + mapper.registerModule(new Jackson2HalModule()); + mapper.registerModule(new JavaTimeModule()); + mapper.registerModule(new Jackson2DataflowModule()); + } this.taskOperations = new TaskTemplate(restTemplate, resourceSupport, getVersion()); this.jobOperations = new JobTemplate(restTemplate, resourceSupport); - if(resourceSupport.hasLink(SchedulerTemplate.SCHEDULES_RELATION)) { + if (resourceSupport.hasLink(SchedulerTemplate.SCHEDULES_RELATION)) { this.schedulerOperations = new SchedulerTemplate(restTemplate, resourceSupport); - } - else { + } else { schedulerOperations = null; } - } - else { + } else { this.taskOperations = null; this.jobOperations = null; this.schedulerOperations = null; } this.appRegistryOperations = new AppRegistryTemplate(restTemplate, resourceSupport); - this.completionOperations = new CompletionTemplate(restTemplate, - resourceSupport.getLink("completions/stream").get(), resourceSupport.getLink("completions/task").get()); - } - else { + this.completionOperations = new CompletionTemplate( + restTemplate, + resourceSupport.getLink("completions/stream").get(), + resourceSupport.getLink("completions/task").get() + ); + } else { this.aboutOperations = null; this.streamOperations = null; this.runtimeOperations = null; @@ -209,7 +217,7 @@ public DataFlowTemplate(URI baseURI, RestTemplate restTemplate) { private String getVersion() { String version = ""; AboutResource aboutResource = this.aboutOperations.get(); - if(aboutResource != null) { + if (aboutResource != null) { version = aboutResource.getVersionInfo().getCore().getVersion(); } return version; @@ -228,7 +236,7 @@ private String getVersion() { *

  • {@link ExecutionContextJacksonMixIn} *
  • {@link StepExecutionHistoryJacksonMixIn} * - * + *

    * Furthermore, this method will also register the {@link Jackson2HalModule} * * @param restTemplate Can be null. Instantiates a new {@link RestTemplate} if null @@ -268,7 +276,7 @@ public static RestTemplate prepareRestTemplate(RestTemplate restTemplate) { public static ObjectMapper prepareObjectMapper(ObjectMapper objectMapper) { Assert.notNull(objectMapper, "The objectMapper must not be null."); return objectMapper - .registerModules(new Jackson2HalModule(), new Jackson2DataflowModule()); + .registerModules(new Jackson2HalModule(), new Jackson2DataflowModule()); } /** @@ -281,12 +289,9 @@ public static RestTemplate getDefaultDataflowRestTemplate() { } public Link getLink(RepresentationModel resourceSupport, String rel) { - Link link = resourceSupport.getLink(rel).get(); - if (link == null) { - throw new DataFlowServerException( - "Server did not return a link for '" + rel + "', links: '" + resourceSupport + "'"); - } - return link; + return resourceSupport.getLink(rel).orElseThrow(() -> + new DataFlowServerException("Server did not return a link for '" + rel + "', links: '" + resourceSupport + "'") + ); } @Override diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobOperations.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobOperations.java index ed575f0cd8..450ad1d850 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobOperations.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobOperations.java @@ -40,7 +40,7 @@ public interface JobOperations { * * @param id job execution id */ - void executionRestart(long id); + void executionRestart(long id, String schemaTarget); /** * @return the list job executions without step executions known to the system. @@ -76,7 +76,7 @@ public interface JobOperations { * @param id identifier of the job execution * @return {@link JobExecutionResource} */ - JobExecutionResource jobExecution(long id); + JobExecutionResource jobExecution(long id, String schemaTarget); /** * Return the {@link JobInstanceResource} for the id specified. @@ -84,7 +84,7 @@ public interface JobOperations { * @param id identifier of the job instasnce * @return {@link JobInstanceResource} */ - JobInstanceResource jobInstance(long id); + JobInstanceResource jobInstance(long id, String schemaTarget); /** * List step executions known for a specific job execution id. @@ -92,7 +92,7 @@ public interface JobOperations { * @param jobExecutionId the id of the job execution. * @return the paged list of step executions */ - PagedModel stepExecutionList(long jobExecutionId); + PagedModel stepExecutionList(long jobExecutionId, String schemaTarget); /** * Return StepExecutionProgressInfoResource for a specific job execution id and step @@ -102,6 +102,6 @@ public interface JobOperations { * @param stepExecutionId the id step execution to be returned. 
* @return the step execution progress info */ - StepExecutionProgressInfoResource stepExecutionProgress(long jobExecutionId, long stepExecutionId); + StepExecutionProgressInfoResource stepExecutionProgress(long jobExecutionId, long stepExecutionId, String schemaTarget); } diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobTemplate.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobTemplate.java index a88f635562..5f993e6c4a 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobTemplate.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/JobTemplate.java @@ -16,6 +16,10 @@ package org.springframework.cloud.dataflow.rest.client; +import java.time.temporal.ValueRange; +import java.util.HashMap; +import java.util.Map; + import org.springframework.cloud.dataflow.rest.resource.JobExecutionResource; import org.springframework.cloud.dataflow.rest.resource.JobExecutionThinResource; import org.springframework.cloud.dataflow.rest.resource.JobInstanceResource; @@ -25,7 +29,12 @@ import org.springframework.hateoas.PagedModel; import org.springframework.hateoas.RepresentationModel; import org.springframework.util.Assert; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.MultiValueMap; +import org.springframework.util.StringUtils; import org.springframework.web.client.RestTemplate; +import org.springframework.web.util.DefaultUriBuilderFactory; +import org.springframework.web.util.UriComponentsBuilder; /** * Implementation for {@link JobOperations}. @@ -90,26 +99,25 @@ public class JobTemplate implements JobOperations { @Override public PagedModel executionList() { - String uriTemplate = executionsLink.getHref(); - uriTemplate = uriTemplate + "?size=2000"; - - return restTemplate.getForObject(uriTemplate, JobExecutionResource.Page.class); + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(executionsLink.getHref()).queryParam("size", "2000"); + return restTemplate.getForObject(builder.toUriString(), JobExecutionResource.Page.class); } @Override - public void executionRestart(long id) { - String uriTemplate = executionLink.expand(id).getHref(); - uriTemplate = uriTemplate + "?restart=true"; + public void executionRestart(long id, String schemaTarget) { + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(executionLink.expand(id).getHref()).queryParam("restart", "true"); - restTemplate.put(uriTemplate, null); + if (StringUtils.hasText(schemaTarget)) { + builder.queryParam("schemaTarget", schemaTarget); + } + restTemplate.put(builder.toUriString(), null); } @Override public PagedModel executionThinList() { - String uriTemplate = executionsLink.getHref(); - uriTemplate = uriTemplate + "?size=2000"; + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(executionsLink.getHref()).queryParam("size", "2000"); - return restTemplate.getForObject(uriTemplate, JobExecutionThinResource.Page.class); + return restTemplate.getForObject(builder.toUriString(), JobExecutionThinResource.Page.class); } @Override @@ -119,36 +127,50 @@ public PagedModel instanceList(String jobName) { @Override public PagedModel executionThinListByJobName(String jobName) { - return restTemplate.getForObject(executionByNameLink.expand(jobName).getHref(), - JobExecutionThinResource.Page.class); + return 
restTemplate.getForObject(executionByNameLink.expand(jobName).getHref(), JobExecutionThinResource.Page.class); } @Override public PagedModel executionListByJobName(String jobName) { - return restTemplate.getForObject(executionByNameLink.expand(jobName).getHref(), - JobExecutionResource.Page.class); + return restTemplate.getForObject(executionByNameLink.expand(jobName).getHref(), JobExecutionResource.Page.class); } @Override - public JobExecutionResource jobExecution(long id) { - return restTemplate.getForObject(executionLink.expand(id).getHref(), JobExecutionResource.class); + public JobExecutionResource jobExecution(long id, String schemaTarget) { + String url = executionLink.expand(id).getHref(); + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(url); + if (StringUtils.hasText(schemaTarget)) { + builder.queryParam("schemaTarget", schemaTarget); + } + + return restTemplate.getForObject(builder.toUriString(), JobExecutionResource.class); } @Override - public JobInstanceResource jobInstance(long id) { - return restTemplate.getForObject(instanceLink.expand(id).getHref(), JobInstanceResource.class); + public JobInstanceResource jobInstance(long id, String schemaTarget) { + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(instanceLink.expand(id).getHref()); + if (StringUtils.hasText(schemaTarget)) { + builder.queryParam("schemaTarget", schemaTarget); + } + return restTemplate.getForObject(builder.toUriString(), JobInstanceResource.class); } @Override - public PagedModel stepExecutionList(long jobExecutionId) { - return restTemplate.getForObject(stepExecutionsLink.expand(jobExecutionId).getHref(), - StepExecutionResource.Page.class); + public PagedModel stepExecutionList(long jobExecutionId, String schemaTarget) { + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(stepExecutionsLink.expand(jobExecutionId).getHref()); + if (StringUtils.hasText(schemaTarget)) { + builder.queryParam("schemaTarget", schemaTarget); + } + return restTemplate.getForObject(builder.toUriString(), StepExecutionResource.Page.class); } @Override - public StepExecutionProgressInfoResource stepExecutionProgress(long jobExecutionId, long stepExecutionId) { - return restTemplate.getForObject(stepExecutionProgressLink.expand(jobExecutionId, stepExecutionId).getHref(), - StepExecutionProgressInfoResource.class); + public StepExecutionProgressInfoResource stepExecutionProgress(long jobExecutionId, long stepExecutionId, String schemaTarget) { + UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(stepExecutionProgressLink.expand(jobExecutionId, stepExecutionId).getHref()); + if (StringUtils.hasText(schemaTarget)) { + builder.queryParam("schemaTarget", schemaTarget); + } + return restTemplate.getForObject(builder.toUriString(), StepExecutionProgressInfoResource.class); } } diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskOperations.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskOperations.java index a4590a04ec..ac9c40a085 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskOperations.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskOperations.java @@ -23,6 +23,7 @@ import javax.naming.OperationNotSupportedException; import org.springframework.cloud.dataflow.rest.resource.CurrentTaskExecutionsResource; +import 
org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource; import org.springframework.cloud.dataflow.rest.resource.LauncherResource; import org.springframework.cloud.dataflow.rest.resource.TaskAppStatusResource; import org.springframework.cloud.dataflow.rest.resource.TaskDefinitionResource; @@ -68,14 +69,14 @@ public interface TaskOperations { * @param arguments the command line arguments * @return long containing the TaskExecutionId */ - long launch(String name, Map properties, List arguments); + LaunchResponseResource launch(String name, Map properties, List arguments); /** * Request the stop of a group {@link org.springframework.cloud.task.repository.TaskExecution}s. * * @param ids comma delimited set of {@link org.springframework.cloud.task.repository.TaskExecution} ids to stop. */ - void stop(String ids); + void stop(String ids, String schemaTarget); /** * Request the stop of a group {@link org.springframework.cloud.task.repository.TaskExecution}s. @@ -83,7 +84,7 @@ public interface TaskOperations { * @param ids comma delimited set of {@link org.springframework.cloud.task.repository.TaskExecution} ids to stop. * @param platform the platform name where the task is executing. */ - void stop(String ids, String platform); + void stop(String ids, String schemaTarget, String platform); /** * Destroy an existing task. @@ -119,7 +120,7 @@ public interface TaskOperations { * @param id identifier of the task execution * @return {@link TaskExecutionResource} */ - TaskExecutionResource taskExecutionStatus(long id); + TaskExecutionResource taskExecutionStatus(long id, String schemaTarget); /** * Return the task execution log. The platform from which to retrieve the log will be set to {@code default}. @@ -150,7 +151,7 @@ public interface TaskOperations { * * @param id identifier of the task execution */ - void cleanup(long id); + void cleanup(long id, String schemaTarget); /** * Cleanup any resources associated with the execution for the id specified. 
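Taken together, the revised `TaskOperations` signatures thread the schema target through the full lifecycle of an execution. The sketch below is an assumption-laden illustration: the `LaunchResponseResource` getter names are inferred from the "execution id and a schema target" response description elsewhere in this patch, not spelled out in this hunk:

[source,java]
----
import java.util.Collections;

import org.springframework.cloud.dataflow.rest.client.TaskOperations;
import org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource;

public class TaskLifecycle {

	// Launch, stop, and clean up one execution, carrying the schema target through each call.
	static void runOnce(TaskOperations tasks) {
		LaunchResponseResource response =
				tasks.launch("my-task", Collections.emptyMap(), Collections.emptyList());
		long executionId = response.getExecutionId();     // assumed getter name
		String schemaTarget = response.getSchemaTarget(); // assumed getter name

		tasks.stop(String.valueOf(executionId), schemaTarget);
		tasks.cleanup(executionId, schemaTarget, true);   // true also removes the history
	}
}
----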
@@ -158,7 +159,7 @@ public interface TaskOperations { * @param id identifier of the task execution * @param removeData delete the history of the execution */ - void cleanup(long id, boolean removeData); + void cleanup(long id, String schemaTarget, boolean removeData); /** diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskTemplate.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskTemplate.java index 77c92127e3..5bcd2dcb70 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskTemplate.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/TaskTemplate.java @@ -16,15 +16,16 @@ package org.springframework.cloud.dataflow.rest.client; +import javax.naming.OperationNotSupportedException; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; - -import javax.naming.OperationNotSupportedException; +import java.util.Objects; import org.springframework.cloud.dataflow.rest.client.support.VersionUtils; import org.springframework.cloud.dataflow.rest.resource.CurrentTaskExecutionsResource; +import org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource; import org.springframework.cloud.dataflow.rest.resource.LauncherResource; import org.springframework.cloud.dataflow.rest.resource.TaskAppStatusResource; import org.springframework.cloud.dataflow.rest.resource.TaskDefinitionResource; @@ -66,6 +67,8 @@ public class TaskTemplate implements TaskOperations { private static final String EXECUTION_RELATION = "tasks/executions/execution"; + private static final String EXECUTION_LAUNCH_RELATION = "tasks/executions/launch"; + private static final String EXECUTION_RELATION_BY_NAME = "tasks/executions/name"; private static final String EXECUTIONS_INFO_RELATION = "tasks/info/executions"; @@ -86,6 +89,8 @@ public class TaskTemplate implements TaskOperations { private final Link executionLink; + private final Link executionLaunchLink; + private final Link executionByNameLink; private final Link executionsCurrentLink; @@ -106,11 +111,12 @@ public class TaskTemplate implements TaskOperations { Assert.notNull(resources.getLink(DEFINITIONS_RELATION), "Definitions relation is required"); Assert.notNull(resources.getLink(DEFINITION_RELATION), "Definition relation is required"); Assert.notNull(restTemplate, "RestTemplate must not be null"); - Assert.notNull(resources.getLink(EXECUTIONS_RELATION), "Executions relation is required"); - Assert.notNull(resources.getLink(EXECUTION_RELATION), "Execution relation is required"); - Assert.notNull(resources.getLink(EXECUTION_RELATION_BY_NAME), "Execution by name relation is required"); + Assert.isTrue(resources.getLink(EXECUTIONS_RELATION).isPresent(), "Executions relation is required"); + Assert.isTrue(resources.getLink(EXECUTION_RELATION).isPresent(), "Execution relation is required"); + Assert.isTrue(resources.getLink(EXECUTION_LAUNCH_RELATION).isPresent(), "Execution launch relation is required"); + Assert.isTrue(resources.getLink(EXECUTION_RELATION_BY_NAME).isPresent(), "Execution by name relation is required"); Assert.notNull(dataFlowServerVersion, "dataFlowVersion must not be null"); - Assert.notNull(resources.getLink(RETRIEVE_LOG), "Log relation is required"); + Assert.isTrue(resources.getLink(RETRIEVE_LOG).isPresent(), "Log relation is required"); this.dataFlowServerVersion = 
dataFlowServerVersion; @@ -131,6 +137,7 @@ public class TaskTemplate implements TaskOperations { this.definitionLink = resources.getLink(DEFINITION_RELATION).get(); this.executionsLink = resources.getLink(EXECUTIONS_RELATION).get(); this.executionLink = resources.getLink(EXECUTION_RELATION).get(); + this.executionLaunchLink = resources.getLink(EXECUTION_LAUNCH_RELATION).get(); this.executionByNameLink = resources.getLink(EXECUTION_RELATION_BY_NAME).get(); this.executionsCurrentLink = resources.getLink(EXECUTIONS_CURRENT_RELATION).get(); if (resources.getLink(EXECUTIONS_INFO_RELATION).isPresent()) { @@ -157,33 +164,44 @@ public LauncherResource.Page listPlatforms() { @Override public TaskDefinitionResource create(String name, String definition, String description) { - MultiValueMap values = new LinkedMultiValueMap(); + MultiValueMap values = new LinkedMultiValueMap<>(); values.add("name", name); values.add("definition", definition); values.add("description", description); - return restTemplate.postForObject(definitionsLink.expand().getHref(), values, - TaskDefinitionResource.class); + return restTemplate.postForObject(definitionsLink.expand().getHref(), values, + TaskDefinitionResource.class); } @Override - public long launch(String name, Map properties, List arguments) { + public LaunchResponseResource launch(String name, Map properties, List arguments) { MultiValueMap values = new LinkedMultiValueMap<>(); - values.add("properties", DeploymentPropertiesUtils.format(properties)); - values.add("arguments", StringUtils.collectionToDelimitedString(arguments, " ")); - return restTemplate.postForObject(executionByNameLink.expand(name).getHref(), values, Long.class, name); + String formattedProperties = DeploymentPropertiesUtils.format(properties); + String commandLineArguments = StringUtils.collectionToDelimitedString(arguments, " "); + values.add("properties", formattedProperties); + values.add("arguments", commandLineArguments); + values.add("name", name); + String url = executionLaunchLink.expand(name).getHref(); + values.remove("name"); + return restTemplate.postForObject(url, values, LaunchResponseResource.class); } @Override - public void stop(String ids) { + public void stop(String ids, String schemaTarget) { MultiValueMap values = new LinkedMultiValueMap<>(); - restTemplate.postForLocation(executionLink.expand(ids).getHref(),values); + if (StringUtils.hasText(schemaTarget)) { + values.add("schemaTarget", schemaTarget); + } + restTemplate.postForLocation(executionLink.expand(ids).getHref(), values); } @Override - public void stop(String ids, String platform) { + public void stop(String ids, String schemaTarget, String platform) { MultiValueMap values = new LinkedMultiValueMap<>(); values.add("platform", platform); - restTemplate.postForLocation(executionLink.expand(ids).getHref(),values); + if (StringUtils.hasText(schemaTarget)) { + values.add("schemaTarget", schemaTarget); + } + restTemplate.postForLocation(executionLink.expand(ids).getHref(), values); } @Override @@ -214,8 +232,14 @@ public TaskExecutionResource.Page executionListByTaskName(String taskName) { } @Override - public TaskExecutionResource taskExecutionStatus(long id) { - return restTemplate.getForObject(executionLink.expand(id).getHref(), TaskExecutionResource.class); + public TaskExecutionResource taskExecutionStatus(long id, String schemaTarget) { + MultiValueMap values = new LinkedMultiValueMap<>(); + values.add("id", id); + if (StringUtils.hasText(schemaTarget)) { + values.add("schemaTarget", schemaTarget); + } + String 
url = executionLink.expand(values).getHref(); + return restTemplate.getForObject(url, TaskExecutionResource.class); } @Override @@ -225,8 +249,8 @@ public String taskExecutionLog(String externalExecutionId) { @Override public String taskExecutionLog(String externalExecutionId, String platform) { - Map map = new HashMap<>(); - map.put("taskExternalExecutionId",externalExecutionId); + Map map = new HashMap<>(); + map.put("taskExternalExecutionId", externalExecutionId); map.put("platformName", platform); return restTemplate.getForObject(retrieveLogLink.expand(map).getHref(), String.class); } @@ -234,24 +258,28 @@ public String taskExecutionLog(String externalExecutionId, String platform) { @Override public Collection currentTaskExecutions() { ParameterizedTypeReference> typeReference = - new ParameterizedTypeReference>() { - }; + new ParameterizedTypeReference>() { + }; return restTemplate - .exchange(executionsCurrentLink.getHref(),HttpMethod.GET,null, typeReference).getBody(); + .exchange(executionsCurrentLink.getHref(), HttpMethod.GET, null, typeReference).getBody(); } @Override - public void cleanup(long id) { - cleanup(id, false); + public void cleanup(long id, String schemaTarget) { + cleanup(id, schemaTarget, false); } @Override - public void cleanup(long id, boolean removeData) { - String uriTemplate = executionLink.expand(id).getHref(); - if (removeData) { - uriTemplate = uriTemplate + "?action=CLEANUP,REMOVE_DATA"; - } - restTemplate.delete(uriTemplate); + public void cleanup(long id, String schemaTarget, boolean removeData) { + MultiValueMap values = new LinkedMultiValueMap<>(); + if (StringUtils.hasText(schemaTarget)) { + values.add("schemaTarget", schemaTarget); + } + String uriTemplate = executionLink.expand(id).getHref(); + if (removeData) { + values.add("action", "CLEANUP,REMOVE_DATA"); + } + restTemplate.delete(uriTemplate, values); } @Override @@ -268,13 +296,13 @@ public void cleanupAllTaskExecutions(boolean completed, String taskName) { @Override public Integer getAllTaskExecutionsCount(boolean completed, String taskName) { - Map map = new HashMap<>(); + Map map = new HashMap<>(); map.put("completed", String.valueOf(completed)); map.put("name", StringUtils.hasText(taskName) ? 
taskName : ""); if (this.executionsInfoLink != null) { - return restTemplate - .getForObject(this.executionsInfoLink.expand(map).getHref(), TaskExecutionsInfoResource.class) - .getTotalExecutions(); + return Objects.requireNonNull( + restTemplate.getForObject(this.executionsInfoLink.expand(map).getHref(), TaskExecutionsInfoResource.class) + ).getTotalExecutions(); } // for backwards-compatibility return zero count return 0; diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientAutoConfiguration.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientAutoConfiguration.java index 2d14dce170..75cbf546ec 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientAutoConfiguration.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientAutoConfiguration.java @@ -21,6 +21,7 @@ import java.util.List; import java.util.Map; +import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -95,7 +96,7 @@ public DataFlowClientAutoConfiguration(@Nullable RestTemplate restTemplate) { @Bean @ConditionalOnMissingBean(DataFlowOperations.class) - public DataFlowOperations dataFlowOperations() throws Exception{ + public DataFlowOperations dataFlowOperations(@Nullable ObjectMapper mapper) throws Exception{ RestTemplate template = DataFlowTemplate.prepareRestTemplate(restTemplate); final HttpClientConfigurer httpClientConfigurer = HttpClientConfigurer.create(new URI(properties.getServerUri())) .skipTlsCertificateVerification(properties.isSkipSslValidation()); @@ -130,7 +131,7 @@ else if (oauth2ClientProperties != null && !oauth2ClientProperties.getRegistrati logger.debug("Not configuring security for accessing the Data Flow Server"); } - return new DataFlowTemplate(new URI(properties.getServerUri()), template); + return new DataFlowTemplate(new URI(properties.getServerUri()), template, mapper); } @Bean diff --git a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/task/Task.java b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/task/Task.java index ee019ace3e..3c0e622ab2 100644 --- a/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/task/Task.java +++ b/spring-cloud-dataflow-rest-client/src/main/java/org/springframework/cloud/dataflow/rest/client/dsl/task/Task.java @@ -22,6 +22,8 @@ import java.util.Map; import java.util.Objects; import java.util.Optional; +import java.util.Set; +import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -30,6 +32,7 @@ import org.springframework.cloud.dataflow.rest.client.TaskOperations; import org.springframework.cloud.dataflow.rest.resource.JobExecutionResource; import org.springframework.cloud.dataflow.rest.resource.JobInstanceResource; +import org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource; import org.springframework.cloud.dataflow.rest.resource.StepExecutionResource; import org.springframework.cloud.dataflow.rest.resource.TaskDefinitionResource; import org.springframework.cloud.dataflow.rest.resource.TaskExecutionResource; @@ -39,7 +42,7 @@ /** * Represents a Task defined on DataFlow server. 
New Task can be defined with the help of a fluent style builder * pattern or use the {@link Task} static utility methods to retrieve existing tasks already defined in DataFlow. - * + *

    * For instance you can define a new task like this:

      *     {@code
    @@ -50,7 +53,7 @@
      *              .build();
      *     }
      * 
    - * + *

    * Next you can launch the task and inspect the executions result. Mind that the task is run asynchronously. *

      *     import org.awaitility.Awaitility;
    @@ -65,7 +68,7 @@
      *          task.executions().forEach( execution -> System.out.println(execution.getExitCode()));
      *     }
      * 
    - * + *

    * Use

    {@code close()}
    to destroy the task manually. Since tasks are auto-closable you can use the * Java try block instead: *
    @@ -82,7 +85,7 @@
      *          } // Task is destroyed.
      *     }
      * 
    - * + *

    * Use the {@link TaskBuilder#allTasks()} and {@link TaskBuilder#findByName(String)} * static helper methods to list or retrieve existing tasks defined in DataFlow. * @@ -90,8 +93,11 @@ */ public class Task implements AutoCloseable { private final String taskName; + private final TaskOperations taskOperations; + private final JobOperations jobOperations; + private final DataFlowOperations dataFlowOperations; Task(String taskName, DataFlowOperations dataFlowOperations) { @@ -107,6 +113,7 @@ public class Task implements AutoCloseable { /** * Fluent API method to create a {@link TaskBuilder}. + * * @param dataFlowOperations {@link DataFlowOperations} Data Flow Rest client instance. * @return A fluent style builder to create tasks. */ @@ -116,28 +123,31 @@ public static TaskBuilder builder(DataFlowOperations dataFlowOperations) { /** * Launch a task without properties or arguments. + * * @return long containing the TaskExecutionId */ - public long launch() { - return this.launch(Collections.EMPTY_LIST); + public LaunchResponseResource launch() { + return this.launch(Collections.emptyList()); } /** * Launch a task with command line arguments. + * * @param arguments the command line arguments. * @return long containing the TaskExecutionId */ - public long launch(List arguments) { - return this.launch(Collections.EMPTY_MAP, arguments); + public LaunchResponseResource launch(List arguments) { + return this.launch(Collections.emptyMap(), arguments); } /** * Launch a task with deployment properties and command line arguments. + * * @param properties the deployment properties. - * @param arguments the command line arguments. + * @param arguments the command line arguments. * @return long containing the TaskExecutionId */ - public long launch(Map properties, List arguments) { + public LaunchResponseResource launch(Map properties, List arguments) { if (properties == null) { throw new IllegalArgumentException("Task properties can't be null!"); } @@ -146,33 +156,35 @@ public long launch(Map properties, List arguments) { /** * Stop all Tasks' running {@link org.springframework.cloud.task.repository.TaskExecution}s. - * + *

    * Note: this functionality is platform dependent! It works for local platform but does nothing on K8s! */ public void stop() { - String commaSeparatedIds = executions().stream() + Map> idTargets = executions().stream() .filter(Objects::nonNull) .filter(e -> e.getTaskExecutionStatus() == TaskExecutionStatus.RUNNING) - .map(TaskExecutionResource::getExecutionId) - .map(String::valueOf) - .collect(Collectors.joining(",")); - if (StringUtils.hasText(commaSeparatedIds)) { - this.taskOperations.stop(commaSeparatedIds); - } + .collect(Collectors.groupingBy(TaskExecutionResource::getSchemaTarget, Collectors.toSet())); + idTargets.forEach((schemaTarget, tasks) -> { + String ids = tasks.stream() + .map(taskExecutionResource -> String.valueOf(taskExecutionResource.getExecutionId())) + .collect(Collectors.joining(",")); + this.taskOperations.stop(ids, schemaTarget); + }); } /** * Stop a list of {@link org.springframework.cloud.task.repository.TaskExecution}s. - * @param taskExecutionIds List of {@link org.springframework.cloud.task.repository.TaskExecution} ids to stop. * - * Note: this functionality is platform dependent! It works for local platform but does nothing on K8s! + * @param taskExecutionIds List of {@link org.springframework.cloud.task.repository.TaskExecution} ids to stop. + *

    + * Note: this functionality is platform dependent! It works for local platform but does nothing on K8s! */ - public void stop(long... taskExecutionIds) { + public void stop(String schemaTarget, long... taskExecutionIds) { String commaSeparatedIds = Stream.of(taskExecutionIds) .map(String::valueOf) .collect(Collectors.joining(",")); if (StringUtils.hasText(commaSeparatedIds)) { - this.taskOperations.stop(commaSeparatedIds); + this.taskOperations.stop(commaSeparatedIds, schemaTarget); } } @@ -189,6 +201,7 @@ public void destroy() { /** * List task executions for this task. + * * @return List of task executions for the given task. */ public Collection executions() { @@ -197,35 +210,35 @@ public Collection executions() { /** * Retrieve task execution by Id. + * * @param executionId Task execution Id * @return Task executions for the given task execution id. */ - public Optional execution(long executionId) { - return this.executions().stream() - .filter(Objects::nonNull) - .filter(e -> e.getExecutionId() == executionId) - .findFirst(); + public Optional execution(long executionId, String schemaTarget) { + return Optional.ofNullable(this.taskOperations.taskExecutionStatus(executionId, schemaTarget)); } /** * Find {@link TaskExecutionResource} by a parent execution id. + * * @param parentExecutionId parent task execution id. * @return Return TaskExecutionResource */ - public Optional executionByParentExecutionId(long parentExecutionId) { + public Optional executionByParentExecutionId(long parentExecutionId, String schemaTarget) { return this.executions().stream() .filter(Objects::nonNull) - .filter(e -> e.getParentExecutionId() == parentExecutionId) + .filter(e -> e.getParentExecutionId() == parentExecutionId && e.getSchemaTarget().equals(schemaTarget)) .findFirst(); } /** * Task execution status + * * @param executionId execution Id * @return returns the task execution status. */ - public TaskExecutionStatus executionStatus(long executionId) { - return this.execution(executionId) + public TaskExecutionStatus executionStatus(long executionId, String schemaTarget) { + return this.execution(executionId, schemaTarget) .map(TaskExecutionResource::getTaskExecutionStatus) .orElse(TaskExecutionStatus.UNKNOWN); } @@ -275,8 +288,8 @@ public Collection jobExecutionResources() { * @param jobExecutionId the job execution id. * @return Returns list of {@link StepExecutionResource} belonging to the job. */ - public Collection jobStepExecutions(long jobExecutionId) { - return this.jobOperations.stepExecutionList(jobExecutionId).getContent(); + public Collection jobStepExecutions(long jobExecutionId, String schemaTarget) { + return this.jobOperations.stepExecutionList(jobExecutionId, schemaTarget).getContent(); } /** @@ -311,10 +324,11 @@ public void close() { /** * Remove specified task execution for the specified task execution id. + * * @param taskExecutionId the id of the task execution to be removed. */ - public void cleanupTaskExecution(long taskExecutionId) { - this.taskOperations.cleanup(taskExecutionId, true); + public void cleanupTaskExecution(long taskExecutionId, String schemaTarget) { + this.taskOperations.cleanup(taskExecutionId, schemaTarget, true); } /** @@ -326,12 +340,13 @@ public void cleanupAllTaskExecutions() { /** * Retrieve task executions for child task name associated with this task's instance. + * * @param childTaskName to be used to search for the associated task executions. * @return List of task executions for the given child task. 
*/ public Optional composedTaskChildExecution(String childTaskName) { Collection taskExecutions = taskOperations.executionListByTaskName(this.taskName + "-" + childTaskName).getContent(); - return (taskExecutions.size() == 1) ? Optional.of((TaskExecutionResource) taskExecutions.stream().toArray()[0]): Optional.empty(); + return (taskExecutions.size() == 1) ? Optional.of((TaskExecutionResource) taskExecutions.stream().toArray()[0]) : Optional.empty(); } } diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataflowTemplateTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataflowTemplateTests.java index 9cf3fcc411..712eda3b7b 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataflowTemplateTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/DataflowTemplateTests.java @@ -23,6 +23,8 @@ import java.util.Optional; import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.datatype.jdk8.Jdk8Module; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -37,8 +39,10 @@ import org.springframework.cloud.dataflow.rest.Version; import org.springframework.cloud.dataflow.rest.job.StepExecutionHistory; import org.springframework.cloud.dataflow.rest.resource.RootResource; +import org.springframework.cloud.dataflow.rest.support.jackson.Jackson2DataflowModule; import org.springframework.hateoas.Link; import org.springframework.hateoas.LinkRelation; +import org.springframework.hateoas.mediatype.hal.Jackson2HalModule; import org.springframework.http.converter.HttpMessageConverter; import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter; import org.springframework.web.client.ResourceAccessException; @@ -60,8 +64,14 @@ */ public class DataflowTemplateTests { + private ObjectMapper mapper; + @Before public void setup() { + mapper.registerModule(new Jdk8Module()); + mapper.registerModule(new Jackson2HalModule()); + mapper.registerModule(new JavaTimeModule()); + mapper.registerModule(new Jackson2DataflowModule()); System.setProperty("sun.net.client.defaultConnectTimeout", String.valueOf(100)); } @@ -74,7 +84,7 @@ public void shutdown() { public void testDataFlowTemplateContructorWithNullUri() throws URISyntaxException { try { - new DataFlowTemplate(null); + new DataFlowTemplate(null, mapper); } catch (IllegalArgumentException e) { assertEquals("The provided baseURI must not be null.", e.getMessage()); @@ -86,7 +96,7 @@ public void testDataFlowTemplateContructorWithNullUri() throws URISyntaxExceptio @Test(expected = ResourceAccessException.class) public void testDataFlowTemplateContructorWithNonExistingUri() throws URISyntaxException { - new DataFlowTemplate(new URI("https://doesnotexist:1234")); + new DataFlowTemplate(new URI("https://doesnotexist:1234"), mapper); } @Test @@ -259,6 +269,6 @@ private DataFlowTemplate getMockedDataFlowTemplate(boolean isLinksActive) throws converters.add(new MappingJackson2HttpMessageConverter()); when(restTemplate.getMessageConverters()).thenReturn(converters); URI uri = new URI("foo"); - return new DataFlowTemplate(uri, restTemplate); + return new DataFlowTemplate(uri, restTemplate, mapper); } } diff --git 
a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/TaskTemplateTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/TaskTemplateTests.java index 59205c8b3c..2672fa150d 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/TaskTemplateTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/TaskTemplateTests.java @@ -44,7 +44,6 @@ public class TaskTemplateTests { @Before public void setup() { restTemplate = mock(RestTemplate.class); - } @Test @@ -77,10 +76,11 @@ private void validateExecutionLinkNotPresent(String version) { Assert.assertFalse(testResource.isLinkRequested(CURRENT_TASK_EXECUTION_LINK)); } - public static class TestResource extends RepresentationModel { + public static class TestResource extends RepresentationModel { - private Map linksRequested = new HashMap<>(); + private final Map linksRequested = new HashMap<>(); + @Override public Optional getLink(String rel) { if (this.linksRequested.containsKey(rel)) { Long count = this.linksRequested.get(rel); diff --git a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientPropertiesTests.java b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientPropertiesTests.java index def6ed7f98..d87b76e5c0 100644 --- a/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientPropertiesTests.java +++ b/spring-cloud-dataflow-rest-client/src/test/java/org/springframework/cloud/dataflow/rest/client/config/DataFlowClientPropertiesTests.java @@ -89,7 +89,7 @@ public void testLegacyOauth() { assertThat(properties.getAuthentication().getClientId()).isEqualTo("id1"); assertThat(properties.getAuthentication().getClientSecret()).isEqualTo("secret1"); assertThat(properties.getAuthentication().getTokenUri()).isEqualTo("uri1"); - assertThat(properties.getAuthentication().getScope()).containsExactly("s1", "s2"); + assertThat(properties.getAuthentication().getScope()).containsExactlyInAnyOrder("s1", "s2"); }); } diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecution.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecution.java index 2c029d36da..0c2666e7ff 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecution.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecution.java @@ -33,16 +33,19 @@ public class TaskJobExecution { private final int stepExecutionCount; - public TaskJobExecution(long taskId, JobExecution jobExecution, boolean isTaskDefined) { - this(taskId, jobExecution, isTaskDefined, 0); + private final String schemaTarget; + + public TaskJobExecution(long taskId, JobExecution jobExecution, boolean isTaskDefined, String schemaTarget) { + this(taskId, jobExecution, isTaskDefined, 0, schemaTarget); } - public TaskJobExecution(long taskId, JobExecution jobExecution, boolean isTaskDefined, int stepExecutionCount) { + public TaskJobExecution(long taskId, JobExecution jobExecution, boolean isTaskDefined, int stepExecutionCount, String schemaTarget) { Assert.notNull(jobExecution, "jobExecution must not be null"); 
this.taskId = taskId; this.jobExecution = jobExecution; this.isTaskDefined = isTaskDefined; this.stepExecutionCount = stepExecutionCount; + this.schemaTarget = schemaTarget; } /** @@ -74,4 +77,19 @@ public boolean isTaskDefined() { public int getStepExecutionCount() { return stepExecutionCount; } + + public String getSchemaTarget() { + return schemaTarget; + } + + @Override + public String toString() { + return "TaskJobExecution{" + + "taskId=" + taskId + + ", isTaskDefined=" + isTaskDefined + + ", jobExecution=" + jobExecution + + ", stepExecutionCount=" + stepExecutionCount + + ", schemaTarget='" + schemaTarget + '\'' + + '}'; + } } diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecutionRel.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecutionRel.java index e8a8c0b837..7c08746f4a 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecutionRel.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/job/TaskJobExecutionRel.java @@ -20,7 +20,9 @@ import java.util.Collections; import java.util.List; + import org.springframework.cloud.dataflow.core.TaskManifest; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.util.Assert; @@ -32,7 +34,7 @@ */ public class TaskJobExecutionRel { - private final TaskExecution taskExecution; + private final AggregateTaskExecution taskExecution; private final List<Long> jobExecutionIds; @@ -49,7 +51,7 @@ public class TaskJobExecutionRel { * @param taskManifest to be associated with the task execution. * @param composedTaskJobExecution to be associated with the task execution. */ - public TaskJobExecutionRel(TaskExecution taskExecution, List<Long> jobExecutionIds, TaskManifest taskManifest, TaskJobExecution composedTaskJobExecution) { + public TaskJobExecutionRel(AggregateTaskExecution taskExecution, List<Long> jobExecutionIds, TaskManifest taskManifest, TaskJobExecution composedTaskJobExecution) { Assert.notNull(taskExecution, "taskExecution must not be null"); this.taskExecution = taskExecution; this.taskManifest = taskManifest; @@ -66,7 +68,7 @@ public TaskJobExecutionRel(TaskExecution taskExecution, List jobExecutionI /** * @return the taskExecution for this relationship.
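+ * The returned {@link AggregateTaskExecution} also carries the schema target and platform name of the execution.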
*/ - public TaskExecution getTaskExecution() { + public AggregateTaskExecution getTaskExecution() { return taskExecution; } diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/AppRegistrationResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/AppRegistrationResource.java index 11a754a683..e224c73cac 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/AppRegistrationResource.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/AppRegistrationResource.java @@ -16,10 +16,9 @@ package org.springframework.cloud.dataflow.rest.resource; -import java.util.HashSet; import java.util.Set; -import org.springframework.cloud.dataflow.core.AppBootSchemaVersion; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; import org.springframework.hateoas.PagedModel; import org.springframework.hateoas.RepresentationModel; diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/DetailedAppRegistrationResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/DetailedAppRegistrationResource.java index 42576746e3..f420a12fd0 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/DetailedAppRegistrationResource.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/DetailedAppRegistrationResource.java @@ -24,7 +24,7 @@ import java.util.Set; import org.springframework.boot.configurationmetadata.ConfigurationMetadataProperty; -import org.springframework.cloud.dataflow.core.AppBootSchemaVersion; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; import org.springframework.hateoas.PagedModel; /** diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionResource.java index cc5dd8038d..605342983c 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionResource.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/JobExecutionResource.java @@ -1,5 +1,5 @@ /* - * Copyright 2016-2019 the original author or authors. + * Copyright 2016-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -41,6 +41,7 @@ * @author Glenn Renfro * @author Gunnar Hillert * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ public class JobExecutionResource extends RepresentationModel { @@ -86,6 +87,8 @@ public class JobExecutionResource extends RepresentationModel { + private long executionId; + private String schemaTarget; + + public LaunchResponseResource() { + } + + public LaunchResponseResource(long executionId, String schemaTarget) { + this.executionId = executionId; + this.schemaTarget = schemaTarget; + } + + public long getExecutionId() { + return executionId; + } + + public String getSchemaTarget() { + return schemaTarget; + } + + public void setExecutionId(long executionId) { + this.executionId = executionId; + } + + public void setSchemaTarget(String schemaTarget) { + this.schemaTarget = schemaTarget; + } +} diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/SchemaVersionTargetResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/SchemaVersionTargetResource.java index 0dbf88d03d..022037eca4 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/SchemaVersionTargetResource.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/SchemaVersionTargetResource.java @@ -16,11 +16,11 @@ package org.springframework.cloud.dataflow.rest.resource; -import org.springframework.cloud.dataflow.core.AppBootSchemaVersion; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; import org.springframework.hateoas.RepresentationModel; /** - * Resource for {@link org.springframework.cloud.dataflow.core.SchemaVersionTarget} + * Resource for {@link org.springframework.cloud.dataflow.schema.SchemaVersionTarget} * @author Corneil du Plessis */ public class SchemaVersionTargetResource extends RepresentationModel { diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/SchemaVersionTargetsResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/SchemaVersionTargetsResource.java index 2dd010a4e6..8dd4d146f8 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/SchemaVersionTargetsResource.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/SchemaVersionTargetsResource.java @@ -21,7 +21,7 @@ import org.springframework.hateoas.RepresentationModel; /** - * Resource for {@link org.springframework.cloud.dataflow.core.SchemaVersionTargets} + * Resource for {@link org.springframework.cloud.dataflow.schema.SchemaVersionTargets} * @author Corneil du Plessis */ public class SchemaVersionTargetsResource extends RepresentationModel { diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/StepExecutionResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/StepExecutionResource.java index 3058f81999..206fd21356 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/StepExecutionResource.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/StepExecutionResource.java @@ -32,6 +32,8 @@ public class 
StepExecutionResource extends RepresentationModel<StepExecutionResource> { } diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResource.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResource.java index 441ff343da..690cf50ba9 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResource.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResource.java @@ -23,9 +23,11 @@ import java.util.Map; import org.springframework.batch.core.JobExecution; + import org.springframework.cloud.dataflow.core.TaskManifest; import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; import org.springframework.cloud.dataflow.rest.job.TaskJobExecutionRel; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.hateoas.PagedModel; import org.springframework.hateoas.RepresentationModel; @@ -38,6 +40,7 @@ * @author Glenn Renfro * @author Gunnar Hillert * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ public class TaskExecutionResource extends RepresentationModel<TaskExecutionResource> { @@ -119,6 +122,8 @@ public class TaskExecutionResource extends RepresentationModel(); } @@ -142,6 +147,8 @@ public TaskExecutionResource(TaskJobExecutionRel taskJobExecutionRel) { this.endTime = taskJobExecutionRel.getTaskExecution().getEndTime(); this.errorMessage = taskJobExecutionRel.getTaskExecution().getErrorMessage(); this.externalExecutionId = taskJobExecutionRel.getTaskExecution().getExternalExecutionId(); + this.schemaTarget = taskJobExecutionRel.getTaskExecution().getSchemaTarget(); + this.platformName = taskJobExecutionRel.getTaskExecution().getPlatformName(); if (taskJobExecutionRel.getJobExecutionIds() == null) { this.jobExecutionIds = Collections.emptyList(); } @@ -169,11 +176,12 @@ public TaskExecutionResource(TaskJobExecutionRel taskJobExecutionRel) { * @param taskExecution contains the {@link TaskExecution} * @param composedTaskJobExecution the optional composed task execution. */ - public TaskExecutionResource(TaskExecution taskExecution, TaskJobExecution composedTaskJobExecution) { + public TaskExecutionResource(AggregateTaskExecution taskExecution, TaskJobExecution composedTaskJobExecution) { Assert.notNull(taskExecution, "taskExecution must not be null"); this.executionId = taskExecution.getExecutionId(); this.exitCode = taskExecution.getExitCode(); this.taskName = taskExecution.getTaskName(); + this.schemaTarget = taskExecution.getSchemaTarget(); this.exitMessage = taskExecution.getExitMessage(); this.arguments = Collections.unmodifiableList(taskExecution.getArguments()); this.startTime = taskExecution.getStartTime(); @@ -193,12 +201,13 @@ public TaskExecutionResource(TaskExecution taskExecution, TaskJobExecution compo * @param taskManifest contains the {@link TaskManifest} * @param composedTaskJobExecution The optional composed task execution.
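+ * @throws IllegalArgumentException if {@code taskExecution} or {@code taskManifest} is {@code null}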
*/ - public TaskExecutionResource(TaskExecution taskExecution, TaskManifest taskManifest, TaskJobExecution composedTaskJobExecution) { + public TaskExecutionResource(AggregateTaskExecution taskExecution, TaskManifest taskManifest, TaskJobExecution composedTaskJobExecution) { Assert.notNull(taskExecution, "taskExecution must not be null"); Assert.notNull(taskManifest, "taskManifest must not be null"); this.executionId = taskExecution.getExecutionId(); this.exitCode = taskExecution.getExitCode(); this.taskName = taskExecution.getTaskName(); + this.schemaTarget = taskExecution.getSchemaTarget(); this.exitMessage = taskExecution.getExitMessage(); this.arguments = Collections.unmodifiableList(taskExecution.getArguments()); this.startTime = taskExecution.getStartTime(); @@ -281,6 +290,14 @@ public void setPlatformName(String platformName) { this.platformName = platformName; } + public String getSchemaTarget() { + return schemaTarget; + } + + public void setSchemaTarget(String schemaTarget) { + this.schemaTarget = schemaTarget; + } + public void setTaskExecutionStatus(String taskExecutionStatus) { this.taskExecutionStatus = taskExecutionStatus; } diff --git a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtils.java b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtils.java index fa6ce275db..cd7d7d8d2a 100644 --- a/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtils.java +++ b/spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtils.java @@ -113,23 +113,25 @@ public static List parseParamList(String s, String delimiter) { // get raw candidates as simple comma split String[] candidates = StringUtils.delimitedListToStringArray(s, delimiter); for (int i = 0; i < candidates.length; i++) { - if (i > 0 && !candidates[i].contains("=") || (i > 0 && candidates[i].contains("=") && !startsWithDeploymentPropertyPrefix(candidates[i]))) { - // we don't have '=' so this has to be latter parts of - // a comma delimited value, append it to previously added - // key/value pair. - // we skip first as we would not have anything to append to. this - // would happen if dep prop string is malformed and first given - // key/value pair is not actually a key/value. - pairs.set(pairs.size() - 1, pairs.get(pairs.size() - 1) + delimiter + candidates[i]); - } - else { - // we have a key/value pair having '=', or malformed first pair - if (!startsWithDeploymentPropertyPrefix(candidates[i])) { - throw new IllegalArgumentException( - "Only deployment property keys starting with 'app.' or 'scheduler' or 'deployer.' or 'version.'" + - " allowed."); + String candidate = candidates[i]; + if(StringUtils.hasText(candidate)) { + if (i > 0 && !candidate.contains("=") || (i > 0 && candidate.contains("=") && !startsWithDeploymentPropertyPrefix(candidate))) { + // we don't have '=' so this has to be latter parts of + // a comma delimited value, append it to previously added + // key/value pair. + // we skip first as we would not have anything to append to. this + // would happen if dep prop string is malformed and first given + // key/value pair is not actually a key/value. 
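+ // e.g. with delimiter "," the raw split of "app.x=a,b,deployer.y=c" is
+ // ["app.x=a", "b", "deployer.y=c"]; "b" has no '=' and no recognized prefix,
+ // so it is re-joined below into the previous pair, giving "app.x=a,b".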
+ pairs.set(pairs.size() - 1, pairs.get(pairs.size() - 1) + delimiter + candidate); + } else { + // we have a key/value pair having '=', or malformed first pair + if (!startsWithDeploymentPropertyPrefix(candidate)) { + throw new IllegalArgumentException( + "Only deployment property keys starting with 'app.' or 'scheduler' or 'deployer.' or 'version.'" + + " allowed. Not " + candidate); + } + pairs.add(candidate); } - pairs.add(candidates[i]); } } diff --git a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResourceTests.java b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResourceTests.java index e7c25cb710..468e71398b 100644 --- a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResourceTests.java +++ b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/resource/TaskExecutionResourceTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2018-2021 the original author or authors. + * Copyright 2018-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,20 +20,23 @@ import java.util.Collections; import java.util.Date; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.JobExecution; import org.springframework.cloud.dataflow.core.TaskManifest; import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; import org.springframework.cloud.dataflow.rest.job.TaskJobExecutionRel; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.deployer.spi.core.AppDefinition; import org.springframework.cloud.deployer.spi.core.AppDeploymentRequest; -import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.core.io.UrlResource; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.assertj.core.api.Assertions.assertThat; /** * Provides tests for the {@link TaskExecutionResource} class.
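+ * Most tests iterate over every {@link AppBootSchemaVersion} so that all schema version targets are exercised.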
@@ -41,106 +44,131 @@ * @author Gunnar Hillert * @author Ilayaperumal Gopinathan * @author Glenn Renfro + * @author Corneil du Plessis */ public class TaskExecutionResourceTests { @Test - public void testTaskExecutionStatusWithNoTaskExecutionSet() { + public void testTaskExecutionStatusWithNoTaskExecutionSet() { final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(); - assertEquals(TaskExecutionStatus.UNKNOWN, taskExecutionResource.getTaskExecutionStatus()); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.UNKNOWN); } @Test - public void testTaskExecutionStatusWithNoStartTime() { - final TaskExecution taskExecution = new TaskExecution(); - final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, null); - assertEquals(TaskExecutionStatus.UNKNOWN, taskExecutionResource.getTaskExecutionStatus()); + public void testTaskExecutionStatusWithNoStartTime() { + for (AppBootSchemaVersion version : AppBootSchemaVersion.values()) { + SchemaVersionTarget target = SchemaVersionTarget.createDefault(version); + final AggregateTaskExecution taskExecution = new AggregateTaskExecution(); + taskExecution.setSchemaTarget(target.getName()); + final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, null); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.UNKNOWN); + } } @Test - public void testTaskExecutionStatusWithRunningTaskExecution() { - final TaskExecution taskExecution = new TaskExecution(); - taskExecution.setStartTime(new Date()); - final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, null); - assertEquals(TaskExecutionStatus.RUNNING, taskExecutionResource.getTaskExecutionStatus()); - assertNull(taskExecutionResource.getExitCode()); + public void testTaskExecutionStatusWithRunningTaskExecution() { + for (AppBootSchemaVersion version : AppBootSchemaVersion.values()) { + SchemaVersionTarget target = SchemaVersionTarget.createDefault(version); + final AggregateTaskExecution taskExecution = new AggregateTaskExecution(); + taskExecution.setSchemaTarget(target.getName()); + taskExecution.setStartTime(new Date()); + final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, null); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.RUNNING); + assertThat(taskExecutionResource.getExitCode()).isNull(); + } } @Test - public void testTaskExecutionStatusWithSuccessfulTaskExecution() { - final TaskExecution taskExecution = getDefaultTaskExecution(); - final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, null); - assertEquals(TaskExecutionStatus.COMPLETE, taskExecutionResource.getTaskExecutionStatus()); + public void testTaskExecutionStatusWithSuccessfulTaskExecution() { + for (AppBootSchemaVersion version : AppBootSchemaVersion.values()) { + SchemaVersionTarget target = SchemaVersionTarget.createDefault(version); + final AggregateTaskExecution taskExecution = getDefaultTaskExecution(target.getName()); + final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, null); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.COMPLETE); + } } @Test - public void testCTRExecutionStatusWithSuccessfulJobExecution() { - final TaskExecution taskExecution = getDefaultTaskExecution(); - JobExecution jobExecution = new 
JobExecution(1L); - jobExecution.setExitStatus(ExitStatus.COMPLETED); - TaskJobExecution taskJobExecution = new TaskJobExecution(taskExecution.getExecutionId(), jobExecution, true); - final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, taskJobExecution); - assertEquals(TaskExecutionStatus.COMPLETE, taskExecutionResource.getTaskExecutionStatus()); + public void testCTRExecutionStatusWithSuccessfulJobExecution() { + for (AppBootSchemaVersion version : AppBootSchemaVersion.values()) { + SchemaVersionTarget target = SchemaVersionTarget.createDefault(version); + final AggregateTaskExecution taskExecution = getDefaultTaskExecution(target.getName()); + JobExecution jobExecution = new JobExecution(1L); + jobExecution.setExitStatus(ExitStatus.COMPLETED); + TaskJobExecution taskJobExecution = new TaskJobExecution(taskExecution.getExecutionId(), jobExecution, true, target.getName()); + final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, taskJobExecution); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.COMPLETE); + + } } @Test - public void testCTRExecutionStatusWithFailedJobExecution() { - final TaskExecution taskExecution = new TaskExecution(); + public void testCTRExecutionStatusWithFailedJobExecution() { + final AggregateTaskExecution taskExecution = new AggregateTaskExecution(); taskExecution.setStartTime(new Date()); taskExecution.setEndTime(new Date()); taskExecution.setExitCode(0); JobExecution jobExecution = new JobExecution(1L); jobExecution.setExitStatus(ExitStatus.FAILED); - TaskJobExecution taskJobExecution = new TaskJobExecution(taskExecution.getExecutionId(), jobExecution, true); + final String defaultSchemaTarget = SchemaVersionTarget.defaultTarget().getName(); + TaskJobExecution taskJobExecution = new TaskJobExecution(taskExecution.getExecutionId(), jobExecution, true, defaultSchemaTarget); final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, taskJobExecution); - assertEquals(TaskExecutionStatus.ERROR, taskExecutionResource.getTaskExecutionStatus()); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.ERROR); } @Test - public void testTaskExecutionStatusWithFailedTaskExecution() { - final TaskExecution taskExecution = new TaskExecution(); + public void testTaskExecutionStatusWithFailedTaskExecution() { + final AggregateTaskExecution taskExecution = new AggregateTaskExecution(); taskExecution.setStartTime(new Date()); taskExecution.setEndTime(new Date()); taskExecution.setExitCode(123); final TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskExecution, null); - assertEquals(TaskExecutionStatus.ERROR, taskExecutionResource.getTaskExecutionStatus()); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.ERROR); } @Test - public void testTaskExecutionForTaskExecutionRel() throws Exception{ - final TaskExecution taskExecution = getDefaultTaskExecution(); - TaskManifest taskManifest = new TaskManifest(); - taskManifest.setPlatformName("testplatform"); - taskManifest.setTaskDeploymentRequest(new AppDeploymentRequest(new AppDefinition("testapp", Collections.emptyMap()), new UrlResource("http://foo"))); - TaskJobExecutionRel taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, new ArrayList<>(), taskManifest, null); - TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskJobExecutionRel); - 
assertEquals("testplatform", taskExecutionResource.getPlatformName()); - assertEquals(TaskExecutionStatus.COMPLETE, taskExecutionResource.getTaskExecutionStatus()); - taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, new ArrayList<>(), null, null); - taskExecutionResource = new TaskExecutionResource(taskJobExecutionRel); - assertNull(taskExecutionResource.getPlatformName()); - assertEquals(TaskExecutionStatus.COMPLETE, taskExecutionResource.getTaskExecutionStatus()); - JobExecution jobExecution = new JobExecution(1L, null, "foo"); - jobExecution.setExitStatus(ExitStatus.FAILED); - - TaskJobExecution ctrTaskJobExecution = new TaskJobExecution(1, jobExecution, true); - taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, new ArrayList<>(), null, ctrTaskJobExecution); - taskExecutionResource = new TaskExecutionResource(taskJobExecutionRel); - assertNull(taskExecutionResource.getPlatformName()); - assertEquals(TaskExecutionStatus.ERROR, taskExecutionResource.getTaskExecutionStatus()); - jobExecution.setExitStatus(ExitStatus.COMPLETED); - ctrTaskJobExecution = new TaskJobExecution(1, jobExecution, true); - taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, new ArrayList<>(), null, ctrTaskJobExecution); - taskExecutionResource = new TaskExecutionResource(taskJobExecutionRel); - assertNull(taskExecutionResource.getPlatformName()); - assertEquals(TaskExecutionStatus.COMPLETE, taskExecutionResource.getTaskExecutionStatus()); + public void testTaskExecutionForTaskExecutionRel() throws Exception { + for (AppBootSchemaVersion version : AppBootSchemaVersion.values()) { + SchemaVersionTarget target = SchemaVersionTarget.createDefault(version); + + final AggregateTaskExecution taskExecution = getDefaultTaskExecution(target.getName()); + TaskManifest taskManifest = new TaskManifest(); + taskManifest.setPlatformName("testplatform"); + taskManifest.setTaskDeploymentRequest(new AppDeploymentRequest(new AppDefinition("testapp", Collections.emptyMap()), new UrlResource("http://foo"))); + TaskJobExecutionRel taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, new ArrayList<>(), taskManifest, null); + TaskExecutionResource taskExecutionResource = new TaskExecutionResource(taskJobExecutionRel); + assertThat(taskExecutionResource.getPlatformName()).isEqualTo("testplatform"); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.COMPLETE); + taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, new ArrayList<>(), null, null); + taskExecutionResource = new TaskExecutionResource(taskJobExecutionRel); + assertThat(taskExecutionResource.getPlatformName()).isNull(); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.COMPLETE); + JobExecution jobExecution = new JobExecution(1L, null, "foo"); + jobExecution.setExitStatus(ExitStatus.FAILED); + + TaskJobExecution ctrTaskJobExecution = new TaskJobExecution(1, jobExecution, true, target.getName()); + taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, new ArrayList<>(), null, ctrTaskJobExecution); + taskExecutionResource = new TaskExecutionResource(taskJobExecutionRel); + assertThat(taskExecutionResource.getPlatformName()).isNull(); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.ERROR); + jobExecution.setExitStatus(ExitStatus.COMPLETED); + ctrTaskJobExecution = new TaskJobExecution(1, jobExecution, true, target.getName()); + taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, new ArrayList<>(), 
null, ctrTaskJobExecution); + taskExecutionResource = new TaskExecutionResource(taskJobExecutionRel); + assertThat(taskExecutionResource.getPlatformName()).isNull(); + assertThat(taskExecutionResource.getTaskExecutionStatus()).isEqualTo(TaskExecutionStatus.COMPLETE); + } } - private TaskExecution getDefaultTaskExecution() { - final TaskExecution taskExecution = new TaskExecution(); + private AggregateTaskExecution getDefaultTaskExecution(String schemaTarget) { + if(!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + final AggregateTaskExecution taskExecution = new AggregateTaskExecution(); taskExecution.setStartTime(new Date()); taskExecution.setEndTime(new Date()); taskExecution.setExitCode(0); + taskExecution.setSchemaTarget(schemaTarget); return taskExecution; } diff --git a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtilsTests.java b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtilsTests.java index fd85fd98c6..af880d3319 100644 --- a/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtilsTests.java +++ b/spring-cloud-dataflow-rest-resource/src/test/java/org/springframework/cloud/dataflow/rest/util/DeploymentPropertiesUtilsTests.java @@ -87,7 +87,7 @@ public void testDeploymentPropertiesParsing() { fail("Illegal Argument Exception expected."); } catch (Exception e) { - assertTrue(e.getMessage().equals("Only deployment property keys starting with 'app.' or 'scheduler' or 'deployer.' or 'version.' allowed.")); + assertThat(e.getMessage()).isEqualTo("Only deployment property keys starting with 'app.' or 'scheduler' or 'deployer.' or 'version.' allowed. Not invalidkeyvalue"); } props = DeploymentPropertiesUtils.parse("deployer.foo=bar,invalidkeyvalue2"); @@ -124,7 +124,7 @@ public void testDeploymentPropertiesParsing2() { fail("Illegal Argument Exception expected."); } catch (Exception e) { - assertTrue(e.getMessage().equals("Only deployment property keys starting with 'app.' or 'scheduler' or 'deployer.' or 'version.' allowed.")); + assertThat(e.getMessage()).isEqualTo("Only deployment property keys starting with 'app.' or 'scheduler' or 'deployer.' or 'version.' allowed. 
Not a=b"); } props = DeploymentPropertiesUtils.parseArgumentList("a=b c=d", " "); diff --git a/spring-cloud-dataflow-schema-core/pom.xml b/spring-cloud-dataflow-schema-core/pom.xml new file mode 100644 index 0000000000..6d7bbb39e8 --- /dev/null +++ b/spring-cloud-dataflow-schema-core/pom.xml @@ -0,0 +1,70 @@ + + + 4.0.0 + + org.springframework.cloud + spring-cloud-dataflow-parent + 2.11.0-SNAPSHOT + ../spring-cloud-dataflow-parent + + spring-cloud-dataflow-schema-core + jar + + true + + + + org.springframework + spring-core + + + org.springframework + spring-context + compile + + + org.springframework.cloud + spring-cloud-task-batch + + + org.springframework.hateoas + spring-hateoas + + + org.slf4j + slf4j-api + + + com.fasterxml.jackson.core + jackson-annotations + + + com.fasterxml.jackson.core + jackson-databind + + + junit + junit + test + + + org.junit.jupiter + junit-jupiter-api + + + org.junit.jupiter + junit-jupiter-engine + test + + + org.assertj + assertj-core + test + + + org.junit.jupiter + junit-jupiter-params + test + + + \ No newline at end of file diff --git a/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AggregateTaskExecution.java b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AggregateTaskExecution.java new file mode 100644 index 0000000000..71c5e6a5df --- /dev/null +++ b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AggregateTaskExecution.java @@ -0,0 +1,243 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.schema; + +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + + +import org.springframework.cloud.task.repository.TaskExecution; +import org.springframework.util.Assert; + +/** + * Contains the attributes of a {@link TaskExecution} as well as the name of the {@link SchemaVersionTarget}. + * + * @author Corneil du Plessis + */ +public class AggregateTaskExecution { + /** + * The unique id associated with the task execution. + */ + private long executionId; + + /** + * The parent task execution id. + */ + private Long parentExecutionId; + + /** + * The recorded exit code for the task. + */ + private Integer exitCode; + + /** + * User defined name for the task. + */ + private String taskName; + + /** + * Time of when the task was started. + */ + private Date startTime; + + /** + * Timestamp of when the task was completed/terminated. + */ + private Date endTime; + + /** + * Message returned from the task or stacktrace. + */ + private String exitMessage; + + /** + * Id assigned to the task by the platform. + * + * @since 1.1.0 + */ + private String externalExecutionId; + + /** + * Error information available upon the failure of a task. 
+ * + * @since 1.1.0 + */ + private String errorMessage; + + private String schemaTarget; + + private String platformName; + /** + * The arguments that were used for this task execution. + */ + private List<String> arguments; + + public AggregateTaskExecution() { + this.arguments = new ArrayList<>(); + } + + public AggregateTaskExecution(long executionId, Integer exitCode, String taskName, + Date startTime, Date endTime, String exitMessage, List<String> arguments, + String errorMessage, String externalExecutionId, Long parentExecutionId, String platformName, String schemaTarget) { + + Assert.notNull(arguments, "arguments must not be null"); + this.executionId = executionId; + this.exitCode = exitCode; + this.taskName = taskName; + this.exitMessage = exitMessage; + this.arguments = new ArrayList<>(arguments); + this.startTime = (startTime != null) ? (Date) startTime.clone() : null; + this.endTime = (endTime != null) ? (Date) endTime.clone() : null; + this.errorMessage = errorMessage; + this.externalExecutionId = externalExecutionId; + this.parentExecutionId = parentExecutionId; + this.schemaTarget = schemaTarget; + this.platformName = platformName; + } + + public AggregateTaskExecution(long executionId, Integer exitCode, String taskName, + Date startTime, Date endTime, String exitMessage, List<String> arguments, + String errorMessage, String externalExecutionId, String platformName, String schemaTarget) { + + this(executionId, exitCode, taskName, startTime, endTime, exitMessage, arguments, + errorMessage, externalExecutionId, null, platformName, schemaTarget); + } + + public long getExecutionId() { + return this.executionId; + } + + public Integer getExitCode() { + return this.exitCode; + } + + public void setExitCode(Integer exitCode) { + this.exitCode = exitCode; + } + + public String getTaskName() { + return this.taskName; + } + + public void setTaskName(String taskName) { + this.taskName = taskName; + } + + public Date getStartTime() { + return (this.startTime != null) ? (Date) this.startTime.clone() : null; + } + + public void setStartTime(Date startTime) { + this.startTime = (startTime != null) ? (Date) startTime.clone() : null; + } + + public Date getEndTime() { + return (this.endTime != null) ? (Date) this.endTime.clone() : null; + } + + public void setEndTime(Date endTime) { + this.endTime = (endTime != null) ?
(Date) endTime.clone() : null; + } + + public String getExitMessage() { + return this.exitMessage; + } + + public void setExitMessage(String exitMessage) { + this.exitMessage = exitMessage; + } + + public List<String> getArguments() { + return this.arguments; + } + + public void setArguments(List<String> arguments) { + this.arguments = new ArrayList<>(arguments); + } + + public String getErrorMessage() { + return this.errorMessage; + } + + public void setErrorMessage(String errorMessage) { + this.errorMessage = errorMessage; + } + + public String getExternalExecutionId() { + return this.externalExecutionId; + } + + public void setExternalExecutionId(String externalExecutionId) { + this.externalExecutionId = externalExecutionId; + } + + public Long getParentExecutionId() { + return this.parentExecutionId; + } + + public void setParentExecutionId(Long parentExecutionId) { + this.parentExecutionId = parentExecutionId; + } + + public String getSchemaTarget() { + return schemaTarget; + } + + public void setSchemaTarget(String schemaTarget) { + this.schemaTarget = schemaTarget; + } + + public String getPlatformName() { + return platformName; + } + + public void setPlatformName(String platformName) { + this.platformName = platformName; + } + + @Override + public String toString() { + return "AggregateTaskExecution{" + + "executionId=" + executionId + + ", parentExecutionId=" + parentExecutionId + + ", exitCode=" + exitCode + + ", taskName='" + taskName + '\'' + + ", startTime=" + startTime + + ", endTime=" + endTime + + ", exitMessage='" + exitMessage + '\'' + + ", externalExecutionId='" + externalExecutionId + '\'' + + ", errorMessage='" + errorMessage + '\'' + + ", schemaTarget='" + schemaTarget + '\'' + + ", platformName='" + platformName + '\'' + + ", arguments=" + arguments + + '}'; + } + + public TaskExecution toTaskExecution() { + return new TaskExecution(executionId, + exitCode, + taskName, + startTime, + endTime, + exitMessage, + arguments, + errorMessage, + externalExecutionId, + parentExecutionId + ); + } +} diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppBootSchemaVersion.java b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersion.java similarity index 97% rename from spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppBootSchemaVersion.java rename to spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersion.java index e7d4125f98..8aba709aab 100644 --- a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppBootSchemaVersion.java +++ b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersion.java @@ -14,7 +14,7 @@ * limitations under the License.
*/ -package org.springframework.cloud.dataflow.core; +package org.springframework.cloud.dataflow.schema; import java.util.Arrays; diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppBootSchemaVersionDeserializer.java b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersionDeserializer.java similarity index 96% rename from spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppBootSchemaVersionDeserializer.java rename to spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersionDeserializer.java index 8aab7e2b42..4d06fab996 100644 --- a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppBootSchemaVersionDeserializer.java +++ b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersionDeserializer.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.springframework.cloud.dataflow.core; +package org.springframework.cloud.dataflow.schema; import java.io.IOException; diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppBootSchemaVersionSerializer.java b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersionSerializer.java similarity index 96% rename from spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppBootSchemaVersionSerializer.java rename to spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersionSerializer.java index a9bc9a3566..1b612346ca 100644 --- a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppBootSchemaVersionSerializer.java +++ b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersionSerializer.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.springframework.cloud.dataflow.core; +package org.springframework.cloud.dataflow.schema; import java.io.IOException; diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppBootSchemaVersions.java b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersions.java similarity index 97% rename from spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppBootSchemaVersions.java rename to spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersions.java index ec6f17d60b..780b2990ea 100644 --- a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppBootSchemaVersions.java +++ b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootSchemaVersions.java @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package org.springframework.cloud.dataflow.core; +package org.springframework.cloud.dataflow.schema; import java.util.List; diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppBootVersionConverter.java b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootVersionConverter.java similarity index 95% rename from spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppBootVersionConverter.java rename to spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootVersionConverter.java index 3e585a8e3b..f385847dbd 100644 --- a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/AppBootVersionConverter.java +++ b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/AppBootVersionConverter.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.springframework.cloud.dataflow.core; +package org.springframework.cloud.dataflow.schema; import org.springframework.core.convert.converter.Converter; diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/SchemaVersionTarget.java b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/SchemaVersionTarget.java similarity index 89% rename from spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/SchemaVersionTarget.java rename to spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/SchemaVersionTarget.java index 6f4dbf9b20..e1ce7f9d98 100644 --- a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/SchemaVersionTarget.java +++ b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/SchemaVersionTarget.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.springframework.cloud.dataflow.core; +package org.springframework.cloud.dataflow.schema; import java.util.Objects; @@ -95,8 +95,14 @@ public int hashCode() { return name != null ? 
name.hashCode() : 0; } public static SchemaVersionTarget createDefault(AppBootSchemaVersion schemaVersion) { + if(schemaVersion.equals(AppBootSchemaVersion.defaultVersion())) { + return new SchemaVersionTarget(schemaVersion.name().toLowerCase(), schemaVersion, "TASK_", "BATCH_", null); + } return new SchemaVersionTarget(schemaVersion.name().toLowerCase(), schemaVersion, schemaVersion.name() + "_TASK_", schemaVersion.name() + "_BATCH_", null); } + public static SchemaVersionTarget defaultTarget() { + return createDefault(AppBootSchemaVersion.defaultVersion()); + } @Override public String toString() { diff --git a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/SchemaVersionTargets.java b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/SchemaVersionTargets.java similarity index 96% rename from spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/SchemaVersionTargets.java rename to spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/SchemaVersionTargets.java index c4d45d4a15..92c3a75a25 100644 --- a/spring-cloud-dataflow-core/src/main/java/org/springframework/cloud/dataflow/core/SchemaVersionTargets.java +++ b/spring-cloud-dataflow-schema-core/src/main/java/org/springframework/cloud/dataflow/schema/SchemaVersionTargets.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.springframework.cloud.dataflow.core; +package org.springframework.cloud.dataflow.schema; import java.util.List; diff --git a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/AppBootSchemaVersionTests.java b/spring-cloud-dataflow-schema-core/src/test/java/org/springframework/cloud/schema/AppBootSchemaVersionTests.java similarity index 94% rename from spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/AppBootSchemaVersionTests.java rename to spring-cloud-dataflow-schema-core/src/test/java/org/springframework/cloud/schema/AppBootSchemaVersionTests.java index 2b69ee7ad0..6aa2b89422 100644 --- a/spring-cloud-dataflow-core/src/test/java/org/springframework/cloud/dataflow/core/AppBootSchemaVersionTests.java +++ b/spring-cloud-dataflow-schema-core/src/test/java/org/springframework/cloud/schema/AppBootSchemaVersionTests.java @@ -14,13 +14,15 @@ * limitations under the License. 
*/ -package org.springframework.cloud.dataflow.core; +package org.springframework.cloud.schema; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.NullAndEmptySource; import org.junit.jupiter.params.provider.ValueSource; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; + import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; diff --git a/spring-cloud-dataflow-schema/pom.xml b/spring-cloud-dataflow-schema/pom.xml new file mode 100644 index 0000000000..de934ff828 --- /dev/null +++ b/spring-cloud-dataflow-schema/pom.xml @@ -0,0 +1,79 @@ + + + 4.0.0 + + org.springframework.cloud + spring-cloud-dataflow-parent + 2.11.0-SNAPSHOT + ../spring-cloud-dataflow-parent + + spring-cloud-dataflow-schema + jar + + true + + + + org.springframework + spring-core + + + org.springframework + spring-context + compile + + + org.springframework.cloud + spring-cloud-task-batch + + + org.springframework.cloud + spring-cloud-dataflow-schema-core + ${project.version} + + + org.springframework.hateoas + spring-hateoas + + + org.slf4j + slf4j-api + + + javax.annotation + javax.annotation-api + + + com.fasterxml.jackson.core + jackson-annotations + + + com.fasterxml.jackson.core + jackson-databind + + + junit + junit + test + + + org.junit.jupiter + junit-jupiter-api + + + org.junit.jupiter + junit-jupiter-engine + test + + + org.assertj + assertj-core + test + + + org.junit.jupiter + junit-jupiter-params + test + + + \ No newline at end of file diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/SchemaService.java b/spring-cloud-dataflow-schema/src/main/java/org/springframework/cloud/dataflow/schema/service/SchemaService.java similarity index 77% rename from spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/SchemaService.java rename to spring-cloud-dataflow-schema/src/main/java/org/springframework/cloud/dataflow/schema/service/SchemaService.java index 665452999b..00cdaa9c3d 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/SchemaService.java +++ b/spring-cloud-dataflow-schema/src/main/java/org/springframework/cloud/dataflow/schema/service/SchemaService.java @@ -14,11 +14,11 @@ * limitations under the License. */ -package org.springframework.cloud.dataflow.server.service; +package org.springframework.cloud.dataflow.schema.service; -import org.springframework.cloud.dataflow.core.AppBootSchemaVersions; -import org.springframework.cloud.dataflow.core.SchemaVersionTarget; -import org.springframework.cloud.dataflow.core.SchemaVersionTargets; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersions; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.SchemaVersionTargets; /** * Schema service provides information about Spring Boot schema versions along with all targets and defaults. 
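A minimal usage sketch of the relocated schema API, using only types and methods visible in this diff (the wrapper class name SchemaTargetSketch is hypothetical, not part of the patch):

import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion;
import org.springframework.cloud.dataflow.schema.SchemaVersionTarget;
import org.springframework.cloud.dataflow.schema.service.SchemaService;
import org.springframework.cloud.dataflow.schema.service.impl.DefaultSchemaService;

public class SchemaTargetSketch {
    public static void main(String[] args) {
        SchemaService schemaService = new DefaultSchemaService();
        // getTarget(null) now falls back to the default target instead of returning null.
        SchemaVersionTarget resolved = schemaService.getTarget(null);
        System.out.println("resolved: " + resolved.getName());
        // Every Boot schema version has a default target named after the lower-cased
        // version; only non-default versions get version-prefixed TASK_/BATCH_ tables.
        for (AppBootSchemaVersion version : AppBootSchemaVersion.values()) {
            System.out.println(version + " -> " + SchemaVersionTarget.createDefault(version).getName());
        }
        System.out.println("default: " + SchemaVersionTarget.defaultTarget().getName());
    }
}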
diff --git a/spring-cloud-dataflow-schema/src/main/java/org/springframework/cloud/dataflow/schema/service/SchemaServiceConfiguration.java b/spring-cloud-dataflow-schema/src/main/java/org/springframework/cloud/dataflow/schema/service/SchemaServiceConfiguration.java new file mode 100644 index 0000000000..109ddee147 --- /dev/null +++ b/spring-cloud-dataflow-schema/src/main/java/org/springframework/cloud/dataflow/schema/service/SchemaServiceConfiguration.java @@ -0,0 +1,49 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.schema.service; + +import javax.annotation.PostConstruct; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.schema.service.impl.DefaultSchemaService; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * Configuration for schema service and related components. + * @author Corneil du Plessis + */ +@Configuration +public class SchemaServiceConfiguration { + private static final Logger logger = LoggerFactory.getLogger(SchemaServiceConfiguration.class); + @Bean + public SchemaService schemaService() { + logger.info("schemaService:starting"); + try { + return new DefaultSchemaService(); + } finally { + logger.info("schemaService:started"); + } + } + + @PostConstruct + public void setup() { + logger.info("created: org.springframework.cloud.dataflow.schema.service.SchemaServiceConfiguration"); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultSchemaService.java b/spring-cloud-dataflow-schema/src/main/java/org/springframework/cloud/dataflow/schema/service/impl/DefaultSchemaService.java similarity index 57% rename from spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultSchemaService.java rename to spring-cloud-dataflow-schema/src/main/java/org/springframework/cloud/dataflow/schema/service/impl/DefaultSchemaService.java index 20485a1e84..9cae739434 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultSchemaService.java +++ b/spring-cloud-dataflow-schema/src/main/java/org/springframework/cloud/dataflow/schema/service/impl/DefaultSchemaService.java @@ -14,29 +14,33 @@ * limitations under the License. 
*/ -package org.springframework.cloud.dataflow.server.service.impl; +package org.springframework.cloud.dataflow.schema.service.impl; +import javax.annotation.PostConstruct; import java.util.ArrayList; import java.util.Arrays; import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; -import org.springframework.cloud.dataflow.core.AppBootSchemaVersion; -import org.springframework.cloud.dataflow.core.AppBootSchemaVersions; -import org.springframework.cloud.dataflow.core.SchemaVersionTarget; -import org.springframework.cloud.dataflow.core.SchemaVersionTargets; -import org.springframework.cloud.dataflow.server.service.SchemaService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersions; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.SchemaVersionTargets; +import org.springframework.cloud.dataflow.schema.service.SchemaService; import org.springframework.stereotype.Service; /** * Implements a simple service to provide Schema versions and targets. - + * In the future this will use a database to store the {@link SchemaVersionTarget}. * @author Corneil du Plessis */ -@Service public class DefaultSchemaService implements SchemaService { - private Map<String, SchemaVersionTarget> targets; + private static final Logger logger = LoggerFactory.getLogger(DefaultSchemaService.class); + private final Map<String, SchemaVersionTarget> targets; public DefaultSchemaService() { targets = Arrays.stream(AppBootSchemaVersion.values()) @@ -51,12 +55,22 @@ public AppBootSchemaVersions getVersions() { @Override public SchemaVersionTargets getTargets() { - return new SchemaVersionTargets(AppBootSchemaVersion.defaultVersion().name().toLowerCase(), new ArrayList<>(targets.values())); + return new SchemaVersionTargets(getDefaultSchemaTarget(), new ArrayList<>(targets.values())); + } + + private static String getDefaultSchemaTarget() { + return AppBootSchemaVersion.defaultVersion().name().toLowerCase(); } @Override public SchemaVersionTarget getTarget(String name) { - // TODO we can decide the throw an exception resulting in NOT_FOUND. + if (name == null) { + name = getDefaultSchemaTarget(); + } return targets.get(name); } + @PostConstruct + public void setup() { + logger.info("created: org.springframework.cloud.dataflow.schema.service.impl.DefaultSchemaService"); + } } diff --git a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/service/impl/DefaultSchemaServiceTests.java b/spring-cloud-dataflow-schema/src/test/java/org/springframework/cloud/schema/service/impl/DefaultSchemaServiceTests.java similarity index 82% rename from spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/service/impl/DefaultSchemaServiceTests.java rename to spring-cloud-dataflow-schema/src/test/java/org/springframework/cloud/schema/service/impl/DefaultSchemaServiceTests.java index 28c15504f7..463441df7a 100644 --- a/spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/service/impl/DefaultSchemaServiceTests.java +++ b/spring-cloud-dataflow-schema/src/test/java/org/springframework/cloud/schema/service/impl/DefaultSchemaServiceTests.java @@ -14,18 +14,20 @@ * limitations under the License.
*/ -package org.springframework.cloud.dataflow.server.service.impl; +package org.springframework.cloud.schema.service.impl; import java.util.HashSet; -import org.junit.Test; -import org.springframework.cloud.dataflow.core.AppBootSchemaVersion; -import org.springframework.cloud.dataflow.core.AppBootSchemaVersions; -import org.springframework.cloud.dataflow.core.SchemaVersionTarget; -import org.springframework.cloud.dataflow.core.SchemaVersionTargets; -import org.springframework.cloud.dataflow.server.service.SchemaService; +import org.junit.jupiter.api.Test; + +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersions; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.SchemaVersionTargets; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.dataflow.schema.service.impl.DefaultSchemaService; import static org.assertj.core.api.Assertions.assertThat; diff --git a/spring-cloud-dataflow-server-core/pom.xml b/spring-cloud-dataflow-server-core/pom.xml index 9b42601b16..9f09c7687d 100644 --- a/spring-cloud-dataflow-server-core/pom.xml +++ b/spring-cloud-dataflow-server-core/pom.xml @@ -12,7 +12,6 @@ jar true - 0.5C @@ -35,6 +34,16 @@ org.hibernate hibernate-micrometer + + org.springframework.cloud + spring-cloud-dataflow-schema + ${project.version} + + + org.springframework.cloud + spring-cloud-dataflow-aggregate-task + ${project.version} + org.springframework.cloud spring-cloud-dataflow-common-flyway @@ -164,7 +173,6 @@ org.springframework.cloud spring-cloud-task-batch - test org.skyscreamer @@ -232,6 +240,11 @@ mariadb test + + org.testcontainers + postgresql + test + diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableJobExecutionDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableJobExecutionDao.java index fdb84fd86c..56b7616766 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableJobExecutionDao.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JdbcSearchableJobExecutionDao.java @@ -35,6 +35,9 @@ import org.springframework.batch.item.database.Order; import org.springframework.batch.item.database.PagingQueryProvider; import org.springframework.batch.item.database.support.SqlPagingQueryProviderFactoryBean; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.server.repository.support.SchemaUtilities; import org.springframework.dao.IncorrectResultSizeDataAccessException; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.RowMapper; @@ -51,14 +54,14 @@ public class JdbcSearchableJobExecutionDao extends JdbcJobExecutionDao implement private static final String GET_COUNT = "SELECT COUNT(1) from %PREFIX%JOB_EXECUTION"; - private static final String GET_COUNT_BY_JOB_NAME = "SELECT COUNT(1) from %PREFIX%JOB_EXECUTION E, %PREFIX%JOB_INSTANCE I " - + "where E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID and I.JOB_NAME=?"; + private static final String GET_COUNT_BY_JOB_NAME = "SELECT COUNT(1) from %PREFIX%JOB_EXECUTION E " + + "JOIN %PREFIX%JOB_INSTANCE I ON 
E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID where I.JOB_NAME=?"; - private static final String GET_COUNT_BY_STATUS = "SELECT COUNT(1) from %PREFIX%JOB_EXECUTION E, %PREFIX%JOB_INSTANCE I " - + "where E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID and E.STATUS = ?"; + private static final String GET_COUNT_BY_STATUS = "SELECT COUNT(1) from %PREFIX%JOB_EXECUTION E " + + "JOIN %PREFIX%JOB_INSTANCE I ON E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID where E.STATUS = ?"; - private static final String GET_COUNT_BY_JOB_NAME_AND_STATUS = "SELECT COUNT(1) from %PREFIX%JOB_EXECUTION E, %PREFIX%JOB_INSTANCE I " - + "where E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID and I.JOB_NAME=? AND E.STATUS = ?"; + private static final String GET_COUNT_BY_JOB_NAME_AND_STATUS = "SELECT COUNT(1) from %PREFIX%JOB_EXECUTION E " + + "JOIN %PREFIX%JOB_INSTANCE I ON E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID where I.JOB_NAME=? AND E.STATUS = ?"; private static final String FIELDS = "E.JOB_EXECUTION_ID, E.START_TIME, E.END_TIME, E.STATUS, E.EXIT_CODE, E.EXIT_MESSAGE, " + "E.CREATE_TIME, E.LAST_UPDATED, E.VERSION, I.JOB_INSTANCE_ID, I.JOB_NAME"; @@ -68,8 +71,7 @@ public class JdbcSearchableJobExecutionDao extends JdbcJobExecutionDao implement private static final String GET_RUNNING_EXECUTIONS = "SELECT " + FIELDS - + " from %PREFIX%JOB_EXECUTION E, %PREFIX%JOB_INSTANCE I " - + "where E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID and E.END_TIME is NULL"; + + " from %PREFIX%JOB_EXECUTION E JOIN %PREFIX%JOB_INSTANCE I ON E.JOB_INSTANCE_ID=I.JOB_INSTANCE_ID where E.END_TIME is NULL"; private static final String NAME_FILTER = "I.JOB_NAME LIKE ?"; @@ -84,7 +86,7 @@ public class JdbcSearchableJobExecutionDao extends JdbcJobExecutionDao implement private static final String TASK_EXECUTION_ID_FILTER = "B.JOB_EXECUTION_ID = E.JOB_EXECUTION_ID AND B.TASK_EXECUTION_ID = ?"; - private static final String FROM_CLAUSE_TASK_TASK_BATCH = "TASK_TASK_BATCH B"; + private static final String FROM_CLAUSE_TASK_TASK_BATCH = "%PREFIX%TASK_BATCH B"; private PagingQueryProvider allExecutionsPagingQueryProvider; @@ -274,8 +276,10 @@ public List getJobExecutionsWithStepCountFilteredByJo @Override public List getJobExecutionsWithStepCountFilteredByTaskExecutionId( int taskExecutionId, int start, int count) { + // TODO find schemaVersionTarget for the taskExecutionId if (start <= 0) { - return getJdbcTemplate().query(byTaskExecutionIdWithStepCountPagingQueryProvider.generateFirstPageQuery(count), + return getJdbcTemplate().query(SchemaUtilities.getQuery(byTaskExecutionIdWithStepCountPagingQueryProvider.generateFirstPageQuery(count), + SchemaVersionTarget.defaultTarget().getName()), new JobExecutionStepCountRowMapper(), taskExecutionId); } try { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobService.java index 3cfa2157ec..32048cad06 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/JobService.java @@ -272,7 +272,8 @@ Collection getJobExecutionsForJobInstance(String jobName, Long job * @throws NoSuchJobExecutionException thrown if job execution specified does not exist */ Collection getStepExecutions(Long jobExecutionId) throws NoSuchJobExecutionException; - + Collection getStepExecutions(JobExecution jobExecution) throws NoSuchJobExecutionException; + void 
addStepExecutions(JobExecution jobExecution); /** * List the {@link StepExecution step executions} for a step in descending order of * creation (usually close to execution order). @@ -319,6 +320,7 @@ Collection listStepExecutionsForStep(String jobName, String stepN */ StepExecution getStepExecution(Long jobExecutionId, Long stepExecutionId) throws NoSuchStepExecutionException, NoSuchJobExecutionException; + StepExecution getStepExecution(JobExecution jobExecution, Long stepExecutionId) throws NoSuchStepExecutionException; /** * Send a stop signal to all running job executions. diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobService.java index cf509e0fcb..206f6d1fc7 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobService.java @@ -1,5 +1,5 @@ /* - * Copyright 2009-2019 the original author or authors. + * Copyright 2009-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,6 +15,7 @@ */ package org.springframework.cloud.dataflow.server.batch; +import javax.batch.operations.JobOperator; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; @@ -24,11 +25,10 @@ import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; +import java.util.Objects; import java.util.Properties; import java.util.Set; -import javax.batch.operations.JobOperator; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -49,6 +49,7 @@ import org.springframework.core.io.Resource; import org.springframework.core.io.support.PathMatchingResourcePatternResolver; import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.util.Assert; import org.springframework.util.CollectionUtils; import org.springframework.util.StringUtils; @@ -59,7 +60,7 @@ * @author Dave Syer * @author Michael Minella * @author Glenn Renfro - * + * @author Corneil du Plessis */ public class SimpleJobService implements JobService, DisposableBean { @@ -85,8 +86,8 @@ public class SimpleJobService implements JobService, DisposableBean { private int shutdownTimeout = DEFAULT_SHUTDOWN_TIMEOUT; public SimpleJobService(SearchableJobInstanceDao jobInstanceDao, SearchableJobExecutionDao jobExecutionDao, - SearchableStepExecutionDao stepExecutionDao, JobRepository jobRepository, - ExecutionContextDao executionContextDao, JobOperator jsrJobOperator) { + SearchableStepExecutionDao stepExecutionDao, JobRepository jobRepository, + ExecutionContextDao executionContextDao, JobOperator jsrJobOperator) { super(); this.jobInstanceDao = jobInstanceDao; this.jobExecutionDao = jobExecutionDao; @@ -96,8 +97,7 @@ public SimpleJobService(SearchableJobInstanceDao jobInstanceDao, SearchableJobEx if (jsrJobOperator == null) { logger.warn("No JobOperator compatible with JSR-352 was provided."); - } - else { + } else { this.jsrJobOperator = jsrJobOperator; } } @@ -118,11 +118,20 @@ public Collection getStepExecutions(Long jobExecutionId) throws N if (jobExecution == null) { throw new NoSuchJobExecutionException("No JobExecution with id=" + jobExecutionId); } + return getStepExecutions(jobExecution); - 
stepExecutionDao.addStepExecutions(jobExecution); + } + @Override + public Collection getStepExecutions(JobExecution jobExecution) { + Assert.notNull(jobExecution, "jobExecution required"); + stepExecutionDao.addStepExecutions(jobExecution); return jobExecution.getStepExecutions(); + } + @Override + public void addStepExecutions(JobExecution jobExecution) { + stepExecutionDao.addStepExecutions(jobExecution); } /** @@ -131,7 +140,6 @@ public Collection getStepExecutions(Long jobExecutionId) throws N * * @param jobExecutionId the job execution to restart * @return Instance of {@link JobExecution} associated with the restart. - * * @throws NoSuchJobException thrown if job does not exist */ @Override @@ -148,12 +156,10 @@ public JobExecution restart(Long jobExecutionId, JobParameters params) throws No if (jsrJobOperator != null) { if (params != null) { jobExecution = new JobExecution(jsrJobOperator.restart(jobExecutionId, params.toProperties())); - } - else { + } else { jobExecution = new JobExecution(jsrJobOperator.restart(jobExecutionId, new Properties())); } - } - else { + } else { throw new NoSuchJobException(String.format("Can't find job associated with job execution id %s to restart", String.valueOf(jobExecutionId))); } @@ -167,8 +173,7 @@ public JobExecution launch(String jobName, JobParameters jobParameters) throws N if (jsrJobOperator != null) { jobExecution = new JobExecution(jsrJobOperator.start(jobName, jobParameters.toProperties())); - } - else { + } else { throw new NoSuchJobException(String.format("Unable to find job %s to launch", String.valueOf(jobName))); } @@ -227,8 +232,7 @@ private Collection getJsrJobNames() { String jobXmlFileName = resource.getFilename(); jsr352JobNames.add(jobXmlFileName.substring(0, jobXmlFileName.length() - 4)); } - } - catch (IOException e) { + } catch (IOException e) { logger.debug("Unable to list JSR-352 batch jobs", e); } @@ -248,8 +252,7 @@ public int stopAll() { for (JobExecution jobExecution : result) { if (jsrJobOperator != null && jsrJobNames.contains(jobExecution.getJobInstance().getJobName())) { jsrJobOperator.stop(jobExecution.getId()); - } - else { + } else { jobExecution.stop(); jobRepository.update(jobExecution); } @@ -272,8 +275,7 @@ public JobExecution stop(Long jobExecutionId) throws NoSuchJobExecutionException if (jsrJobOperator != null && jsrJobNames.contains(jobExecution.getJobInstance().getJobName())) { jsrJobOperator.stop(jobExecutionId); jobExecution = getJobExecution(jobExecutionId); - } - else { + } else { jobExecution.stop(); jobRepository.update(jobExecution); } @@ -299,8 +301,7 @@ public JobExecution abandon(Long jobExecutionId) throws NoSuchJobExecutionExcept if (jsrJobOperator != null && jsrJobNames.contains(jobInstance.getJobName())) { jsrJobOperator.abandon(jobExecutionId); jobExecution = getJobExecution(jobExecutionId); - } - else { + } else { jobExecution.upgradeStatus(BatchStatus.ABANDONED); jobExecution.setEndTime(new Date()); jobRepository.update(jobExecution); @@ -320,8 +321,7 @@ private int countJobExecutions(String jobName, BatchStatus status) throws NoSuch if (status != null) { return jobExecutionDao.countJobExecutions(status); } - } - else { + } else { if (status != null) { return jobExecutionDao.countJobExecutions(jobName, status); } @@ -342,11 +342,10 @@ public JobExecution getJobExecution(Long jobExecutionId) throws NoSuchJobExecuti if (jobExecution == null) { throw new NoSuchJobExecutionException("There is no JobExecution with id=" + jobExecutionId); } - 
jobExecution.setJobInstance(jobInstanceDao.getJobInstance(jobExecution)); + jobExecution.setJobInstance(Objects.requireNonNull(jobInstanceDao.getJobInstance(jobExecution))); try { jobExecution.setExecutionContext(executionContextDao.getExecutionContext(jobExecution)); - } - catch (Exception e) { + } catch (Exception e) { logger.info("Cannot load execution context for job execution: " + jobExecution); } stepExecutionDao.addStepExecutions(jobExecution); @@ -357,8 +356,7 @@ public JobExecution getJobExecution(Long jobExecutionId) throws NoSuchJobExecuti public Collection getJobExecutionsForJobInstance(String name, Long jobInstanceId) throws NoSuchJobException { checkJobExists(name); - List jobExecutions = jobExecutionDao.findJobExecutions(jobInstanceDao - .getJobInstance(jobInstanceId)); + List jobExecutions = jobExecutionDao.findJobExecutions(Objects.requireNonNull(jobInstanceDao.getJobInstance(jobInstanceId))); for (JobExecution jobExecution : jobExecutions) { stepExecutionDao.addStepExecutions(jobExecution); } @@ -369,15 +367,19 @@ public Collection getJobExecutionsForJobInstance(String name, Long public StepExecution getStepExecution(Long jobExecutionId, Long stepExecutionId) throws NoSuchJobExecutionException, NoSuchStepExecutionException { JobExecution jobExecution = getJobExecution(jobExecutionId); + return getStepExecution(jobExecution, stepExecutionId); + } + + @Override + public StepExecution getStepExecution(JobExecution jobExecution, Long stepExecutionId) throws NoSuchStepExecutionException { StepExecution stepExecution = stepExecutionDao.getStepExecution(jobExecution, stepExecutionId); if (stepExecution == null) { - throw new NoSuchStepExecutionException("There is no StepExecution with jobExecutionId=" + jobExecutionId + throw new NoSuchStepExecutionException("There is no StepExecution with jobExecutionId=" + jobExecution.getId() + " and id=" + stepExecutionId); } try { stepExecution.setExecutionContext(executionContextDao.getExecutionContext(stepExecution)); - } - catch (Exception e) { + } catch (Exception e) { logger.info("Cannot load execution context for step execution: " + stepExecution); } return stepExecution; @@ -385,7 +387,7 @@ public StepExecution getStepExecution(Long jobExecutionId, Long stepExecutionId) @Override public Collection listJobExecutionsForJobWithStepCount(String jobName, int start, - int count) + int count) throws NoSuchJobException { checkJobExists(jobName); return jobExecutionDao.getJobExecutionsWithStepCount(jobName, start, count); @@ -438,7 +440,7 @@ public Collection getStepNamesForJob(String jobName) throws NoSuchJobExc @Override public Collection listJobExecutionsForJob(String jobName, BatchStatus status, int pageOffset, - int pageSize) { + int pageSize) { List jobExecutions = getJobExecutions(jobName, status, pageOffset, pageSize); for (JobExecution jobExecution : jobExecutions) { @@ -450,8 +452,8 @@ public Collection listJobExecutionsForJob(String jobName, BatchSta @Override public Collection listJobExecutionsForJobWithStepCount(Date fromDate, - Date toDate, int start, int count) { - return jobExecutionDao.getJobExecutionsWithStepCount(fromDate, toDate, start, count); + Date toDate, int start, int count) { + return jobExecutionDao.getJobExecutionsWithStepCount(fromDate, toDate, start, count); } @Override @@ -471,8 +473,7 @@ private List getJobExecutions(String jobName, BatchStatus status, if (status != null) { return jobExecutionDao.getJobExecutions(status, pageOffset, pageSize); } - } - else { + } else { if (status != null) { return 
jobExecutionDao.getJobExecutions(jobName, status, pageOffset, pageSize); } @@ -502,11 +503,9 @@ public void destroy() throws Exception { if (jobExecution.isRunning()) { stop(jobExecution.getId()); } - } - catch (JobExecutionNotRunningException e) { + } catch (JobExecutionNotRunningException e) { logger.info("JobExecution is not running so it cannot be stopped"); - } - catch (Exception e) { + } catch (Exception e) { logger.error("Unexpected exception stopping JobExecution", e); if (firstException == null) { firstException = e; @@ -535,12 +534,11 @@ public void destroy() throws Exception { @Scheduled(fixedDelay = 60000) public void removeInactiveExecutions() { - for (Iterator iterator = activeExecutions.iterator(); iterator.hasNext();) { + for (Iterator iterator = activeExecutions.iterator(); iterator.hasNext(); ) { JobExecution jobExecution = iterator.next(); try { jobExecution = getJobExecution(jobExecution.getId()); - } - catch (NoSuchJobExecutionException e) { + } catch (NoSuchJobExecutionException e) { logger.error("Unexpected exception loading JobExecution", e); } if (!jobExecution.isRunning()) { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobServiceFactoryBean.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobServiceFactoryBean.java index 19cf8110f3..f502a8e974 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobServiceFactoryBean.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/batch/SimpleJobServiceFactoryBean.java @@ -39,6 +39,7 @@ import org.springframework.batch.support.DatabaseType; import org.springframework.beans.factory.FactoryBean; import org.springframework.beans.factory.InitializingBean; +import org.springframework.cloud.dataflow.core.database.support.MultiSchemaIncrementerFactory; import org.springframework.jdbc.core.JdbcOperations; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.support.lob.DefaultLobHandler; @@ -187,7 +188,7 @@ public void afterPropertiesSet() throws Exception { jdbcTemplate = new JdbcTemplate(dataSource); if (incrementerFactory == null) { - incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory(dataSource); + incrementerFactory = new MultiSchemaIncrementerFactory(dataSource); } if (databaseType == null) { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/AggregateDataFlowTaskConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/AggregateDataFlowTaskConfiguration.java new file mode 100644 index 0000000000..15b8042510 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/AggregateDataFlowTaskConfiguration.java @@ -0,0 +1,167 @@ +/* + * Copyright 2023-2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
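The factory-bean change above swaps DefaultDataFieldMaxValueIncrementerFactory for MultiSchemaIncrementerFactory so that ID sequences can be resolved per schema target. A hedged sketch of the call pattern, mirroring the usage later in this patch (the concrete prefixed sequence name is an assumption):

    DataFieldMaxValueIncrementerFactory incrementerFactory =
            new MultiSchemaIncrementerFactory(dataSource);
    String databaseType;
    try {
        databaseType = DatabaseType.fromMetaData(dataSource).name();
    } catch (MetaDataAccessException e) {
        throw new IllegalStateException(e);
    }
    // e.g. "%PREFIX%EXECUTION_METADATA_SEQ" -> "BOOT3_TASK_EXECUTION_METADATA_SEQ"
    DataFieldMaxValueIncrementer incrementer = incrementerFactory.getIncrementer(
            databaseType,
            SchemaUtilities.getQuery("%PREFIX%EXECUTION_METADATA_SEQ", target.getTaskPrefix()));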
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.config; + +import javax.annotation.PostConstruct; +import javax.sql.DataSource; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory; +import org.springframework.beans.BeanUtils; +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; +import org.springframework.cloud.dataflow.aggregate.task.TaskDeploymentReader; +import org.springframework.cloud.dataflow.core.database.support.MultiSchemaIncrementerFactory; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.dataflow.server.repository.AggregateJobQueryDao; +import org.springframework.cloud.dataflow.server.repository.DataflowJobExecutionDao; +import org.springframework.cloud.dataflow.server.repository.DataflowJobExecutionDaoContainer; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDao; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDaoContainer; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDaoContainer; +import org.springframework.cloud.dataflow.server.repository.DefaultTaskDefinitionReader; +import org.springframework.cloud.dataflow.server.repository.DefaultTaskDeploymentReader; +import org.springframework.cloud.dataflow.server.repository.JdbcAggregateJobQueryDao; +import org.springframework.cloud.dataflow.server.repository.JdbcDataflowJobExecutionDao; +import org.springframework.cloud.dataflow.server.repository.JdbcDataflowTaskExecutionDao; +import org.springframework.cloud.dataflow.server.repository.JdbcDataflowTaskExecutionMetadataDao; +import org.springframework.cloud.dataflow.server.repository.JobExecutionDaoContainer; +import org.springframework.cloud.dataflow.server.repository.JobRepositoryContainer; +import org.springframework.cloud.dataflow.server.repository.TaskBatchDaoContainer; +import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; +import org.springframework.cloud.dataflow.server.repository.TaskDeploymentRepository; +import org.springframework.cloud.dataflow.server.repository.support.SchemaUtilities; +import org.springframework.cloud.dataflow.server.service.JobExplorerContainer; +import org.springframework.cloud.dataflow.server.service.JobServiceContainer; +import org.springframework.cloud.task.configuration.TaskProperties; +import org.springframework.cloud.task.repository.support.DatabaseType; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jdbc.support.MetaDataAccessException; +import org.springframework.transaction.PlatformTransactionManager; + +/** + * Configuration for DAO containers used for multiple schema targets.
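Each @Bean in this configuration populates a container keyed by schema target name, one DAO per target. A consumer-side sketch; the get-style accessor is an assumption inferred from the add(name, dao) calls used here:

    DataflowJobExecutionDaoContainer container = new DataflowJobExecutionDaoContainer();
    for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) {
        container.add(target.getName(),
                new JdbcDataflowJobExecutionDao(dataSource, target.getBatchPrefix()));
    }
    // later, callers resolve the DAO matching a task's schema target, e.g.
    DataflowJobExecutionDao dao = container.get("boot3"); // accessor name assumed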
+ * + * @author Corneil du Plessis + */ +@Configuration +public class AggregateDataFlowTaskConfiguration { + private static final Logger logger = LoggerFactory.getLogger(AggregateDataFlowTaskConfiguration.class); + + @Bean + public DataflowJobExecutionDaoContainer dataflowJobExecutionDao(DataSource dataSource, SchemaService schemaService) { + DataflowJobExecutionDaoContainer result = new DataflowJobExecutionDaoContainer(); + for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + DataflowJobExecutionDao dao = new JdbcDataflowJobExecutionDao(dataSource, target.getBatchPrefix()); + result.add(target.getName(), dao); + } + return result; + } + + @Bean + public DataflowTaskExecutionDaoContainer dataflowTaskExecutionDao(DataSource dataSource, SchemaService schemaService, TaskProperties taskProperties) { + DataflowTaskExecutionDaoContainer result = new DataflowTaskExecutionDaoContainer(); + for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + TaskProperties properties = new TaskProperties(); + BeanUtils.copyProperties(taskProperties, properties); + properties.setTablePrefix(target.getTaskPrefix()); + DataflowTaskExecutionDao dao = new JdbcDataflowTaskExecutionDao(dataSource, properties); + result.add(target.getName(), dao); + } + return result; + } + + @Bean + public DataflowTaskExecutionMetadataDaoContainer dataflowTaskExecutionMetadataDao(DataSource dataSource, SchemaService schemaService) { + DataFieldMaxValueIncrementerFactory incrementerFactory = new MultiSchemaIncrementerFactory(dataSource); + String databaseType; + try { + databaseType = DatabaseType.fromMetaData(dataSource).name(); + } catch (MetaDataAccessException e) { + throw new IllegalStateException(e); + } + DataflowTaskExecutionMetadataDaoContainer result = new DataflowTaskExecutionMetadataDaoContainer(); + for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + DataflowTaskExecutionMetadataDao dao = new JdbcDataflowTaskExecutionMetadataDao( + dataSource, + incrementerFactory.getIncrementer(databaseType, + SchemaUtilities.getQuery("%PREFIX%EXECUTION_METADATA_SEQ", target.getTaskPrefix()) + ), + target.getTaskPrefix() + ); + result.add(target.getName(), dao); + } + return result; + } + + @Bean + public TaskExecutionDaoContainer taskExecutionDaoContainer(DataSource dataSource, SchemaService schemaService) { + return new TaskExecutionDaoContainer(dataSource, schemaService); + } + + @Bean + public JobRepositoryContainer jobRepositoryContainer(DataSource dataSource, PlatformTransactionManager platformTransactionManager, SchemaService schemaService) { + return new JobRepositoryContainer(dataSource, platformTransactionManager, schemaService); + } + + @Bean + public JobExplorerContainer jobExplorerContainer(DataSource dataSource, SchemaService schemaService) { + return new JobExplorerContainer(dataSource, schemaService); + } + + @Bean + public JobServiceContainer jobServiceContainer(DataSource dataSource, PlatformTransactionManager platformTransactionManager, SchemaService schemaService, JobRepositoryContainer jobRepositoryContainer, JobExplorerContainer jobExplorerContainer) { + return new JobServiceContainer(dataSource, platformTransactionManager, schemaService, jobRepositoryContainer, jobExplorerContainer); + } + + @Bean + public JobExecutionDaoContainer jobExecutionDaoContainer(DataSource dataSource, SchemaService schemaService) { + return new JobExecutionDaoContainer(dataSource, schemaService); + } + + @Bean + @ConditionalOnMissingBean + public 
TaskDefinitionReader taskDefinitionReader(TaskDefinitionRepository repository) { + return new DefaultTaskDefinitionReader(repository); + } + + @Bean + @ConditionalOnMissingBean + public TaskDeploymentReader taskDeploymentReader(TaskDeploymentRepository repository) { + return new DefaultTaskDeploymentReader(repository); + } + + @Bean + public AggregateJobQueryDao aggregateJobQueryDao(DataSource dataSource, SchemaService schemaService, JobServiceContainer jobServiceContainer) throws Exception { + return new JdbcAggregateJobQueryDao(dataSource, schemaService, jobServiceContainer); + } + + @Bean + public TaskBatchDaoContainer taskBatchDaoContainer(DataSource dataSource, SchemaService schemaService) { + return new TaskBatchDaoContainer(dataSource, schemaService); + } + + @PostConstruct + public void setup() { + logger.info("created: org.springframework.cloud.dataflow.server.config.AggregateDataFlowTaskConfiguration"); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowControllerAutoConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowControllerAutoConfiguration.java index ea7a9e90ce..1d30788d5b 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowControllerAutoConfiguration.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowControllerAutoConfiguration.java @@ -38,6 +38,7 @@ import org.springframework.cloud.common.security.core.support.OAuth2AccessTokenProvidingClientHttpRequestInterceptor; import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService; import org.springframework.cloud.common.security.support.SecurityStateBean; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; import org.springframework.cloud.dataflow.audit.repository.AuditRecordRepository; import org.springframework.cloud.dataflow.audit.service.AuditRecordService; import org.springframework.cloud.dataflow.audit.service.DefaultAuditRecordService; @@ -53,9 +54,9 @@ import org.springframework.cloud.dataflow.rest.resource.AppRegistrationResource; import org.springframework.cloud.dataflow.rest.resource.StreamDefinitionResource; import org.springframework.cloud.dataflow.rest.resource.TaskDefinitionResource; +import org.springframework.cloud.dataflow.schema.service.SchemaService; import org.springframework.cloud.dataflow.server.DockerValidatorProperties; import org.springframework.cloud.dataflow.server.TaskValidationController; -import org.springframework.cloud.dataflow.server.batch.JobService; import org.springframework.cloud.dataflow.server.config.apps.CommonApplicationProperties; import org.springframework.cloud.dataflow.server.config.features.ConditionalOnStreamsEnabled; import org.springframework.cloud.dataflow.server.config.features.ConditionalOnTasksEnabled; @@ -98,9 +99,11 @@ import org.springframework.cloud.dataflow.server.job.LauncherRepository; import org.springframework.cloud.dataflow.server.repository.StreamDefinitionRepository; import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer; +import org.springframework.cloud.dataflow.server.service.JobServiceContainer; import
org.springframework.cloud.dataflow.server.service.LauncherService; import org.springframework.cloud.dataflow.server.service.SchedulerService; -import org.springframework.cloud.dataflow.server.service.SchemaService; import org.springframework.cloud.dataflow.server.service.SpringSecurityAuditorAware; import org.springframework.cloud.dataflow.server.service.StreamService; import org.springframework.cloud.dataflow.server.service.StreamValidationService; @@ -113,7 +116,6 @@ import org.springframework.cloud.dataflow.server.service.impl.AppDeploymentRequestCreator; import org.springframework.cloud.dataflow.server.service.impl.ComposedTaskRunnerConfigurationProperties; import org.springframework.cloud.dataflow.server.service.impl.DefaultLauncherService; -import org.springframework.cloud.dataflow.server.service.impl.DefaultSchemaService; import org.springframework.cloud.dataflow.server.service.impl.DefaultStreamService; import org.springframework.cloud.dataflow.server.service.impl.TaskConfigurationProperties; import org.springframework.cloud.dataflow.server.service.impl.validation.DefaultStreamValidationService; @@ -127,7 +129,6 @@ import org.springframework.cloud.skipper.client.SkipperClientProperties; import org.springframework.cloud.skipper.client.SkipperClientResponseErrorHandler; import org.springframework.cloud.skipper.client.util.HttpClientConfigurer; -import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; @@ -213,14 +214,7 @@ public RestControllerAdvice restControllerAdvice() { return new RestControllerAdvice(); } - @Configuration - public static class SchemaConfiguration { - @Bean - @ConditionalOnMissingBean - public SchemaService schemaService() { - return new DefaultSchemaService(); - } - } + @Configuration public static class AppRegistryConfiguration { @@ -276,13 +270,25 @@ public SchemaController schemaController(SchemaService schemaService) { } @Bean - public TaskExecutionController taskExecutionController(TaskExplorer explorer, - TaskExecutionService taskExecutionService, - TaskDefinitionRepository taskDefinitionRepository, TaskExecutionInfoService taskExecutionInfoService, - TaskDeleteService taskDeleteService, TaskJobService taskJobService) { - return new TaskExecutionController(explorer, taskExecutionService, taskDefinitionRepository, + public TaskExecutionController taskExecutionController( + AggregateTaskExplorer explorer, + AggregateExecutionSupport aggregateExecutionSupport, + TaskExecutionService taskExecutionService, + TaskDefinitionRepository taskDefinitionRepository, + TaskDefinitionReader taskDefinitionReader, + TaskExecutionInfoService taskExecutionInfoService, + TaskDeleteService taskDeleteService, + TaskJobService taskJobService + ) { + return new TaskExecutionController(explorer, + aggregateExecutionSupport, + taskExecutionService, + taskDefinitionRepository, + taskDefinitionReader, taskExecutionInfoService, - taskDeleteService, taskJobService); + taskDeleteService, + taskJobService + ); } @Bean @@ -293,15 +299,22 @@ public TaskPlatformController taskLauncherController(LauncherService launcherSer @Bean @ConditionalOnMissingBean public TaskDefinitionAssemblerProvider taskDefinitionAssemblerProvider( - TaskExecutionService taskExecutionService, TaskJobService taskJobService, TaskExplorer taskExplorer) { - return new DefaultTaskDefinitionAssemblerProvider(taskExecutionService, taskJobService, 
taskExplorer); + TaskExecutionService taskExecutionService, + TaskJobService taskJobService, + AggregateTaskExplorer taskExplorer, + AggregateExecutionSupport aggregateExecutionSupport + ) { + return new DefaultTaskDefinitionAssemblerProvider(taskExecutionService, taskJobService, taskExplorer, aggregateExecutionSupport); } @Bean - public TaskDefinitionController taskDefinitionController(TaskExplorer taskExplorer, - TaskDefinitionRepository repository, TaskSaveService taskSaveService, + public TaskDefinitionController taskDefinitionController( + AggregateTaskExplorer taskExplorer, + TaskDefinitionRepository repository, + TaskSaveService taskSaveService, TaskDeleteService taskDeleteService, - TaskDefinitionAssemblerProvider taskDefinitionAssemblerProvider) { + TaskDefinitionAssemblerProvider taskDefinitionAssemblerProvider + ) { return new TaskDefinitionController(taskExplorer, repository, taskSaveService, taskDeleteService, taskDefinitionAssemblerProvider); } @@ -322,13 +335,13 @@ public JobExecutionThinController jobExecutionThinController(TaskJobService repo } @Bean - public JobStepExecutionController jobStepExecutionController(JobService service) { - return new JobStepExecutionController(service); + public JobStepExecutionController jobStepExecutionController(TaskJobService taskJobService) { + return new JobStepExecutionController(taskJobService); } @Bean - public JobStepExecutionProgressController jobStepExecutionProgressController(JobService service) { - return new JobStepExecutionProgressController(service); + public JobStepExecutionProgressController jobStepExecutionProgressController(JobServiceContainer jobServiceContainer, TaskJobService taskJobService) { + return new JobStepExecutionProgressController(jobServiceContainer, taskJobService); } @Bean diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowServerConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowServerConfiguration.java index e7eafe0837..96bdbbfe6b 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowServerConfiguration.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/DataFlowServerConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2015-2022 the original author or authors. + * Copyright 2015-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
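With JobService replaced by JobServiceContainer in the step-execution controllers above, each request must first resolve the service for the job's schema target before touching the Batch tables. A rough sketch; the container's accessor name is an assumption:

    // resolve the per-schema-target JobService, then reuse the existing API
    JobService jobService = jobServiceContainer.get(schemaVersionTarget.getName());
    StepExecution stepExecution =
            jobService.getStepExecution(jobExecutionId, stepExecutionId);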
@@ -20,36 +20,31 @@ import javax.servlet.Filter; import javax.sql.DataSource; -import org.springframework.batch.core.repository.dao.AbstractJdbcBatchMetadataDao; -import org.springframework.batch.item.database.support.DataFieldMaxValueIncrementerFactory; -import org.springframework.batch.item.database.support.DefaultDataFieldMaxValueIncrementerFactory; import org.springframework.beans.factory.ObjectProvider; import org.springframework.boot.autoconfigure.batch.BatchProperties; import org.springframework.boot.autoconfigure.transaction.TransactionManagerCustomizers; import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskConfiguration; +import org.springframework.cloud.dataflow.aggregate.task.TaskRepositoryContainer; +import org.springframework.cloud.dataflow.aggregate.task.impl.DefaultTaskRepositoryContainer; import org.springframework.cloud.dataflow.audit.service.AuditRecordService; import org.springframework.cloud.dataflow.completion.CompletionConfiguration; import org.springframework.cloud.dataflow.registry.repository.AppRegistrationRepositoryCustom; import org.springframework.cloud.dataflow.registry.repository.AppRegistrationRepositoryImpl; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.dataflow.schema.service.SchemaServiceConfiguration; import org.springframework.cloud.dataflow.server.config.apps.CommonApplicationProperties; import org.springframework.cloud.dataflow.server.config.features.FeaturesConfiguration; import org.springframework.cloud.dataflow.server.config.web.WebConfiguration; import org.springframework.cloud.dataflow.server.db.migration.DataFlowFlywayConfigurationCustomizer; -import org.springframework.cloud.dataflow.server.repository.DataflowJobExecutionDao; -import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDao; -import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao; -import org.springframework.cloud.dataflow.server.repository.JdbcDataflowJobExecutionDao; -import org.springframework.cloud.dataflow.server.repository.JdbcDataflowTaskExecutionDao; -import org.springframework.cloud.dataflow.server.repository.JdbcDataflowTaskExecutionMetadataDao; import org.springframework.cloud.dataflow.server.support.AuthenticationSuccessEventListener; import org.springframework.cloud.task.configuration.TaskProperties; -import org.springframework.cloud.task.repository.support.DatabaseType; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Primary; import org.springframework.data.web.config.EnableSpringDataWebSupport; -import org.springframework.jdbc.support.MetaDataAccessException; import org.springframework.orm.jpa.JpaTransactionManager; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.web.filter.ForwardedHeaderFilter; @@ -68,11 +63,21 @@ * @author Michael Minella * @author Gunnar Hillert * @author Michael Wirth + * @author Corneil du Plessis */ @EnableSpringDataWebSupport @Configuration -@Import({ CompletionConfiguration.class, FeaturesConfiguration.class, WebConfiguration.class, H2ServerConfiguration.class }) +@Import({ + CompletionConfiguration.class, + FeaturesConfiguration.class, + 
WebConfiguration.class, + H2ServerConfiguration.class, + SchemaServiceConfiguration.class, + AggregateTaskConfiguration.class, + AggregateDataFlowTaskConfiguration.class +}) @EnableConfigurationProperties({ BatchProperties.class, CommonApplicationProperties.class }) +@ComponentScan(basePackages = {"org.springframework.cloud.dataflow.schema.service", "org.springframework.cloud.dataflow.aggregate.task"}) public class DataFlowServerConfiguration { @Bean @@ -94,34 +99,12 @@ public PlatformTransactionManager transactionManager( return transactionManager; } - @Bean - DataflowJobExecutionDao dataflowJobExecutionDao(DataSource dataSource) { - return new JdbcDataflowJobExecutionDao(dataSource, AbstractJdbcBatchMetadataDao.DEFAULT_TABLE_PREFIX); - } @Bean public TaskProperties taskProperties() { return new TaskProperties(); } - @Bean - DataflowTaskExecutionDao dataflowTaskExecutionDao(DataSource dataSource, TaskProperties taskProperties) { - return new JdbcDataflowTaskExecutionDao(dataSource, taskProperties); - } - - @Bean - DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao(DataSource dataSource) { - DataFieldMaxValueIncrementerFactory incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory(dataSource); - String databaseType; - try { - databaseType = DatabaseType.fromMetaData(dataSource).name(); - } - catch (MetaDataAccessException e) { - throw new IllegalStateException(e); - } - return new JdbcDataflowTaskExecutionMetadataDao(dataSource, incrementerFactory.getIncrementer(databaseType, - "task_execution_metadata_seq")); - } @Bean public AuthenticationSuccessEventListener authenticationSuccessEventListener( diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/TaskConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/TaskConfiguration.java index b9afc8d320..089c0723b0 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/TaskConfiguration.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/features/TaskConfiguration.java @@ -16,38 +16,44 @@ package org.springframework.cloud.dataflow.server.config.features; -import java.util.List; - import javax.sql.DataSource; +import java.util.List; -import org.springframework.batch.core.explore.JobExplorer; -import org.springframework.batch.core.explore.support.JobExplorerFactoryBean; -import org.springframework.batch.core.launch.support.SimpleJobLauncher; -import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService; +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskConfiguration; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer; +import org.springframework.cloud.dataflow.aggregate.task.DataflowTaskExecutionQueryDao; +import 
org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; +import org.springframework.cloud.dataflow.aggregate.task.TaskDeploymentReader; +import org.springframework.cloud.dataflow.aggregate.task.TaskRepositoryContainer; import org.springframework.cloud.dataflow.audit.service.AuditRecordService; import org.springframework.cloud.dataflow.configuration.metadata.ApplicationConfigurationMetadataResolver; import org.springframework.cloud.dataflow.core.TaskPlatform; import org.springframework.cloud.dataflow.registry.service.AppRegistryService; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.dataflow.schema.service.SchemaServiceConfiguration; import org.springframework.cloud.dataflow.server.DockerValidatorProperties; -import org.springframework.cloud.dataflow.server.batch.JobService; -import org.springframework.cloud.dataflow.server.batch.SimpleJobServiceFactoryBean; +import org.springframework.cloud.dataflow.server.config.AggregateDataFlowTaskConfiguration; import org.springframework.cloud.dataflow.server.config.apps.CommonApplicationProperties; import org.springframework.cloud.dataflow.server.job.LauncherRepository; -import org.springframework.cloud.dataflow.server.repository.DataflowJobExecutionDao; -import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDao; -import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao; +import org.springframework.cloud.dataflow.server.repository.DataflowJobExecutionDaoContainer; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDaoContainer; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDaoContainer; +import org.springframework.cloud.dataflow.server.repository.DefaultTaskDefinitionReader; +import org.springframework.cloud.dataflow.server.repository.DefaultTaskDeploymentReader; import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; import org.springframework.cloud.dataflow.server.repository.TaskDeploymentRepository; +import org.springframework.cloud.dataflow.server.repository.AggregateJobQueryDao; import org.springframework.cloud.dataflow.server.service.DeployerConfigurationMetadataResolver; +import org.springframework.cloud.dataflow.server.service.JobServiceContainer; import org.springframework.cloud.dataflow.server.service.LauncherInitializationService; import org.springframework.cloud.dataflow.server.service.SchedulerService; -import org.springframework.cloud.dataflow.server.service.SchemaService; import org.springframework.cloud.dataflow.server.service.TaskDeleteService; import org.springframework.cloud.dataflow.server.service.TaskExecutionCreationService; import org.springframework.cloud.dataflow.server.service.TaskExecutionInfoService; @@ -55,7 +61,6 @@ import org.springframework.cloud.dataflow.server.service.TaskJobService; import org.springframework.cloud.dataflow.server.service.TaskSaveService; import org.springframework.cloud.dataflow.server.service.impl.ComposedTaskRunnerConfigurationProperties; -import org.springframework.cloud.dataflow.server.service.impl.DefaultSchemaService; import org.springframework.cloud.dataflow.server.service.impl.DefaultTaskDeleteService; import org.springframework.cloud.dataflow.server.service.impl.DefaultTaskExecutionInfoService; import org.springframework.cloud.dataflow.server.service.impl.DefaultTaskExecutionRepositoryService; @@ -65,15 +70,12 @@ import 
org.springframework.cloud.dataflow.server.service.impl.TaskAppDeploymentRequestCreator; import org.springframework.cloud.dataflow.server.service.impl.TaskConfigurationProperties; import org.springframework.cloud.deployer.spi.scheduler.Scheduler; -import org.springframework.cloud.task.repository.TaskExplorer; -import org.springframework.cloud.task.repository.TaskRepository; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Profile; import org.springframework.data.map.repository.config.EnableMapRepositories; import org.springframework.lang.Nullable; -import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.annotation.EnableTransactionManagement; /** @@ -89,12 +91,21 @@ */ @Configuration(proxyBeanMethods = false) @ConditionalOnTasksEnabled -@EnableConfigurationProperties({ TaskConfigurationProperties.class, CommonApplicationProperties.class, - DockerValidatorProperties.class, LocalPlatformProperties.class, ComposedTaskRunnerConfigurationProperties.class +@EnableConfigurationProperties({ + TaskConfigurationProperties.class, + CommonApplicationProperties.class, + DockerValidatorProperties.class, + LocalPlatformProperties.class, + ComposedTaskRunnerConfigurationProperties.class }) @EnableMapRepositories(basePackages = "org.springframework.cloud.dataflow.server.job") @EnableTransactionManagement -@Import(TaskConfiguration.TaskDeleteServiceConfig.class) +@Import({ + TaskConfiguration.TaskDeleteServiceConfig.class, + SchemaServiceConfiguration.class, + AggregateTaskConfiguration.class, + AggregateDataFlowTaskConfiguration.class +}) public class TaskConfiguration { @Autowired @@ -109,6 +120,18 @@ public class TaskConfiguration { @Autowired private ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties; + @Bean + @ConditionalOnMissingBean + public TaskDefinitionReader taskDefinitionReader(TaskDefinitionRepository taskDefinitionRepository) { + return new DefaultTaskDefinitionReader(taskDefinitionRepository); + } + + @Bean + @ConditionalOnMissingBean + public TaskDeploymentReader taskDeploymentReader(TaskDeploymentRepository repository) { + return new DefaultTaskDeploymentReader(repository); + } + @Bean public DeployerConfigurationMetadataResolver deployerConfigurationMetadataResolver( TaskConfigurationProperties taskConfigurationProperties) { @@ -123,24 +146,18 @@ public LauncherInitializationService launcherInitializationService( return new LauncherInitializationService(launcherRepository, platforms, resolver); } - @Bean - @ConditionalOnMissingBean - public SchemaService schemaService() { - return new DefaultSchemaService(); - } - /** * The default profile is active when no other profiles are active. This is configured so * that several tests will pass without having to explicitly enable the local profile. 
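The beans in this configuration follow one recurring lookup: map a task name to its schema version target, then pick the matching repository or DAO from the container. Sketched under the assumption that AggregateExecutionSupport exposes a findSchemaVersionTarget(taskName, taskDefinitionReader) lookup and that TaskRepository offers createTaskExecution(String):

    SchemaVersionTarget target = aggregateExecutionSupport
            .findSchemaVersionTarget(taskName, taskDefinitionReader); // assumed signature
    TaskRepository taskRepository = taskRepositoryContainer.get(target.getName());
    TaskExecution taskExecution = taskRepository.createTaskExecution(taskName);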
- * @param localPlatformProperties the local platform properties - * @param localScheduler the local scheduler * + * @param localPlatformProperties the local platform properties + * @param localScheduler the local scheduler * @return the task platform */ - @Profile({ "local", "default" }) + @Profile({"local", "default"}) @Bean public TaskPlatform localTaskPlatform(LocalPlatformProperties localPlatformProperties, - @Nullable Scheduler localScheduler) { + @Nullable Scheduler localScheduler) { TaskPlatform taskPlatform = new LocalTaskPlatformFactory(localPlatformProperties, localScheduler) .createTaskPlatform(); taskPlatform.setPrimary(true); @@ -148,11 +165,15 @@ public TaskPlatform localTaskPlatform(LocalPlatformProperties localPlatformPrope } @Bean - public TaskExecutionInfoService taskDefinitionRetriever(AppRegistryService registry, - TaskExplorer taskExplorer, TaskDefinitionRepository taskDefinitionRepository, + public TaskExecutionInfoService taskDefinitionRetriever( + AppRegistryService registry, + AggregateTaskExplorer taskExplorer, + TaskDefinitionRepository taskDefinitionRepository, TaskConfigurationProperties taskConfigurationProperties, - LauncherRepository launcherRepository, List taskPlatforms, - ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties) { + LauncherRepository launcherRepository, + List taskPlatforms, + ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties + ) { return new DefaultTaskExecutionInfoService(dataSourceProperties, registry, taskExplorer, taskDefinitionRepository, taskConfigurationProperties, launcherRepository, taskPlatforms, composedTaskRunnerConfigurationProperties); @@ -160,13 +181,16 @@ public TaskExecutionInfoService taskDefinitionRetriever(AppRegistryService regis @Bean public TaskSaveService saveTaskService(TaskDefinitionRepository taskDefinitionRepository, - AuditRecordService auditRecordService, AppRegistryService registry) { + AuditRecordService auditRecordService, AppRegistryService registry) { return new DefaultTaskSaveService(taskDefinitionRepository, auditRecordService, registry); } @Bean - public TaskExecutionCreationService taskExecutionRepositoryService(TaskRepository taskRepository) { - return new DefaultTaskExecutionRepositoryService(taskRepository); + public TaskExecutionCreationService taskExecutionRepositoryService( + TaskRepositoryContainer taskRepositoryContainer, + AggregateExecutionSupport aggregateExecutionSupport, + TaskDefinitionReader taskDefinitionReader) { + return new DefaultTaskExecutionRepositoryService(taskRepositoryContainer, aggregateExecutionSupport, taskDefinitionReader); } @Bean @@ -177,35 +201,6 @@ public TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator( metadataResolver, dataflowServerUri); } - @Bean - public SimpleJobServiceFactoryBean simpleJobServiceFactoryBean(DataSource dataSource, - JobRepositoryFactoryBean repositoryFactoryBean, JobExplorer jobExplorer, - PlatformTransactionManager dataSourceTransactionManager) throws Exception { - SimpleJobServiceFactoryBean factoryBean = new SimpleJobServiceFactoryBean(); - factoryBean.setDataSource(dataSource); - factoryBean.setJobRepository(repositoryFactoryBean.getObject()); - factoryBean.setJobLauncher(new SimpleJobLauncher()); - factoryBean.setDataSource(dataSource); - factoryBean.setJobExplorer(jobExplorer); - factoryBean.setTransactionManager(dataSourceTransactionManager); - return factoryBean; - } - - @Bean - public JobExplorerFactoryBean jobExplorerFactoryBean(DataSource dataSource) { - 
JobExplorerFactoryBean jobExplorerFactoryBean = new JobExplorerFactoryBean(); - jobExplorerFactoryBean.setDataSource(dataSource); - return jobExplorerFactoryBean; - } - - @Bean - public JobRepositoryFactoryBean jobRepositoryFactoryBean(DataSource dataSource, - PlatformTransactionManager platformTransactionManager) { - JobRepositoryFactoryBean repositoryFactoryBean = new JobRepositoryFactoryBean(); - repositoryFactoryBean.setDataSource(dataSource); - repositoryFactoryBean.setTransactionManager(platformTransactionManager); - return repositoryFactoryBean; - } @Configuration public static class TaskExecutionServiceConfig { @Bean @@ -214,23 +209,40 @@ public TaskExecutionService taskService( ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties, LauncherRepository launcherRepository, AuditRecordService auditRecordService, - TaskRepository taskRepository, + TaskRepositoryContainer taskRepositoryContainer, TaskExecutionInfoService taskExecutionInfoService, TaskDeploymentRepository taskDeploymentRepository, TaskExecutionCreationService taskExecutionRepositoryService, TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator, - TaskExplorer taskExplorer, - DataflowTaskExecutionDao dataflowTaskExecutionDao, - DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao, + AggregateTaskExplorer taskExplorer, + DataflowTaskExecutionDaoContainer dataflowTaskExecutionDaoContainer, + DataflowTaskExecutionMetadataDaoContainer dataflowTaskExecutionMetadataDaoContainer, + DataflowTaskExecutionQueryDao dataflowTaskExecutionQueryDao, @Nullable OAuth2TokenUtilsService oauth2TokenUtilsService, - TaskSaveService taskSaveService - ) { + TaskSaveService taskSaveService, + AggregateExecutionSupport aggregateExecutionSupport, + TaskDefinitionRepository taskDefinitionRepository, + TaskDefinitionReader taskDefinitionReader) { DefaultTaskExecutionService defaultTaskExecutionService = new DefaultTaskExecutionService( - launcherRepository, auditRecordService, taskRepository, - taskExecutionInfoService, taskDeploymentRepository, taskExecutionRepositoryService, - taskAppDeploymentRequestCreator, taskExplorer, dataflowTaskExecutionDao, - dataflowTaskExecutionMetadataDao, oauth2TokenUtilsService, taskSaveService, - taskConfigurationProperties, composedTaskRunnerConfigurationProperties); + launcherRepository, + auditRecordService, + taskRepositoryContainer, + taskExecutionInfoService, + taskDeploymentRepository, + taskDefinitionRepository, + taskDefinitionReader, + taskExecutionRepositoryService, + taskAppDeploymentRequestCreator, + taskExplorer, + dataflowTaskExecutionDaoContainer, + dataflowTaskExecutionMetadataDaoContainer, + dataflowTaskExecutionQueryDao, + oauth2TokenUtilsService, + taskSaveService, + taskConfigurationProperties, + aggregateExecutionSupport, + composedTaskRunnerConfigurationProperties + ); defaultTaskExecutionService.setAutoCreateTaskDefinitions(taskConfigurationProperties.isAutoCreateTaskDefinitions()); return defaultTaskExecutionService; } @@ -239,33 +251,60 @@ public TaskExecutionService taskService( @Configuration(proxyBeanMethods = false) public static class TaskJobServiceConfig { @Bean - public TaskJobService taskJobExecutionRepository(JobService service, TaskExplorer taskExplorer, - TaskDefinitionRepository taskDefinitionRepository, TaskExecutionService taskExecutionService, LauncherRepository launcherRepository) { - return new DefaultTaskJobService(service, taskExplorer, taskDefinitionRepository, taskExecutionService, launcherRepository); + public 
TaskJobService taskJobExecutionRepository( + JobServiceContainer serviceContainer, + AggregateTaskExplorer taskExplorer, + TaskDefinitionRepository taskDefinitionRepository, + TaskExecutionService taskExecutionService, + LauncherRepository launcherRepository, + AggregateExecutionSupport aggregateExecutionSupport, + AggregateJobQueryDao aggregateJobQueryDao, + TaskDefinitionReader taskDefinitionReader + ) { + return new DefaultTaskJobService( + serviceContainer, + taskExplorer, + taskDefinitionRepository, + taskExecutionService, + launcherRepository, + aggregateExecutionSupport, + aggregateJobQueryDao, + taskDefinitionReader + ); } } @Configuration(proxyBeanMethods = false) public static class TaskDeleteServiceConfig { @Bean - public TaskDeleteService deleteTaskService(TaskExplorer taskExplorer, LauncherRepository launcherRepository, - TaskDefinitionRepository taskDefinitionRepository, TaskDeploymentRepository taskDeploymentRepository, + public TaskDeleteService deleteTaskService( + AggregateTaskExplorer taskExplorer, + LauncherRepository launcherRepository, + TaskDefinitionRepository taskDefinitionRepository, + TaskDeploymentRepository taskDeploymentRepository, AuditRecordService auditRecordService, - DataflowTaskExecutionDao dataflowTaskExecutionDao, - DataflowJobExecutionDao dataflowJobExecutionDao, - DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao, + DataflowTaskExecutionDaoContainer dataflowTaskExecutionDaoContainer, + DataflowJobExecutionDaoContainer dataflowJobExecutionDaoContainer, + DataflowTaskExecutionMetadataDaoContainer dataflowTaskExecutionMetadataDaoContainer, TaskConfigurationProperties taskConfigurationProperties, DataSource dataSource, - @Autowired(required = false) SchedulerService schedulerService) { - return new DefaultTaskDeleteService(taskExplorer, launcherRepository, taskDefinitionRepository, + SchemaService schemaService, + @Autowired(required = false) SchedulerService schedulerService + ) { + return new DefaultTaskDeleteService( + taskExplorer, + launcherRepository, + taskDefinitionRepository, taskDeploymentRepository, auditRecordService, - dataflowTaskExecutionDao, - dataflowJobExecutionDao, - dataflowTaskExecutionMetadataDao, + dataflowTaskExecutionDaoContainer, + dataflowJobExecutionDaoContainer, + dataflowTaskExecutionMetadataDaoContainer, schedulerService, + schemaService, taskConfigurationProperties, - dataSource); + dataSource + ); } } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/web/WebConfiguration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/web/WebConfiguration.java index ae1540e6b0..deb1a3dda0 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/web/WebConfiguration.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/config/web/WebConfiguration.java @@ -34,9 +34,9 @@ import org.springframework.boot.autoconfigure.http.HttpMessageConverters; import org.springframework.boot.autoconfigure.jackson.Jackson2ObjectMapperBuilderCustomizer; import org.springframework.boot.web.servlet.ServletContextInitializer; -import org.springframework.cloud.dataflow.core.AppBootVersionConverter; import org.springframework.cloud.dataflow.rest.support.jackson.ISO8601DateFormatWithMilliSeconds; import org.springframework.cloud.dataflow.rest.support.jackson.Jackson2DataflowModule; +import 
org.springframework.cloud.dataflow.schema.AppBootVersionConverter; import org.springframework.context.ApplicationListener; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/ApiNotSupportedException.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/ApiNotSupportedException.java new file mode 100644 index 0000000000..2cb0a2d214 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/ApiNotSupportedException.java @@ -0,0 +1,8 @@ +package org.springframework.cloud.dataflow.server.controller; + +public class ApiNotSupportedException extends RuntimeException { + + public ApiNotSupportedException(String message) { + super(message); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AppRegistryController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AppRegistryController.java index 5a6092c51a..2be29e5342 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AppRegistryController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/AppRegistryController.java @@ -35,7 +35,6 @@ import org.springframework.boot.configurationmetadata.ConfigurationMetadataProperty; import org.springframework.cloud.dataflow.configuration.metadata.ApplicationConfigurationMetadataResolver; -import org.springframework.cloud.dataflow.core.AppBootSchemaVersion; import org.springframework.cloud.dataflow.core.AppRegistration; import org.springframework.cloud.dataflow.core.ApplicationType; import org.springframework.cloud.dataflow.core.StreamAppDefinition; @@ -48,6 +47,7 @@ import org.springframework.cloud.dataflow.rest.SkipperStream; import org.springframework.cloud.dataflow.rest.resource.AppRegistrationResource; import org.springframework.cloud.dataflow.rest.resource.DetailedAppRegistrationResource; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion; import org.springframework.cloud.dataflow.server.controller.assembler.AppRegistrationAssemblerProvider; import org.springframework.cloud.dataflow.server.repository.InvalidApplicationNameException; import org.springframework.cloud.dataflow.server.repository.StreamDefinitionRepository; @@ -235,7 +235,7 @@ public void register( @PathVariable("type") ApplicationType type, @PathVariable("name") String name, @PathVariable("version") String version, - @RequestParam(name = "bootVersion", required = false) AppBootSchemaVersion bootVersion, + @RequestParam(name = "bootVersion", required = false) String bootVersion, @RequestParam("uri") String uri, @RequestParam(name = "metadata-uri", required = false) String metadataUri, @RequestParam(value = "force", defaultValue = "false") boolean force) { @@ -246,9 +246,15 @@ public void register( throw new AppAlreadyRegisteredException(previous); } try { - AppRegistration registration = this.appRegistryService.save(name, type, version, new URI(uri), - metadataUri != null ? new URI(metadataUri) : null, bootVersion); - prefetchMetadata(Arrays.asList(registration)); + AppRegistration registration = this.appRegistryService.save( + name, + type, + version, + new URI(uri), + metadataUri != null ? 
new URI(metadataUri) : null, + bootVersion != null ? AppBootSchemaVersion.fromBootVersion(bootVersion) : AppBootSchemaVersion.defaultVersion() + ); + prefetchMetadata(Collections.singletonList(registration)); } catch (URISyntaxException e) { throw new IllegalArgumentException(e); @@ -261,12 +267,20 @@ public void register( public void register( @PathVariable("type") ApplicationType type, @PathVariable("name") String name, - @RequestParam(name = "bootVersion", required = false) AppBootSchemaVersion bootVersion, + @RequestParam(name = "bootVersion", required = false) String bootVersion, @RequestParam("uri") String uri, @RequestParam(name = "metadata-uri", required = false) String metadataUri, @RequestParam(value = "force", defaultValue = "false") boolean force) { String version = this.appRegistryService.getResourceVersion(uri); - this.register(type, name, version, bootVersion, uri, metadataUri, force); + this.register( + type, + name, + version, + bootVersion, + uri, + metadataUri, + force + ); } /** diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionController.java index aa0b285a1f..9d8a0a9c34 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionController.java @@ -1,5 +1,5 @@ /* - * Copyright 2016 the original author or authors. + * Copyright 2016-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
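
The register endpoints above now accept bootVersion as a plain String and convert it server-side, falling back to the default schema version when the parameter is omitted. A minimal sketch of that conversion guard, using only the two AppBootSchemaVersion methods visible in the hunk (the helper name resolveBootVersion is illustrative, not part of the patch):

    import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion;

    final class BootVersionResolution {
        // A null bootVersion means the client omitted the request parameter;
        // fall back to the default schema version, mirroring the controller above.
        static AppBootSchemaVersion resolveBootVersion(String bootVersion) {
            return bootVersion != null
                    ? AppBootSchemaVersion.fromBootVersion(bootVersion)
                    : AppBootSchemaVersion.defaultVersion();
        }
    }
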
@@ -16,7 +16,6 @@ package org.springframework.cloud.dataflow.server.controller; -import java.util.List; import java.util.TimeZone; import org.springframework.batch.core.BatchStatus; @@ -29,17 +28,19 @@ import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; import org.springframework.cloud.dataflow.rest.job.support.TimeUtils; import org.springframework.cloud.dataflow.rest.resource.JobExecutionResource; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.dataflow.server.batch.JobService; import org.springframework.cloud.dataflow.server.service.TaskJobService; import org.springframework.data.domain.Page; -import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; import org.springframework.data.web.PagedResourcesAssembler; import org.springframework.hateoas.PagedModel; import org.springframework.hateoas.server.ExposesResourceFor; import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; import org.springframework.util.Assert; +import org.springframework.util.StringUtils; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; @@ -47,12 +48,16 @@ import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + /** * Controller for operations on {@link org.springframework.batch.core.JobExecution}. This * includes obtaining Job execution information from the job explorer. * * @author Glenn Renfro * @author Gunnar Hillert + * @author Corneil du Plessis */ @RestController @RequestMapping("/jobs/executions") @@ -68,7 +73,7 @@ public class JobExecutionController { * a the {@link JobService} * * @param taskJobService the service this controller will use for retrieving job execution - * information. Must not be null. + * information. Must not be null. */ public JobExecutionController(TaskJobService taskJobService) { Assert.notNull(taskJobService, "taskJobService must not be null"); @@ -78,13 +83,13 @@ public JobExecutionController(TaskJobService taskJobService) { /** * Retrieve all task job executions with the task name specified * - * @param jobName name of the job. SQL server specific wildcards are enabled (eg.: myJob%, - * m_Job, ...) - * @param status Optional status criteria. - * @param pageable page-able collection of {@code TaskJobExecution}s. + * @param jobName name of the job. SQL server specific wildcards are enabled (eg.: myJob%, + * m_Job, ...) + * @param status Optional status criteria. + * @param pageable page-able collection of {@code TaskJobExecution}s. * @param assembler for the {@link TaskJobExecution}s * @return list task/job executions with the specified jobName. - * @throws NoSuchJobException if the job with the given name does not exist. + * @throws NoSuchJobException if the job with the given name does not exist. * @throws NoSuchJobExecutionException if the job execution doesn't exist. 
*/ @RequestMapping(value = "", method = RequestMethod.GET, produces = "application/json") @@ -93,19 +98,9 @@ public PagedModel retrieveJobsByParameters( @RequestParam(value = "name", required = false) String jobName, @RequestParam(value = "status", required = false) BatchStatus status, Pageable pageable, PagedResourcesAssembler assembler) throws NoSuchJobException, NoSuchJobExecutionException { - List jobExecutions; - Page page; - - if (jobName == null && status == null) { - jobExecutions = taskJobService.listJobExecutions(pageable); - page = new PageImpl<>(jobExecutions, pageable, taskJobService.countJobExecutions()); - } else { - jobExecutions = taskJobService.listJobExecutionsForJob(pageable, jobName, status); - page = new PageImpl<>(jobExecutions, pageable, - taskJobService.countJobExecutionsForJob(jobName, status)); - } - - return assembler.toModel(page, jobAssembler); + Page jobExecutions = jobName == null && status == null ? taskJobService.listJobExecutions(pageable) + : taskJobService.listJobExecutionsForJob(pageable, jobName, status); + return assembler.toModel(jobExecutions, jobAssembler); } /** @@ -114,14 +109,18 @@ public PagedModel retrieveJobsByParameters( * @param id the id of the requested {@link JobExecution} * @return the {@link JobExecution} * @throws NoSuchJobExecutionException if the specified job execution for the id does not - exist. + exist. */ @RequestMapping(value = "/{id}", method = RequestMethod.GET, produces = "application/json") @ResponseStatus(HttpStatus.OK) - public JobExecutionResource view(@PathVariable("id") long id) throws NoSuchJobExecutionException { - TaskJobExecution jobExecution = taskJobService.getJobExecution(id); + public JobExecutionResource view(@PathVariable("id") long id, + @RequestParam(name = "schemaTarget", required = false) String schemaTarget) throws NoSuchJobExecutionException { + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + TaskJobExecution jobExecution = taskJobService.getJobExecution(id, schemaTarget); if (jobExecution == null) { - throw new NoSuchJobExecutionException(String.format("No Job Execution with id of %d exits", id)); + throw new NoSuchJobExecutionException(String.format("No Job Execution with id of %d exists for schema target %s", id, schemaTarget)); } return jobAssembler.toModel(jobExecution); } @@ -132,14 +131,17 @@ public JobExecutionResource view(@PathVariable("id") long id) throws NoSuchJobEx * * @param jobExecutionId the executionId of the job execution to stop. * @throws JobExecutionNotRunningException if a stop is requested on a job that is not - running. - * @throws NoSuchJobExecutionException if the job execution id specified does not exist. + running. + * @throws NoSuchJobExecutionException if the job execution id specified does not exist.
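
The view handler above introduces a guard that the other reworked handlers repeat verbatim: a missing or blank schemaTarget request parameter falls back to the default schema target. A sketch of the recurring pattern, pulled into a hypothetical helper (the patch itself inlines the check in each handler):

    import org.springframework.cloud.dataflow.schema.SchemaVersionTarget;
    import org.springframework.util.StringUtils;

    final class SchemaTargetDefaults {
        // Returns the supplied target name, or the default target's name when null/blank.
        static String resolve(String schemaTarget) {
            return StringUtils.hasText(schemaTarget)
                    ? schemaTarget
                    : SchemaVersionTarget.defaultTarget().getName();
        }
    }
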
*/ - @RequestMapping(value = { "/{executionId}" }, method = RequestMethod.PUT, params = "stop=true") - @ResponseStatus(HttpStatus.OK) - public void stopJobExecution(@PathVariable("executionId") long jobExecutionId) - throws NoSuchJobExecutionException, JobExecutionNotRunningException { - taskJobService.stopJobExecution(jobExecutionId); + @RequestMapping(value = {"/{executionId}"}, method = RequestMethod.PUT, params = "stop=true") + + public ResponseEntity stopJobExecution( + @PathVariable("executionId") long jobExecutionId, + @RequestParam(value = "schemaTarget", required = false) String schemaTarget + ) throws NoSuchJobExecutionException, JobExecutionNotRunningException { + taskJobService.stopJobExecution(jobExecutionId, schemaTarget); + return ResponseEntity.ok().build(); } /** @@ -148,13 +150,16 @@ public void stopJobExecution(@PathVariable("executionId") long jobExecutionId) * * @param jobExecutionId the executionId of the job execution to restart * @throws NoSuchJobExecutionException if the job execution for the jobExecutionId - * specified does not exist. + * specified does not exist. */ - @RequestMapping(value = { "/{executionId}" }, method = RequestMethod.PUT, params = "restart=true") + @RequestMapping(value = {"/{executionId}"}, method = RequestMethod.PUT, params = "restart=true") @ResponseStatus(HttpStatus.OK) - public void restartJobExecution(@PathVariable("executionId") long jobExecutionId) - throws NoSuchJobExecutionException { - taskJobService.restartJobExecution(jobExecutionId); + public ResponseEntity restartJobExecution( + @PathVariable("executionId") long jobExecutionId, + @RequestParam(value = "schemaTarget", required = false) String schemaTarget + ) throws NoSuchJobExecutionException { + taskJobService.restartJobExecution(jobExecutionId, schemaTarget); + return ResponseEntity.ok().build(); } /** @@ -180,12 +185,24 @@ public void setTimeZone(TimeZone timeZone) { @Override public JobExecutionResource toModel(TaskJobExecution taskJobExecution) { - return createModelWithId(taskJobExecution.getJobExecution().getId(), taskJobExecution); + return instantiateModel(taskJobExecution); } @Override public JobExecutionResource instantiateModel(TaskJobExecution taskJobExecution) { - return new JobExecutionResource(taskJobExecution, timeZone); + JobExecutionResource resource = new JobExecutionResource(taskJobExecution, timeZone); + try { + resource.add(linkTo(methodOn(JobExecutionController.class).view(taskJobExecution.getTaskId(), taskJobExecution.getSchemaTarget())).withSelfRel()); + if (taskJobExecution.getJobExecution().isRunning()) { + resource.add(linkTo(methodOn(JobExecutionController.class).stopJobExecution(taskJobExecution.getJobExecution().getJobId(), taskJobExecution.getSchemaTarget())).withRel("stop")); + } + if (!taskJobExecution.getJobExecution().getStatus().equals(BatchStatus.COMPLETED)) { + resource.add(linkTo(methodOn(JobExecutionController.class).restartJobExecution(taskJobExecution.getJobExecution().getJobId(), taskJobExecution.getSchemaTarget())).withRel("restart")); + } + } catch (NoSuchJobExecutionException | JobExecutionNotRunningException e) { + throw new RuntimeException(e); + } + return resource; } } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionThinController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionThinController.java index 1609193fcc..d61fc8fb31 100644 --- 
a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionThinController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobExecutionThinController.java @@ -1,5 +1,5 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2023 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,10 +17,10 @@ package org.springframework.cloud.dataflow.server.controller; import java.util.Date; -import java.util.List; import java.util.TimeZone; import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.launch.JobExecutionNotRunningException; import org.springframework.batch.core.launch.NoSuchJobException; import org.springframework.batch.core.launch.NoSuchJobExecutionException; import org.springframework.beans.factory.annotation.Autowired; @@ -28,10 +28,10 @@ import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; import org.springframework.cloud.dataflow.rest.job.support.TimeUtils; import org.springframework.cloud.dataflow.rest.resource.JobExecutionThinResource; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.dataflow.server.batch.JobService; import org.springframework.cloud.dataflow.server.service.TaskJobService; import org.springframework.data.domain.Page; -import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; import org.springframework.data.web.PagedResourcesAssembler; import org.springframework.format.annotation.DateTimeFormat; @@ -40,17 +40,22 @@ import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; import org.springframework.util.Assert; +import org.springframework.util.StringUtils; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + /** * Controller for retrieving {@link JobExecution}s where the step executions are * not included in the results that are returned. * * @author Glenn Renfro + * @author Corneil du Plessis * * @since 2.0 */ @@ -68,7 +73,7 @@ public class JobExecutionThinController { * from a the {@link JobService} * * @param taskJobService the service this controller will use for retrieving job - * execution information. Must not be null. + * execution information. Must not be null. */ @Autowired public JobExecutionThinController(TaskJobService taskJobService) { @@ -80,104 +85,115 @@ public JobExecutionThinController(TaskJobService taskJobService) { * Return a page-able list of {@link JobExecutionThinResource} defined jobs that * do not contain step execution detail. * - * @param pageable page-able collection of {@code TaskJobExecution}s. + * @param pageable page-able collection of {@code TaskJobExecution}s. * @param assembler for the {@link TaskJobExecution}s * @return a list of Task/Job executions(job executions do not contain step executions. 
* @throws NoSuchJobExecutionException in the event that a job execution id specified - * is not present when looking up stepExecutions for the result. + * is not present when looking up stepExecutions for the result. */ @RequestMapping(value = "", method = RequestMethod.GET, produces = "application/json") @ResponseStatus(HttpStatus.OK) public PagedModel listJobsOnly(Pageable pageable, - PagedResourcesAssembler assembler) throws NoSuchJobExecutionException { - List jobExecutions = taskJobService.listJobExecutionsWithStepCount(pageable); - Page page = new PageImpl<>(jobExecutions, pageable, taskJobService.countJobExecutions()); - return assembler.toModel(page, jobAssembler); + PagedResourcesAssembler assembler) throws NoSuchJobExecutionException { + Page jobExecutions = taskJobService.listJobExecutionsWithStepCount(pageable); + return assembler.toModel(jobExecutions, jobAssembler); } + /** * Retrieve all task job executions with the task name specified * - * @param jobName name of the job - * @param pageable page-able collection of {@code TaskJobExecution}s. + * @param jobName name of the job + * @param pageable page-able collection of {@code TaskJobExecution}s. * @param assembler for the {@link TaskJobExecution}s * @return list task/job executions with the specified jobName. * @throws NoSuchJobException if the job with the given name does not exist. */ @RequestMapping(value = "", method = RequestMethod.GET, params = "name", produces = "application/json") @ResponseStatus(HttpStatus.OK) - public PagedModel retrieveJobsByName(@RequestParam("name") String jobName, - Pageable pageable, PagedResourcesAssembler assembler) throws NoSuchJobException { - List jobExecutions = taskJobService.listJobExecutionsForJobWithStepCount(pageable, jobName); - Page page = new PageImpl<>(jobExecutions, pageable, - taskJobService.countJobExecutionsForJob(jobName, null)); - return assembler.toModel(page, jobAssembler); + public PagedModel retrieveJobsByName( + @RequestParam("name") String jobName, + Pageable pageable, + PagedResourcesAssembler assembler) throws NoSuchJobException { + Page jobExecutions = taskJobService.listJobExecutionsForJobWithStepCount(pageable, jobName); + return assembler.toModel(jobExecutions, jobAssembler); } /** * Retrieve all task job executions filtered with the date range specified * - * @param fromDate the date which start date must be greater than. - * @param toDate the date which start date must be less than. - * @param pageable page-able collection of {@code TaskJobExecution}s. + * @param fromDate the date which start date must be greater than. + * @param toDate the date which start date must be less than. + * @param pageable page-able collection of {@code TaskJobExecution}s. * @param assembler for the {@link TaskJobExecution}s * @return list task/job executions with the specified jobName. * @throws NoSuchJobException if the job with the given name does not exist. 
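
With the change above, a client scopes a job-instance query to a single schema target through an ordinary request parameter. A hedged client-side sketch; the /jobs/thinexecutions path and the boot3 target name are assumptions, since neither literal appears in this hunk:

    import org.springframework.web.client.RestTemplate;

    public class ThinExecutionsClient {
        public static void main(String[] args) {
            RestTemplate rest = new RestTemplate();
            // Query thin job executions for job instance 42, scoped to one schema target.
            String json = rest.getForObject(
                    "http://localhost:9393/jobs/thinexecutions?jobInstanceId={id}&schemaTarget={target}",
                    String.class, 42, "boot3");
            System.out.println(json);
        }
    }
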
*/ - @RequestMapping(value = "", method = RequestMethod.GET, params = { "fromDate", - "toDate" }, produces = "application/json") + @RequestMapping(value = "", method = RequestMethod.GET, params = {"fromDate", + "toDate"}, produces = "application/json") @ResponseStatus(HttpStatus.OK) public PagedModel retrieveJobsByDateRange( @RequestParam("fromDate") @DateTimeFormat(pattern = TimeUtils.DEFAULT_DATAFLOW_DATE_TIME_PARAMETER_FORMAT_PATTERN) Date fromDate, @RequestParam("toDate") @DateTimeFormat(pattern = TimeUtils.DEFAULT_DATAFLOW_DATE_TIME_PARAMETER_FORMAT_PATTERN) Date toDate, - Pageable pageable, PagedResourcesAssembler assembler) throws NoSuchJobException { - List jobExecutions = taskJobService.listJobExecutionsForJobWithStepCount(pageable, fromDate, - toDate); - Page page = new PageImpl<>(jobExecutions, pageable, jobExecutions.size()); - return assembler.toModel(page, jobAssembler); + Pageable pageable, + PagedResourcesAssembler assembler + ) throws NoSuchJobException { + Page jobExecutions = taskJobService.listJobExecutionsForJobWithStepCount(pageable, fromDate, toDate); + return assembler.toModel(jobExecutions, jobAssembler); } /** * Retrieve all task job executions filtered with the job instance id specified * * @param jobInstanceId the job instance id associated with the execution. - * @param pageable page-able collection of {@code TaskJobExecution}s. - * @param assembler for the {@link TaskJobExecution}s + * @param pageable page-able collection of {@code TaskJobExecution}s. + * @param assembler for the {@link TaskJobExecution}s * @return list task/job executions with the specified jobName. * @throws NoSuchJobException if the job with the given name does not exist. */ @RequestMapping(value = "", method = RequestMethod.GET, params = "jobInstanceId", produces = "application/json") @ResponseStatus(HttpStatus.OK) public PagedModel retrieveJobsByJobInstanceId( - @RequestParam("jobInstanceId") int jobInstanceId, Pageable pageable, + @RequestParam("jobInstanceId") int jobInstanceId, + @RequestParam(value = "schemaTarget", required = false) String schemaTarget, + Pageable pageable, PagedResourcesAssembler assembler) throws NoSuchJobException { - List jobExecutions = taskJobService - .listJobExecutionsForJobWithStepCountFilteredByJobInstanceId(pageable, jobInstanceId); - Page page = new PageImpl<>(jobExecutions, pageable, jobExecutions.size()); - return assembler.toModel(page, jobAssembler); + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + Page jobExecutions = taskJobService + .listJobExecutionsForJobWithStepCountFilteredByJobInstanceId(pageable, jobInstanceId, schemaTarget); + return assembler.toModel(jobExecutions, jobAssembler); } /** * Retrieve all task job executions filtered with the task execution id specified * * @param taskExecutionId the task execution id associated with the execution. - * @param pageable page-able collection of {@code TaskJobExecution}s. - * @param assembler for the {@link TaskJobExecution}s + * @param pageable page-able collection of {@code TaskJobExecution}s. + * @param assembler for the {@link TaskJobExecution}s * @return list task/job executions with the specified jobName. * @throws NoSuchJobException if the job with the given name does not exist. 
*/ @RequestMapping(value = "", method = RequestMethod.GET, params = "taskExecutionId", produces = "application/json") @ResponseStatus(HttpStatus.OK) public PagedModel retrieveJobsByTaskExecutionId( - @RequestParam("taskExecutionId") int taskExecutionId, Pageable pageable, + @RequestParam("taskExecutionId") int taskExecutionId, + @RequestParam(value = "schemaTarget", required = false) String schemaTarget, + Pageable pageable, PagedResourcesAssembler assembler) throws NoSuchJobException { - List jobExecutions = taskJobService - .listJobExecutionsForJobWithStepCountFilteredByTaskExecutionId(pageable, taskExecutionId); - Page page = new PageImpl<>(jobExecutions, pageable, jobExecutions.size()); - return assembler.toModel(page, jobAssembler); + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + Page jobExecutions = taskJobService.listJobExecutionsForJobWithStepCountFilteredByTaskExecutionId( + pageable, + taskExecutionId, + schemaTarget + ); + return assembler.toModel(jobExecutions, jobAssembler); } /** - * {@link org.springframework.hateoas.server.ResourceAssembler} implementation that converts + * {@link org.springframework.hateoas.server.RepresentationModelAssembler} implementation that converts * {@link JobExecution}s to {@link JobExecutionThinResource}s. */ private static class Assembler extends RepresentationModelAssemblerSupport { @@ -199,12 +215,24 @@ public void setTimeZone(TimeZone timeZone) { @Override public JobExecutionThinResource toModel(TaskJobExecution taskJobExecution) { - return createModelWithId(taskJobExecution.getJobExecution().getId(), taskJobExecution); + return instantiateModel(taskJobExecution); } @Override public JobExecutionThinResource instantiateModel(TaskJobExecution taskJobExecution) { - return new JobExecutionThinResource(taskJobExecution, timeZone); + JobExecutionThinResource resource = new JobExecutionThinResource(taskJobExecution, timeZone); + try { + resource.add(linkTo(methodOn(JobExecutionController.class).view(taskJobExecution.getTaskId(), taskJobExecution.getSchemaTarget())).withSelfRel()); + if (taskJobExecution.getJobExecution().isRunning()) { + resource.add(linkTo(methodOn(JobExecutionController.class).stopJobExecution(taskJobExecution.getJobExecution().getJobId(), taskJobExecution.getSchemaTarget())).withRel("stop")); + } + if (taskJobExecution.getJobExecution().getEndTime() != null && !taskJobExecution.getJobExecution().isRunning()) { + resource.add(linkTo(methodOn(JobExecutionController.class).restartJobExecution(taskJobExecution.getJobExecution().getJobId(), taskJobExecution.getSchemaTarget())).withRel("restart")); + } + } catch (NoSuchJobExecutionException | JobExecutionNotRunningException e) { + throw new RuntimeException(e); + } + return resource; } } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobInstanceController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobInstanceController.java index c58ccd6f33..0cd9a16ac5 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobInstanceController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobInstanceController.java @@ -31,9 +31,9 @@ import org.springframework.cloud.dataflow.rest.job.support.TimeUtils; import org.springframework.cloud.dataflow.rest.resource.JobExecutionResource; 
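
The assembler above replaces createModelWithId with explicit WebMvcLinkBuilder calls, so stop and restart links are only attached when the execution's state warrants them. The shape of that pattern, condensed from the hunk (the checked-to-unchecked rethrow mirrors the patch):

    import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo;
    import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn;

    JobExecutionThinResource resource = new JobExecutionThinResource(taskJobExecution, timeZone);
    try {
        resource.add(linkTo(methodOn(JobExecutionController.class)
                .view(taskJobExecution.getTaskId(), taskJobExecution.getSchemaTarget())).withSelfRel());
        if (taskJobExecution.getJobExecution().isRunning()) {
            // Only a running execution can be stopped, so only then is the link offered.
            resource.add(linkTo(methodOn(JobExecutionController.class)
                    .stopJobExecution(taskJobExecution.getJobExecution().getJobId(),
                            taskJobExecution.getSchemaTarget())).withRel("stop"));
        }
    } catch (NoSuchJobExecutionException | JobExecutionNotRunningException e) {
        throw new RuntimeException(e);
    }
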
import org.springframework.cloud.dataflow.rest.resource.JobInstanceResource; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.dataflow.server.service.TaskJobService; import org.springframework.data.domain.Page; -import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; import org.springframework.data.web.PagedResourcesAssembler; import org.springframework.hateoas.PagedModel; @@ -41,6 +41,7 @@ import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; import org.springframework.util.Assert; +import org.springframework.util.StringUtils; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; @@ -68,7 +69,7 @@ public class JobInstanceController { * Creates a {@code JobInstanceController} that retrieves Job Instance information. * * @param taskJobService the {@link TaskJobService} used for retrieving batch instance - * data. + * data. */ @Autowired public JobInstanceController(TaskJobService taskJobService) { @@ -79,20 +80,20 @@ public JobInstanceController(TaskJobService taskJobService) { /** * Return a page-able list of {@link JobInstanceResource} defined jobs. * - * @param jobName the name of the job - * @param pageable page-able collection of {@link JobInstance}s. + * @param jobName the name of the job + * @param pageable page-able collection of {@link JobInstance}s. * @param assembler for the {@link JobInstance}s * @return a list of Job Instance * @throws NoSuchJobException if the job for jobName specified does not exist. */ @RequestMapping(value = "", method = RequestMethod.GET, params = "name") @ResponseStatus(HttpStatus.OK) - public PagedModel list(@RequestParam("name") String jobName, Pageable pageable, + public PagedModel list( + @RequestParam("name") String jobName, + Pageable pageable, PagedResourcesAssembler assembler) throws NoSuchJobException { - List jobInstances = taskJobService.listTaskJobInstancesForJobName(pageable, jobName); - Page page = new PageImpl<>(jobInstances, pageable, - taskJobService.countJobInstances(jobName)); - return assembler.toModel(page, jobAssembler); + Page jobInstances = taskJobService.listTaskJobInstancesForJobName(pageable, jobName); + return assembler.toModel(jobInstances, jobAssembler); } /** @@ -101,12 +102,21 @@ public PagedModel list(@RequestParam("name") String jobName * @param id the id of the requested {@link JobInstance} * @return the {@link JobInstance} * @throws NoSuchJobInstanceException if job instance for the id does not exist. - * @throws NoSuchJobException if the job for the job instance does not exist. + * @throws NoSuchJobException if the job for the job instance does not exist. 
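
The list handler below shows a pattern applied throughout the patch: instead of stitching a List and a separate count query into a PageImpl, the controller passes through the Page the service now returns, so the total count comes from the same query. A condensed before/after (the JobInstanceExecutions import path is an assumption; the method names are from the hunk):

    import org.springframework.cloud.dataflow.rest.job.JobInstanceExecutions;
    import org.springframework.data.domain.Page;
    import org.springframework.data.domain.Pageable;

    // Before: the controller assembled the page from two service calls.
    //   List<JobInstanceExecutions> instances = taskJobService.listTaskJobInstancesForJobName(pageable, jobName);
    //   Page<JobInstanceExecutions> page = new PageImpl<>(instances, pageable, taskJobService.countJobInstances(jobName));
    // After: the service owns paging, including the total count.
    Page<JobInstanceExecutions> page = taskJobService.listTaskJobInstancesForJobName(pageable, jobName);
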
*/ @RequestMapping(value = "/{id}", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public JobInstanceResource view(@PathVariable("id") long id) throws NoSuchJobInstanceException, NoSuchJobException { - JobInstanceExecutions jobInstance = taskJobService.getJobInstance(id); + public JobInstanceResource view( + @PathVariable("id") long id, + @RequestParam(name = "schemaTarget", required = false) String schemaTarget + ) throws NoSuchJobInstanceException, NoSuchJobException { + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + JobInstanceExecutions jobInstance = taskJobService.getJobInstance(id, schemaTarget); + if (jobInstance == null) { + throw new NoSuchJobInstanceException(String.format("No job instance for id '%d' and schema target '%s'", id, schemaTarget)); + } return jobAssembler.toModel(jobInstance); } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionController.java index 73a700c241..280268eba2 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionController.java @@ -23,10 +23,14 @@ import org.springframework.batch.core.StepExecution; import org.springframework.batch.core.launch.NoSuchJobExecutionException; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; import org.springframework.cloud.dataflow.rest.resource.StepExecutionResource; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.dataflow.server.batch.JobService; import org.springframework.cloud.dataflow.server.batch.NoSuchStepExecutionException; import org.springframework.cloud.dataflow.server.job.support.StepExecutionResourceBuilder; +import org.springframework.cloud.dataflow.server.service.JobServiceContainer; +import org.springframework.cloud.dataflow.server.service.TaskJobService; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; @@ -35,13 +39,18 @@ import org.springframework.hateoas.server.ExposesResourceFor; import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; +import org.springframework.lang.NonNull; import org.springframework.util.Assert; +import org.springframework.util.StringUtils; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; + /** * @author Glenn Renfro */ @@ -50,21 +59,19 @@ @ExposesResourceFor(StepExecutionResource.class) public class JobStepExecutionController { - private final JobService jobService; - private final Assembler stepAssembler = new Assembler(); + private final TaskJobService taskJobService; /** * Creates a 
{@code JobStepExecutionsController} that retrieves Job Step Execution - information from a the {@link JobService} + information from the {@link JobServiceContainer} * - * @param jobService the service this controller will use for retrieving job step - * execution information. + * @param taskJobService the TaskJobService, which can query all schema targets. */ @Autowired - public JobStepExecutionController(JobService jobService) { - Assert.notNull(jobService, "repository must not be null"); - this.jobService = jobService; + public JobStepExecutionController(TaskJobService taskJobService) { + Assert.notNull(taskJobService, "taskJobService required"); + this.taskJobService = taskJobService; } /** @@ -79,11 +86,19 @@ public JobStepExecutionController(JobService jobService) { */ @RequestMapping(value = { "" }, method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public PagedModel stepExecutions(@PathVariable("jobExecutionId") long id, - Pageable pageable, PagedResourcesAssembler assembler) throws NoSuchJobExecutionException { - List result; - result = new ArrayList<>(jobService.getStepExecutions(id)); + public PagedModel stepExecutions( + @PathVariable("jobExecutionId") long id, + @RequestParam(name = "schemaTarget", required = false) String schemaTarget, + Pageable pageable, + PagedResourcesAssembler assembler + ) throws NoSuchJobExecutionException { + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + TaskJobExecution taskJobExecution = taskJobService.getJobExecution(id, schemaTarget); + List result = new ArrayList<>(taskJobExecution.getJobExecution().getStepExecutions()); Page page = new PageImpl<>(result, pageable, result.size()); + final Assembler stepAssembler = new Assembler(schemaTarget); return assembler.toModel(page, stepAssembler); } @@ -99,30 +114,43 @@ public PagedModel stepExecutions(@PathVariable("jobExecut */ @RequestMapping(value = { "/{stepExecutionId}" }, method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public StepExecutionResource getStepExecution(@PathVariable("jobExecutionId") Long id, - @PathVariable("stepExecutionId") Long stepId) + public StepExecutionResource getStepExecution( + @PathVariable("jobExecutionId") Long id, + @PathVariable("stepExecutionId") Long stepId, + @RequestParam(name = "schemaTarget", required = false) String schemaTarget) throws NoSuchStepExecutionException, NoSuchJobExecutionException { - return stepAssembler.toModel(jobService.getStepExecution(id, stepId)); + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + TaskJobExecution taskJobExecution = taskJobService.getJobExecution(id, schemaTarget); + final Assembler stepAssembler = new Assembler(schemaTarget); + StepExecution stepExecution = taskJobExecution.getJobExecution().getStepExecutions() + .stream() + .filter(s -> s.getId().equals(stepId)) + .findFirst() + .orElseThrow(() -> new NoSuchStepExecutionException("Step " + stepId + " in Job " + id + " not found")); + return stepAssembler.toModel(stepExecution); } /** - * {@link org.springframework.hateoas.server.ResourceAssembler} implementation that converts + * {@link org.springframework.hateoas.server.RepresentationModelAssembler} implementation that converts * {@link StepExecution}s to {@link StepExecutionResource}s.
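
getStepExecution above trades the old indexed JobService.getStepExecution(id, stepId) lookup for loading the whole execution and scanning its steps in memory; step counts per job are typically small, so the simpler code wins. The selection idiom in isolation (types and the exception come from the hunk; the findStep helper name is illustrative):

    import org.springframework.batch.core.JobExecution;
    import org.springframework.batch.core.StepExecution;
    import org.springframework.cloud.dataflow.server.batch.NoSuchStepExecutionException;

    final class StepLookup {
        // Select one step from an already-loaded JobExecution, failing with a domain exception.
        static StepExecution findStep(JobExecution jobExecution, Long stepId, Long jobExecutionId)
                throws NoSuchStepExecutionException {
            return jobExecution.getStepExecutions().stream()
                    .filter(step -> step.getId().equals(stepId))
                    .findFirst()
                    .orElseThrow(() -> new NoSuchStepExecutionException(
                            "Step " + stepId + " in Job " + jobExecutionId + " not found"));
        }
    }
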
*/ private static class Assembler extends RepresentationModelAssemblerSupport { - - public Assembler() { + private final String schemaTarget; + public Assembler(String schemaTarget) { super(JobStepExecutionController.class, StepExecutionResource.class); + this.schemaTarget = schemaTarget; } @Override public StepExecutionResource toModel(StepExecution stepExecution) { - return createModelWithId(stepExecution.getId(), stepExecution, stepExecution.getJobExecution().getId()); + return StepExecutionResourceBuilder.toModel(stepExecution, schemaTarget); } @Override public StepExecutionResource instantiateModel(StepExecution stepExecution) { - return StepExecutionResourceBuilder.toModel(stepExecution); + return StepExecutionResourceBuilder.toModel(stepExecution, schemaTarget); } } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionProgressController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionProgressController.java index 40e18ca258..9dc1d505d8 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionProgressController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/JobStepExecutionProgressController.java @@ -21,20 +21,28 @@ import org.springframework.batch.core.launch.NoSuchJobExecutionException; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.cloud.dataflow.rest.job.StepExecutionHistory; +import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; import org.springframework.cloud.dataflow.rest.resource.StepExecutionProgressInfoResource; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.dataflow.server.batch.JobService; import org.springframework.cloud.dataflow.server.batch.NoSuchStepExecutionException; import org.springframework.cloud.dataflow.server.job.support.StepExecutionProgressInfo; +import org.springframework.cloud.dataflow.server.service.JobServiceContainer; +import org.springframework.cloud.dataflow.server.service.TaskJobService; import org.springframework.hateoas.server.ExposesResourceFor; import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; -import org.springframework.util.Assert; +import org.springframework.util.StringUtils; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.web.bind.annotation.RestController; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + /** * @author Glenn Renfro */ @@ -43,54 +51,65 @@ @ExposesResourceFor(StepExecutionProgressInfoResource.class) public class JobStepExecutionProgressController { - private final JobService jobService; + private final TaskJobService taskJobService; - private final Assembler stepAssembler = new Assembler(); + private final JobServiceContainer jobServiceContainer; /** * Creates a {@code JobStepProgressInfoExecutionsController} that retrieves Job Step - * Progress 
Execution information from a the {@link JobService} + Progress Execution information from the {@link JobServiceContainer} * - * @param jobService the service this controller will use for retrieving job step - * progress execution information. + * @param jobServiceContainer A container of JobServices that this controller will use for retrieving job step + * progress execution information. + * @param taskJobService the TaskJobService, which queries all schema targets. */ @Autowired - public JobStepExecutionProgressController(JobService jobService) { - Assert.notNull(jobService, "repository must not be null"); - this.jobService = jobService; + public JobStepExecutionProgressController(JobServiceContainer jobServiceContainer, TaskJobService taskJobService) { + this.taskJobService = taskJobService; + this.jobServiceContainer = jobServiceContainer; } /** * Get the step execution progress for the given jobExecutions step. * - * @param jobExecutionId Id of the {@link JobExecution}, must not be null + * @param jobExecutionId Id of the {@link JobExecution}, must not be null * @param stepExecutionId Id of the {@link StepExecution}, must not be null * @return {@link StepExecutionProgressInfoResource} that has the progress info on the * given {@link StepExecution}. - * @throws NoSuchJobExecutionException Thrown if the respective {@link JobExecution} - * does not exist + * @throws NoSuchJobExecutionException Thrown if the respective {@link JobExecution} + * does not exist * @throws NoSuchStepExecutionException Thrown if the respective {@link StepExecution} - * does not exist + * does not exist */ @RequestMapping(value = "/{stepExecutionId}/progress", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public StepExecutionProgressInfoResource progress(@PathVariable long jobExecutionId, - @PathVariable long stepExecutionId) throws NoSuchStepExecutionException, NoSuchJobExecutionException { + public StepExecutionProgressInfoResource progress( + @PathVariable long jobExecutionId, + @PathVariable long stepExecutionId, + @RequestParam(name = "schemaTarget", required = false) String schemaTarget + ) throws NoSuchStepExecutionException, NoSuchJobExecutionException { try { - StepExecution stepExecution = jobService.getStepExecution(jobExecutionId, stepExecutionId); + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + TaskJobExecution taskJobExecution = taskJobService.getJobExecution(jobExecutionId, schemaTarget); + StepExecution stepExecution = taskJobExecution.getJobExecution().getStepExecutions() + .stream() + .filter(s -> s.getId().equals(stepExecutionId)) + .findFirst() + .orElseThrow(() -> new NoSuchStepExecutionException("Step execution " + stepExecutionId + " for Job " + jobExecutionId + " not found")); String stepName = stepExecution.getStepName(); if (stepName.contains(":partition")) { // assume we want to compare all partitions stepName = stepName.replaceAll("(:partition).*", "$1*"); } String jobName = stepExecution.getJobExecution().getJobInstance().getJobName(); - StepExecutionHistory stepExecutionHistory = computeHistory(jobName, stepName); + StepExecutionHistory stepExecutionHistory = computeHistory(jobName, stepName, schemaTarget); + final Assembler stepAssembler = new Assembler(schemaTarget); return stepAssembler.toModel(new StepExecutionProgressInfo(stepExecution, stepExecutionHistory)); - } - catch (NoSuchStepExecutionException e) { + } catch (NoSuchStepExecutionException e) { throw new NoSuchStepExecutionException(String.valueOf(stepExecutionId)); - }
- catch (NoSuchJobExecutionException e) { + } catch (NoSuchJobExecutionException e) { throw new NoSuchJobExecutionException(String.valueOf(jobExecutionId)); } } @@ -98,11 +117,12 @@ public StepExecutionProgressInfoResource progress(@PathVariable long jobExecutio /** * Compute step execution history for the given jobs step. * - * @param jobName the name of the job + * @param jobName the name of the job * @param stepName the name of the step * @return the step execution history for the given step */ - private StepExecutionHistory computeHistory(String jobName, String stepName) { + private StepExecutionHistory computeHistory(String jobName, String stepName, String schemaTarget) { + JobService jobService = jobServiceContainer.get(schemaTarget); int total = jobService.countStepExecutionsForStep(jobName, stepName); StepExecutionHistory stepExecutionHistory = new StepExecutionHistory(stepName); for (int i = 0; i < total; i += 1000) { @@ -114,14 +134,16 @@ private StepExecutionHistory computeHistory(String jobName, String stepName) { } /** - * {@link org.springframework.hateoas.server.ResourceAssembler} implementation that converts + * {@link org.springframework.hateoas.server.RepresentationModelAssembler} implementation that converts * {@link StepExecutionProgressInfo}s to a {@link StepExecutionProgressInfoResource}. */ private static class Assembler extends RepresentationModelAssemblerSupport { + private final String schemaTarget; - public Assembler() { + public Assembler(String schemaTarget) { super(JobStepExecutionProgressController.class, StepExecutionProgressInfoResource.class); + this.schemaTarget = schemaTarget; } @Override @@ -132,8 +154,23 @@ public StepExecutionProgressInfoResource toModel(StepExecutionProgressInfo entit @Override protected StepExecutionProgressInfoResource instantiateModel(StepExecutionProgressInfo entity) { - return new StepExecutionProgressInfoResource(entity.getStepExecution(), entity.getStepExecutionHistory(), + StepExecutionProgressInfoResource resource = new StepExecutionProgressInfoResource(entity.getStepExecution(), entity.getStepExecutionHistory(), entity.getEstimatedPercentComplete(), entity.isFinished(), entity.getDuration()); + addLink(resource); + return resource; + } + + private void addLink(StepExecutionProgressInfoResource resource) { + try { + resource.add( + linkTo( + methodOn(JobStepExecutionProgressController.class) + .progress(resource.getStepExecution().getJobExecutionId(), resource.getStepExecution().getId(), schemaTarget) + ).withRel("progress") + ); + } catch (NoSuchStepExecutionException | NoSuchJobExecutionException e) { + throw new RuntimeException(e); + } } } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/NoSuchSchemaTargetException.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/NoSuchSchemaTargetException.java index 1f5cbbfe45..aab2b520e3 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/NoSuchSchemaTargetException.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/NoSuchSchemaTargetException.java @@ -17,7 +17,7 @@ package org.springframework.cloud.dataflow.server.controller; /** - * Exception will be thrown by query for {@link org.springframework.cloud.dataflow.core.SchemaVersionTarget} + * Exception will be thrown by query for {@link 
org.springframework.cloud.dataflow.schema.SchemaVersionTarget} * that doesn't exist. * @author Corneil du Plessis */ diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RestControllerAdvice.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RestControllerAdvice.java index 568f0b4de0..0ebe4f09d2 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RestControllerAdvice.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RestControllerAdvice.java @@ -95,14 +95,20 @@ public VndErrors onException(Exception e) { * * @param e one of the exceptions, {@link AppAlreadyRegisteredException}, * {@link DuplicateStreamDefinitionException}, {@link DuplicateTaskException}, - * {@link StreamAlreadyDeployedException}, {@link StreamAlreadyDeployingException}, or - * {@link StreamAlreadyDeployingException} + * {@link StreamAlreadyDeployedException}, {@link StreamAlreadyDeployingException}, + * or {@link ApiNotSupportedException} * @return the error response in JSON format with media type * application/vnd.error+json */ - @ExceptionHandler({ AppAlreadyRegisteredException.class, DuplicateStreamDefinitionException.class, - DuplicateTaskException.class, StreamAlreadyDeployedException.class, StreamAlreadyDeployingException.class, - UnregisterAppException.class, InvalidCTRLaunchRequestException.class}) + @ExceptionHandler({ + AppAlreadyRegisteredException.class, + DuplicateStreamDefinitionException.class, + DuplicateTaskException.class, + StreamAlreadyDeployedException.class, + StreamAlreadyDeployingException.class, + UnregisterAppException.class, + InvalidCTRLaunchRequestException.class + }) @ResponseStatus(HttpStatus.CONFLICT) @ResponseBody public VndErrors onConflictException(Exception e) { @@ -182,7 +188,7 @@ public VndErrors onNotFoundException(Exception e) { * @return the error response in JSON format with media type * application/vnd.error+json */ - @ExceptionHandler({ MissingServletRequestParameterException.class, HttpMessageNotReadableException.class, + @ExceptionHandler({ ApiNotSupportedException.class, MissingServletRequestParameterException.class, HttpMessageNotReadableException.class, UnsatisfiedServletRequestParameterException.class, MethodArgumentTypeMismatchException.class, InvalidDateRangeException.class, CannotDeleteNonParentTaskExecutionException.class, InvalidStreamDefinitionException.class, CreateScheduleException.class, OffsetOutOfBoundsException.class, diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RootController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RootController.java index 642ac2920a..447c7585b9 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RootController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RootController.java @@ -145,17 +145,18 @@ public RootResource info() { root.add(unescapeTemplateVariables(entityLinks.linkToItemResource(TaskDefinitionResource.class, "{name}") .withRel("tasks/definitions/definition"))); root.add(entityLinks.linkToCollectionResource(TaskExecutionResource.class).withRel("tasks/executions")); +
root.add(linkTo(methodOn(TaskExecutionController.class).viewByExternal(null, null)).withRel("tasks/executions/external")); + root.add(linkTo(methodOn(TaskExecutionController.class).launchBoot3(null, null, null)).withRel("tasks/executions/launch")); String taskTemplated = entityLinks.linkToCollectionResource(TaskExecutionResource.class).getHref() + "{?name}"; root.add(Link.of(taskTemplated).withRel("tasks/executions/name")); root.add(linkTo(methodOn(TaskExecutionController.class) .getCurrentTaskExecutionsInfo()).withRel("tasks/executions/current")); - root.add(unescapeTemplateVariables(entityLinks.linkToItemResource(TaskExecutionResource.class, "{id}") - .withRel("tasks/executions/execution"))); + root.add(unescapeTemplateVariables(linkTo(methodOn(TaskExecutionController.class).view(null, null)).withRel("tasks/executions/execution"))); root.add(unescapeTemplateVariables(entityLinks.linkToItemResource(TaskAppStatusResource.class, "{name}") .withRel("tasks/validation"))); root.add(linkTo(methodOn(TasksInfoController.class).getInfo(null, null)).withRel("tasks/info/executions")); - root.add(linkTo(methodOn(TaskLogsController.class).getLog(null, null)).withRel("tasks/logs")); + root.add(linkTo(methodOn(TaskLogsController.class).getLog(null, null, null)).withRel("tasks/logs")); if (featuresProperties.isSchedulesEnabled()) { root.add(entityLinks.linkToCollectionResource(ScheduleInfoResource.class).withRel("tasks/schedules")); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeStreamsController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeStreamsController.java index a493af004c..db4ceb75fb 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeStreamsController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/RuntimeStreamsController.java @@ -82,8 +82,11 @@ public RuntimeStreamsController(StreamDeployer streamDeployer) { * @return a paged model for stream statuses */ @RequestMapping(method = RequestMethod.GET) - public PagedModel status(@RequestParam(value = "names", required = false) String[] names, Pageable pageable, - PagedResourcesAssembler assembler) { + public PagedModel status( + @RequestParam(value = "names", required = false) String[] names, + Pageable pageable, + PagedResourcesAssembler assembler + ) { List streamNames = (names!= null) ?
Arrays.asList(names): new ArrayList<>(); if (streamNames.isEmpty()) { streamNames = this.streamDeployer.getStreams(); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/SchemaController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/SchemaController.java index 36a879baca..e7a6bd6028 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/SchemaController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/SchemaController.java @@ -19,12 +19,12 @@ import java.util.List; import java.util.stream.Collectors; -import org.springframework.cloud.dataflow.core.AppBootSchemaVersions; -import org.springframework.cloud.dataflow.core.SchemaVersionTarget; -import org.springframework.cloud.dataflow.core.SchemaVersionTargets; import org.springframework.cloud.dataflow.rest.resource.SchemaVersionTargetResource; import org.springframework.cloud.dataflow.rest.resource.SchemaVersionTargetsResource; -import org.springframework.cloud.dataflow.server.service.SchemaService; +import org.springframework.cloud.dataflow.schema.AppBootSchemaVersions; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.SchemaVersionTargets; +import org.springframework.cloud.dataflow.schema.service.SchemaService; import org.springframework.hateoas.server.RepresentationModelAssembler; import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; @@ -65,12 +65,12 @@ public SchemaVersionTargetsResource getTargets() { return targetsAssembler.toModel(schemaService.getTargets()); } - @RequestMapping(value = "/targets/{schemaTargetName}", method = RequestMethod.GET) + @RequestMapping(value = "/targets/{schemaTarget}", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public SchemaVersionTargetResource getTarget(@PathVariable("schemaTargetName") String schemaTargetName) { - SchemaVersionTarget target = schemaService.getTarget(schemaTargetName); + public SchemaVersionTargetResource getTarget(@PathVariable("schemaTarget") String schemaTarget) { + SchemaVersionTarget target = schemaService.getTarget(schemaTarget); if (target == null) { - throw new NoSuchSchemaTargetException(schemaTargetName); + throw new NoSuchSchemaTargetException(schemaTarget); } return targetAssembler.toModel(target); } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDefinitionController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDefinitionController.java index fe445e4a38..3f68dd3edb 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDefinitionController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDefinitionController.java @@ -92,16 +92,16 @@ public class StreamDefinitionController { /** * Create a {@code StreamDefinitionController} that delegates to {@link StreamService}. 
* - * @param streamService the stream service to use - * @param streamDefinitionService the stream definition service to use - * @param appRegistryService the app registry service to use + * @param streamService the stream service to use + * @param streamDefinitionService the stream definition service to use + * @param appRegistryService the app registry service to use * @param streamDefinitionAssemblerProvider the stream definition assembler provider to use - * @param appRegistrationAssemblerProvider the app registry assembler provider to use - * */ + * @param appRegistrationAssemblerProvider the app registry assembler provider to use + */ public StreamDefinitionController(StreamService streamService, StreamDefinitionService streamDefinitionService, - AppRegistryService appRegistryService, - StreamDefinitionAssemblerProvider streamDefinitionAssemblerProvider, - AppRegistrationAssemblerProvider appRegistrationAssemblerProvider) { + AppRegistryService appRegistryService, + StreamDefinitionAssemblerProvider streamDefinitionAssemblerProvider, + AppRegistrationAssemblerProvider appRegistrationAssemblerProvider) { Assert.notNull(streamService, "StreamService must not be null"); Assert.notNull(streamDefinitionService, "StreamDefinitionService must not be null"); Assert.notNull(appRegistryService, "AppRegistryService must not be null"); @@ -117,15 +117,18 @@ public StreamDefinitionController(StreamService streamService, StreamDefinitionS /** * Return a page-able list of {@link StreamDefinitionResource} defined streams. * - * @param pageable Pagination information + * @param pageable Pagination information * @param assembler assembler for {@link StreamDefinition} - * @param search optional findByTaskNameContains parameter + * @param search optional findByTaskNameContains parameter * @return list of stream definitions */ @RequestMapping(value = "", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public PagedModel list(Pageable pageable, - @RequestParam(required = false) String search, PagedResourcesAssembler assembler) { + public PagedModel list( + Pageable pageable, + @RequestParam(required = false) String search, + PagedResourcesAssembler assembler + ) { Page streamDefinitions = this.streamService.findDefinitionByNameContains(pageable, search); return assembler.toModel(streamDefinitions, this.streamDefinitionAssemblerProvider.getStreamDefinitionAssembler(streamDefinitions.getContent())); @@ -137,23 +140,25 @@ public PagedModel list(Pageable pageable, * Differs from {@link #saveWithDeployProps} by accepting deployment properties and consuming * {@link MediaType#APPLICATION_FORM_URLENCODED} request content (required by the Dataflow Shell). 
* - * @param name stream name - * @param dsl DSL definition for stream - * @param deploy if {@code true}, the stream is deployed upon creation (default is - * {@code false}) + * @param name stream name + * @param dsl DSL definition for stream + * @param deploy if {@code true}, the stream is deployed upon creation (default is + * {@code false}) * @param description description of the stream definition * @return the created stream definition * @throws DuplicateStreamDefinitionException if a stream definition with the same name - * already exists - * @throws InvalidStreamDefinitionException if there are errors parsing the stream DSL, - * resolving the name, or type of applications in the stream + * already exists + * @throws InvalidStreamDefinitionException if there are errors parsing the stream DSL, + * resolving the name, or type of applications in the stream */ @RequestMapping(value = "", method = RequestMethod.POST, consumes = MediaType.APPLICATION_FORM_URLENCODED_VALUE) @ResponseStatus(HttpStatus.CREATED) - public StreamDefinitionResource save(@RequestParam("name") String name, - @RequestParam("definition") String dsl, - @RequestParam(value = "description", defaultValue = "") String description, - @RequestParam(value = "deploy", defaultValue = "false") boolean deploy) { + public StreamDefinitionResource save( + @RequestParam("name") String name, + @RequestParam("definition") String dsl, + @RequestParam(value = "description", defaultValue = "") String description, + @RequestParam(value = "deploy", defaultValue = "false") boolean deploy + ) { StreamDefinition streamDefinition = this.streamService.createStream(name, dsl, description, deploy, null); return ((RepresentationModelAssembler) this.streamDefinitionAssemblerProvider.getStreamDefinitionAssembler(Collections.singletonList(streamDefinition))).toModel(streamDefinition); @@ -165,25 +170,27 @@ public StreamDefinitionResource save(@RequestParam("name") String name, * Differs from {@link #save} by accepting deployment properties and consuming * {@link MediaType#APPLICATION_JSON} request content. 
* - * @param name stream name - * @param dsl DSL definition for stream - * @param deploy if {@code true}, the stream is deployed upon creation (default is - * {@code false}) + * @param name stream name + * @param dsl DSL definition for stream + * @param deploy if {@code true}, the stream is deployed upon creation (default is + * {@code false}) * @param deploymentProperties the optional deployment properties to use when the stream is deployed upon creation - * @param description description of the stream definition + * @param description description of the stream definition * @return the created stream definition * @throws DuplicateStreamDefinitionException if a stream definition with the same name - * already exists - * @throws InvalidStreamDefinitionException if there are errors parsing the stream DSL, - * resolving the name, or type of applications in the stream + * already exists + * @throws InvalidStreamDefinitionException if there are errors parsing the stream DSL, + * resolving the name, or type of applications in the stream */ @RequestMapping(value = "", method = RequestMethod.POST, consumes = MediaType.APPLICATION_JSON_VALUE) @ResponseStatus(HttpStatus.CREATED) - public StreamDefinitionResource saveWithDeployProps(@RequestParam("name") String name, - @RequestParam("definition") String dsl, - @RequestParam(value = "description", defaultValue = "") String description, - @RequestParam(value = "deploy", defaultValue = "false") boolean deploy, - @RequestBody(required = false) Map deploymentProperties) { + public StreamDefinitionResource saveWithDeployProps( + @RequestParam("name") String name, + @RequestParam("definition") String dsl, + @RequestParam(value = "description", defaultValue = "") String description, + @RequestParam(value = "deploy", defaultValue = "false") boolean deploy, + @RequestBody(required = false) Map deploymentProperties + ) { StreamDefinition streamDefinition = this.streamService.createStream(name, dsl, description, deploy, deploymentProperties); return ((RepresentationModelAssembler) this.streamDefinitionAssemblerProvider.getStreamDefinitionAssembler(Collections.singletonList(streamDefinition))).toModel(streamDefinition); @@ -204,18 +211,20 @@ public void delete(@PathVariable("name") String name) { * Return a list of related stream definition resources based on the given stream name. * Related streams include the main stream and the tap stream(s) on the main stream. 
* - * @param pageable Pagination information - * @param name the name of an existing stream definition (required) - * @param nested if should recursively findByTaskNameContains for related stream definitions + * @param pageable Pagination information + * @param name the name of an existing stream definition (required) + * @param nested if should recursively findByTaskNameContains for related stream definitions * @param assembler resource assembler for stream definition * @return a list of related stream definitions */ @RequestMapping(value = "/{name}/related", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public PagedModel listRelated(Pageable pageable, + public PagedModel listRelated( + Pageable pageable, @PathVariable("name") String name, @RequestParam(value = "nested", required = false, defaultValue = "false") boolean nested, - PagedResourcesAssembler assembler) { + PagedResourcesAssembler assembler + ) { List result = this.streamService.findRelatedStreams(name, nested); Page page = new PageImpl<>(result, pageable, result.size()); return assembler.toModel(page, @@ -243,7 +252,7 @@ public List listApplications(@PathVariable("n StreamDefinition definition = this.streamService.findOne(name); LinkedList streamAppDefinitions = this.streamDefinitionService.getAppDefinitions(definition); List appRegistrations = new ArrayList<>(); - for (StreamAppDefinition streamAppDefinition: streamAppDefinitions) { + for (StreamAppDefinition streamAppDefinition : streamAppDefinitions) { AppRegistrationResource appRegistrationResource = this.appRegistryAssembler.toModel(this.appRegistryService.find(streamAppDefinition.getRegisteredAppName(), streamAppDefinition.getApplicationType())); appRegistrationResource.setLabel(streamAppDefinition.getName()); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDeploymentController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDeploymentController.java index 3d68733418..26fbef7a2f 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDeploymentController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/StreamDeploymentController.java @@ -88,8 +88,8 @@ public class StreamDeploymentController { * Construct a new UpdatableStreamDeploymentController, given a * {@link StreamDeploymentController} and {@link StreamService} and {@link StreamDefinitionService} * - * @param repository the repository this controller will use for stream CRUD operations - * @param streamService the underlying UpdatableStreamService to deploy the stream + * @param repository the repository this controller will use for stream CRUD operations + * @param streamService the underlying UpdatableStreamService to deploy the stream * @param streamDefinitionService the StreamDefinitionService */ public StreamDeploymentController(StreamDefinitionRepository repository, @@ -197,14 +197,16 @@ public ResponseEntity undeployAll() { /** * Request deployment of an existing stream definition. * - * @param name the name of an existing stream definition (required) + * @param name the name of an existing stream definition (required) * @param reuseDeploymentProperties Indicator to re-use deployment properties. 
* @return The stream deployment */ @RequestMapping(value = "/{name}", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public StreamDeploymentResource info(@PathVariable("name") String name, - @RequestParam(value = "reuse-deployment-properties", required = false) boolean reuseDeploymentProperties) { + public StreamDeploymentResource info( + @PathVariable("name") String name, + @RequestParam(value = "reuse-deployment-properties", required = false) boolean reuseDeploymentProperties + ) { StreamDefinition streamDefinition = this.repository.findById(name) .orElseThrow(() -> new NoSuchStreamDefinitionException(name)); StreamDeployment streamDeployment = this.streamService.info(name); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskDefinitionController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskDefinitionController.java index fd473b19be..bf9fc16260 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskDefinitionController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskDefinitionController.java @@ -29,17 +29,17 @@ import org.springframework.cloud.dataflow.core.dsl.TaskNode; import org.springframework.cloud.dataflow.core.dsl.TaskParser; import org.springframework.cloud.dataflow.rest.resource.TaskDefinitionResource; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; import org.springframework.cloud.dataflow.server.controller.assembler.TaskDefinitionAssemblerProvider; import org.springframework.cloud.dataflow.server.controller.support.TaskExecutionAwareTaskDefinition; import org.springframework.cloud.dataflow.server.repository.NoSuchTaskDefinitionException; import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; import org.springframework.cloud.dataflow.server.repository.TaskQueryParamException; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer; import org.springframework.cloud.dataflow.server.service.TaskDeleteService; import org.springframework.cloud.dataflow.server.service.TaskSaveService; import org.springframework.cloud.dataflow.server.service.impl.TaskServiceUtils; import org.springframework.cloud.deployer.spi.task.TaskLauncher; -import org.springframework.cloud.task.repository.TaskExecution; -import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.data.web.PagedResourcesAssembler; @@ -78,7 +78,7 @@ public class TaskDefinitionController { private final TaskDeleteService taskDeleteService; - private final TaskExplorer explorer; + private final AggregateTaskExplorer explorer; private final TaskDefinitionAssemblerProvider taskDefinitionAssemblerProvider; @@ -89,15 +89,15 @@ public class TaskDefinitionController { *

  • task status checks to the provided {@link TaskLauncher}
• * * - * @param taskExplorer used to look up TaskExecutions. - * @param repository the repository this controller will use for task CRUD operations. - * @param taskSaveService handles Task saving related operations. - * @param taskDeleteService handles Task deletion related operations. + * @param taskExplorer used to look up TaskExecutions. + * @param repository the repository this controller will use for task CRUD operations. + * @param taskSaveService handles Task saving related operations. + * @param taskDeleteService handles Task deletion related operations. * @param taskDefinitionAssemblerProvider the task definition assembler provider to use. */ - public TaskDefinitionController(TaskExplorer taskExplorer, TaskDefinitionRepository repository, - TaskSaveService taskSaveService, TaskDeleteService taskDeleteService, - TaskDefinitionAssemblerProvider taskDefinitionAssemblerProvider) { + public TaskDefinitionController(AggregateTaskExplorer taskExplorer, TaskDefinitionRepository repository, + TaskSaveService taskSaveService, TaskDeleteService taskDeleteService, + TaskDefinitionAssemblerProvider taskDefinitionAssemblerProvider) { Assert.notNull(taskExplorer, "taskExplorer must not be null"); Assert.notNull(repository, "repository must not be null"); Assert.notNull(taskSaveService, "taskSaveService must not be null"); @@ -113,14 +113,17 @@ public TaskDefinitionController(TaskExplorer taskExplorer, TaskDefinitionReposit /** * Register a task definition for future execution. * - * @param name name the name of the task - * @param dsl DSL definition for the task + * @param name the name of the task + * @param dsl DSL definition for the task * @param description description of the task definition * @return the task definition */ @RequestMapping(value = "", method = RequestMethod.POST) - public TaskDefinitionResource save(@RequestParam("name") String name, @RequestParam("definition") String dsl, - @RequestParam(value = "description", defaultValue = "") String description) { + public TaskDefinitionResource save( + @RequestParam("name") String name, + @RequestParam("definition") String dsl, + @RequestParam(value = "description", defaultValue = "") String description + ) { TaskDefinition taskDefinition = new TaskDefinition(name, dsl, description); taskSaveService.saveTaskDefinition(taskDefinition); return this.taskDefinitionAssemblerProvider.getTaskDefinitionAssembler(false).toModel(new TaskExecutionAwareTaskDefinition(taskDefinition)); @@ -129,19 +132,21 @@ public TaskDefinitionResource save(@RequestParam("name") String name, @RequestPa /** * Delete the task from the repository so that it can no longer be executed. * - * @param name name of the task to be deleted + * @param name name of the task to be deleted * @param cleanup optional cleanup indicator. */ @RequestMapping(value = "/{name}", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.OK) - public void destroyTask(@PathVariable("name") String name, @RequestParam(required = false) Boolean cleanup) { + public void destroyTask( + @PathVariable("name") String name, + @RequestParam(required = false) Boolean cleanup + ) { boolean taskExecutionCleanup = (cleanup != null && cleanup) ? cleanup : false; this.taskDeleteService.deleteTaskDefinition(name, taskExecutionCleanup); } /** * Delete all tasks from the repository.
- * */ @RequestMapping(value = "", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.OK) @@ -152,25 +157,26 @@ public void destroyAll() { /** * Return a page-able list of {@link TaskDefinitionResource} defined tasks. * - * @param pageable page-able collection of {@code TaskDefinitionResource} - * @param search optional findByTaskNameContains parameter (Deprecated: please use taskName instead) - * @param taskName optional findByTaskNameContains parameter - * @param dslText optional findByDslText parameter + * @param pageable page-able collection of {@code TaskDefinitionResource} + * @param search optional findByTaskNameContains parameter (Deprecated: please use taskName instead) + * @param taskName optional findByTaskNameContains parameter + * @param dslText optional findByDslText parameter * @param description optional findByDescription parameter - * @param manifest optional manifest flag to indicate whether the latest task execution requires task manifest update - * @param assembler assembler for the {@link TaskDefinition} + * @param manifest optional manifest flag to indicate whether the latest task execution requires task manifest update + * @param assembler assembler for the {@link TaskDefinition} * @return a list of task definitions */ @RequestMapping(value = "", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public PagedModel list(Pageable pageable, - @RequestParam(required = false) @Deprecated String search, - @RequestParam(required = false) String taskName, - @RequestParam(required = false) String description, - @RequestParam(required = false) boolean manifest, - @RequestParam(required = false) String dslText, - PagedResourcesAssembler assembler) { - + public PagedModel list( + Pageable pageable, + @RequestParam(required = false) @Deprecated String search, + @RequestParam(required = false) String taskName, + @RequestParam(required = false) String description, + @RequestParam(required = false) boolean manifest, + @RequestParam(required = false) String dslText, + PagedResourcesAssembler assembler + ) { final Page taskDefinitions; if (Stream.of(search, taskName, description, dslText).filter(Objects::nonNull).count() > 1L) { @@ -193,7 +199,7 @@ public PagedModel list(Pageable pageable, .stream() .collect(Collectors.toMap(TaskDefinition::getTaskName, Function.identity())); - List<TaskExecution> taskExecutions = null; + List<AggregateTaskExecution> taskExecutions = null; if (!taskDefinitionMap.isEmpty()) { taskExecutions = this.explorer.getLatestTaskExecutionsByTaskNames(taskDefinitionMap.keySet().toArray(new String[0])); } @@ -210,12 +216,12 @@ public PagedModel list(Pageable pageable, private Collection updateComposedTaskElement(Collection taskDefinitionResources, - Page<TaskDefinition> taskDefinitions) { + Page<TaskDefinition> taskDefinitions) { Map taskNameResources = new HashMap<>(); - for (TaskDefinitionResource taskDefinitionResource: taskDefinitionResources) { + for (TaskDefinitionResource taskDefinitionResource : taskDefinitionResources) { taskNameResources.put(taskDefinitionResource.getName(), taskDefinitionResource); } - for (TaskDefinition taskDefinition: taskDefinitions) { + for (TaskDefinition taskDefinition : taskDefinitions) { TaskParser taskParser = new TaskParser(taskDefinition.getName(), taskDefinition.getDslText(), true, true); TaskNode taskNode = taskParser.parse(); if (taskNode.isComposed()) { @@ -232,23 +238,25 @@ private Collection updateComposedTaskElement(C /** * Return a given task definition resource.
* - * @param name the name of an existing task definition (required) + * @param name the name of an existing task definition (required) * @param manifest indicator to include manifest in response. * @return the task definition */ @RequestMapping(value = "/{name}", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public TaskDefinitionResource display(@PathVariable("name") String name, @RequestParam(required = false, name = "manifest") boolean manifest) { + public TaskDefinitionResource display( + @PathVariable("name") String name, + @RequestParam(required = false, name = "manifest") boolean manifest + ) { TaskDefinition definition = this.repository.findById(name) .orElseThrow(() -> new NoSuchTaskDefinitionException(name)); - final TaskExecution taskExecution = this.explorer.getLatestTaskExecutionForTaskName(name); + final AggregateTaskExecution taskExecution = this.explorer.getLatestTaskExecutionForTaskName(name); final RepresentationModelAssembler taskAssembler = this.taskDefinitionAssemblerProvider.getTaskDefinitionAssembler(manifest); TaskDefinitionResource taskDefinitionResource; if (taskExecution != null) { taskDefinitionResource = taskAssembler.toModel(new TaskExecutionAwareTaskDefinition(definition, taskExecution)); - } - else { + } else { taskDefinitionResource = taskAssembler.toModel(new TaskExecutionAwareTaskDefinition(definition)); } // Identify if the task definition is a composed task element @@ -269,24 +277,23 @@ private void updateComposedTaskElement(TaskDefinitionResource taskDefinitionReso } class TaskDefinitionConverter implements Function<TaskDefinition, TaskExecutionAwareTaskDefinition> { - final Map<String, TaskExecution> taskExecutions; + final Map<String, AggregateTaskExecution> taskExecutions; - public TaskDefinitionConverter(List<TaskExecution> taskExecutions) { + public TaskDefinitionConverter(List<AggregateTaskExecution> taskExecutions) { super(); if (taskExecutions != null) { this.taskExecutions = new HashMap<>(taskExecutions.size()); - for (TaskExecution taskExecution : taskExecutions) { + for (AggregateTaskExecution taskExecution : taskExecutions) { this.taskExecutions.put(taskExecution.getTaskName(), taskExecution); } - } - else { + } else { this.taskExecutions = null; } } @Override public TaskExecutionAwareTaskDefinition apply(TaskDefinition source) { - TaskExecution lastTaskExecution = null; + AggregateTaskExecution lastTaskExecution = null; if (taskExecutions != null) { lastTaskExecution = taskExecutions.get(source.getName()); @@ -294,10 +301,11 @@ public TaskExecutionAwareTaskDefinition apply(TaskDefinition source) { if (lastTaskExecution != null) { return new TaskExecutionAwareTaskDefinition(source, lastTaskExecution); - } - else { + } else { return new TaskExecutionAwareTaskDefinition(source); } } - }; + } }
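The listing and display paths above now resolve each definition's latest execution through the AggregateTaskExplorer, which carries the schema target alongside the execution. A minimal sketch of that lookup (illustrative only; "my-task" is a placeholder, and the explorer instance would be injected wherever these controllers obtain theirs):

// Resolve the latest execution for a task definition and report which
// schema target (Boot 2 vs Boot 3 tables) its data lives in.
static String latestSchemaTarget(AggregateTaskExplorer explorer, String taskName) {
    AggregateTaskExecution latest = explorer.getLatestTaskExecutionForTaskName(taskName);
    return (latest != null) ? latest.getSchemaTarget() : null;
}

diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionController.java index bf71636247..876a08f2e8 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskExecutionController.java @@ -23,11 +23,16 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.launch.NoSuchJobExecutionException; +import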
org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; +import org.springframework.cloud.dataflow.core.LaunchResponse; import org.springframework.cloud.dataflow.core.PlatformTaskExecutionInformation; import org.springframework.cloud.dataflow.core.TaskDefinition; import org.springframework.cloud.dataflow.core.TaskManifest; @@ -35,10 +40,14 @@ import org.springframework.cloud.dataflow.rest.job.TaskJobExecution; import org.springframework.cloud.dataflow.rest.job.TaskJobExecutionRel; import org.springframework.cloud.dataflow.rest.resource.CurrentTaskExecutionsResource; +import org.springframework.cloud.dataflow.rest.resource.LaunchResponseResource; import org.springframework.cloud.dataflow.rest.resource.TaskExecutionResource; import org.springframework.cloud.dataflow.rest.resource.TaskExecutionsInfoResource; +import org.springframework.cloud.dataflow.rest.util.ArgumentSanitizer; import org.springframework.cloud.dataflow.rest.util.DeploymentPropertiesUtils; import org.springframework.cloud.dataflow.rest.util.TaskSanitizer; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.dataflow.server.controller.support.TaskExecutionControllerDeleteAction; import org.springframework.cloud.dataflow.server.repository.NoSuchTaskDefinitionException; import org.springframework.cloud.dataflow.server.repository.NoSuchTaskExecutionException; @@ -54,11 +63,13 @@ import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Sort; import org.springframework.data.web.PagedResourcesAssembler; +import org.springframework.hateoas.Link; import org.springframework.hateoas.PagedModel; import org.springframework.hateoas.server.ExposesResourceFor; import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; import org.springframework.http.HttpStatus; import org.springframework.util.Assert; +import org.springframework.util.StringUtils; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; @@ -88,14 +99,18 @@ public class TaskExecutionController { private final Assembler taskAssembler = new Assembler(); - + private final LaunchResponseAssembler launcherResponseAssembler = new LaunchResponseAssembler(); private final TaskExecutionService taskExecutionService; private final TaskExecutionInfoService taskExecutionInfoService; private final TaskDeleteService taskDeleteService; - private final TaskExplorer explorer; + private final AggregateTaskExplorer explorer; + + private final AggregateExecutionSupport aggregateExecutionSupport; + + private final TaskDefinitionReader taskDefinitionReader; private final TaskJobService taskJobService; @@ -105,6 +120,9 @@ public class TaskExecutionController { private final Logger logger = LoggerFactory.getLogger(TaskExecutionController.class); + + private final ArgumentSanitizer argumentSanitizer = new ArgumentSanitizer(); + private static final List allowedSorts = Arrays.asList("TASK_EXECUTION_ID", "START_TIME", "END_TIME", "TASK_NAME", "EXIT_CODE", "EXIT_MESSAGE", "ERROR_MESSAGE", "LAST_UPDATED", "EXTERNAL_EXECUTION_ID", "PARENT_EXECUTION_ID"); @@ -113,17 +131,25 @@ public class TaskExecutionController { * Creates a 
{@code TaskExecutionController} that retrieves Task Execution information * from the {@link TaskExplorer} * - * @param explorer the explorer this controller will use for retrieving task execution - * information. - * @param taskExecutionService used to launch tasks - * @param taskDefinitionRepository the task definition repository - * @param taskExecutionInfoService the task execution information service - * @param taskDeleteService the task deletion service - * @param taskJobService the task job service + * @param explorer the explorer this controller will use for retrieving task execution + * information. + * @param aggregateExecutionSupport provides schemaTarget for a task by name. + * @param taskExecutionService used to launch tasks + * @param taskDefinitionRepository the task definition repository + * @param taskDefinitionReader uses task definition repository to provide Task Definition to aggregateExecutionSupport + * @param taskExecutionInfoService the task execution information service + * @param taskDeleteService the task deletion service + * @param taskJobService the task job service */ - public TaskExecutionController(TaskExplorer explorer, TaskExecutionService taskExecutionService, - TaskDefinitionRepository taskDefinitionRepository, TaskExecutionInfoService taskExecutionInfoService, - TaskDeleteService taskDeleteService, TaskJobService taskJobService) { + public TaskExecutionController(AggregateTaskExplorer explorer, + AggregateExecutionSupport aggregateExecutionSupport, + TaskExecutionService taskExecutionService, + TaskDefinitionRepository taskDefinitionRepository, + TaskDefinitionReader taskDefinitionReader, + TaskExecutionInfoService taskExecutionInfoService, + TaskDeleteService taskDeleteService, + TaskJobService taskJobService) { + this.taskDefinitionReader = taskDefinitionReader; Assert.notNull(explorer, "explorer must not be null"); Assert.notNull(taskExecutionService, "taskExecutionService must not be null"); Assert.notNull(taskDefinitionRepository, "taskDefinitionRepository must not be null"); @@ -132,6 +158,7 @@ public TaskExecutionController(TaskExplorer explorer, TaskExecutionService taskE Assert.notNull(taskJobService, "taskJobService must not be null"); this.taskExecutionService = taskExecutionService; this.explorer = explorer; + this.aggregateExecutionSupport = aggregateExecutionSupport; this.taskDefinitionRepository = taskDefinitionRepository; this.taskExecutionInfoService = taskExecutionInfoService; this.taskDeleteService = taskDeleteService; @@ -141,16 +168,16 @@ public TaskExecutionController(TaskExplorer explorer, TaskExecutionService taskE /** * Return a page-able list of {@link TaskExecutionResource} defined tasks. * - * @param pageable page-able collection of {@code TaskExecution}s. + * @param pageable page-able collection of {@code TaskExecution}s.
* @param assembler for the {@link TaskExecution}s * @return a list of task executions */ @RequestMapping(value = "", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) public PagedModel<TaskExecutionResource> list(Pageable pageable, - PagedResourcesAssembler<TaskJobExecutionRel> assembler) { + PagedResourcesAssembler<TaskJobExecutionRel> assembler) { validatePageable(pageable); - Page<TaskExecution> taskExecutions = this.explorer.findAll(pageable); + Page<AggregateTaskExecution> taskExecutions = this.explorer.findAll(pageable); Page<TaskJobExecutionRel> result = getPageableRelationships(taskExecutions, pageable); return assembler.toModel(result, this.taskAssembler); } @@ -158,19 +185,22 @@ public PagedModel<TaskExecutionResource> list(Pageable pageable, /** * Retrieve all task executions with the task name specified * - * @param taskName name of the task - * @param pageable page-able collection of {@code TaskExecution}s. + * @param taskName name of the task + * @param pageable page-able collection of {@code TaskExecution}s. * @param assembler for the {@link TaskExecution}s * @return the paged list of task executions */ @RequestMapping(value = "", method = RequestMethod.GET, params = "name") @ResponseStatus(HttpStatus.OK) - public PagedModel<TaskExecutionResource> retrieveTasksByName(@RequestParam("name") String taskName, - Pageable pageable, PagedResourcesAssembler<TaskJobExecutionRel> assembler) { + public PagedModel<TaskExecutionResource> retrieveTasksByName( + @RequestParam("name") String taskName, + Pageable pageable, + PagedResourcesAssembler<TaskJobExecutionRel> assembler + ) { validatePageable(pageable); this.taskDefinitionRepository.findById(taskName) .orElseThrow(() -> new NoSuchTaskDefinitionException(taskName)); - Page<TaskExecution> taskExecutions = this.explorer.findTaskExecutionsByName(taskName, pageable); + Page<AggregateTaskExecution> taskExecutions = this.explorer.findTaskExecutionsByName(taskName, pageable); Page<TaskJobExecutionRel> result = getPageableRelationships(taskExecutions, pageable); return assembler.toModel(result, this.taskAssembler); } @@ -180,21 +210,41 @@ public PagedModel<TaskExecutionResource> retrieveTasksByName(@RequestParam("name * if `spring.cloud.dataflow.task.auto-create-task-definitions` is true. * The name must be included in the path. * - * @param taskName the name of the task to be executed (required) + * @param taskName the name of the task to be executed (required) * @param properties the runtime properties for the task, as a comma-delimited list of - * key=value pairs - * @param arguments the runtime commandline arguments + * key=value pairs + * @param arguments the runtime commandline arguments * @return the taskExecutionId for the executed task */ @RequestMapping(value = "", method = RequestMethod.POST, params = "name") @ResponseStatus(HttpStatus.CREATED) - public long launch(@RequestParam("name") String taskName, + public long launch( + @RequestParam("name") String taskName, @RequestParam(required = false) String properties, - @RequestParam(required = false) String arguments) { + @RequestParam(required = false) String arguments + ) { + SchemaVersionTarget schemaVersionTarget = aggregateExecutionSupport.findSchemaVersionTarget(taskName, taskDefinitionReader); + if (!schemaVersionTarget.equals(SchemaVersionTarget.defaultTarget())) { + Link link = linkTo(methodOn(TaskExecutionController.class).launchBoot3(taskName, properties, arguments)).withRel("launch"); + throw new ApiNotSupportedException(String.format("Task: %s cannot be launched for %s.
Use %s", taskName, SchemaVersionTarget.defaultTarget().getName(), link.getHref())); + } Map propertiesToUse = DeploymentPropertiesUtils.parse(properties); List argumentsToUse = DeploymentPropertiesUtils.parseArgumentList(arguments, " "); - - return this.taskExecutionService.executeTask(taskName, propertiesToUse, argumentsToUse); + LaunchResponse launchResponse = this.taskExecutionService.executeTask(taskName, propertiesToUse, argumentsToUse); + return launchResponse.getExecutionId(); + } + @RequestMapping(value = "/launch", method = RequestMethod.POST, params = "name") + @ResponseStatus(HttpStatus.CREATED) + public LaunchResponseResource launchBoot3( + @RequestParam("name") String taskName, + @RequestParam(required = false) String properties, + @RequestParam(required = false) String arguments + ) { + // TODO update docs and root + Map propertiesToUse = DeploymentPropertiesUtils.parse(properties); + List argumentsToUse = DeploymentPropertiesUtils.parseArgumentList(arguments, " "); + LaunchResponse launchResponse = this.taskExecutionService.executeTask(taskName, propertiesToUse, argumentsToUse); + return this.launcherResponseAssembler.toModel(launchResponse); } /** @@ -205,19 +255,45 @@ public long launch(@RequestParam("name") String taskName, */ @RequestMapping(value = "/{id}", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - // TODO add schemaVersionTarget queryParam - public TaskExecutionResource view(@PathVariable("id") long id) { - TaskExecution taskExecution = this.explorer.getTaskExecution(id); + public TaskExecutionResource view( + @PathVariable(name = "id") Long id, + @RequestParam(name = "schemaTarget", required = false) String schemaTarget) { + if(!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + AggregateTaskExecution taskExecution = sanitizeTaskExecutionArguments(this.explorer.getTaskExecution(id, schemaTarget)); if (taskExecution == null) { - throw new NoSuchTaskExecutionException(id); + throw new NoSuchTaskExecutionException(id, schemaTarget); } - taskExecution = this.taskSanitizer.sanitizeTaskExecutionArguments(taskExecution); - TaskManifest taskManifest = this.taskExecutionService.findTaskManifestById(id); + TaskManifest taskManifest = this.taskExecutionService.findTaskManifestById(id, schemaTarget); taskManifest = this.taskSanitizer.sanitizeTaskManifest(taskManifest); - List jobExecutionIds = new ArrayList<>(this.explorer.getJobExecutionIdsByTaskExecutionId(taskExecution.getExecutionId())); + List jobExecutionIds = new ArrayList<>(this.explorer.getJobExecutionIdsByTaskExecutionId(taskExecution.getExecutionId(), schemaTarget)); TaskJobExecutionRel taskJobExecutionRel = new TaskJobExecutionRel(taskExecution, jobExecutionIds, - taskManifest, getCtrTaskJobExecution(taskExecution, jobExecutionIds)); + taskManifest, + getCtrTaskJobExecution(taskExecution, jobExecutionIds) + ); + return this.taskAssembler.toModel(taskJobExecutionRel); + } + @RequestMapping(value = "/external/{externalExecutionId}", method = RequestMethod.GET) + @ResponseStatus(HttpStatus.OK) + public TaskExecutionResource viewByExternal( + @PathVariable(name = "externalExecutionId") String externalExecutionId, + @RequestParam(name = "platform", required = false) String platform + ) { + AggregateTaskExecution taskExecution = sanitizeTaskExecutionArguments(this.explorer.getTaskExecutionByExternalExecutionId(externalExecutionId, platform)); + if (taskExecution == null) { + throw new NoSuchTaskExecutionException(externalExecutionId, platform); + } + 
TaskManifest taskManifest = this.taskExecutionService.findTaskManifestById(taskExecution.getExecutionId(), taskExecution.getSchemaTarget()); + taskManifest = this.taskSanitizer.sanitizeTaskManifest(taskManifest); + List jobExecutionIds = new ArrayList<>(this.explorer.getJobExecutionIdsByTaskExecutionId(taskExecution.getExecutionId(), taskExecution.getSchemaTarget())); + TaskJobExecutionRel taskJobExecutionRel = new TaskJobExecutionRel( + taskExecution, + jobExecutionIds, + taskManifest, + getCtrTaskJobExecution(taskExecution, jobExecutionIds) + ); return this.taskAssembler.toModel(taskJobExecutionRel); } @@ -230,7 +306,7 @@ public Collection getCurrentTaskExecutionsInfo() executionInformation.forEach(platformTaskExecutionInformation -> { CurrentTaskExecutionsResource currentTaskExecutionsResource = - CurrentTaskExecutionsResource.fromTaskExecutionInformation(platformTaskExecutionInformation); + CurrentTaskExecutionsResource.fromTaskExecutionInformation(platformTaskExecutionInformation); resources.add(currentTaskExecutionsResource); }); @@ -242,16 +318,18 @@ public Collection getCurrentTaskExecutionsInfo() * optional {@code actions} parameter can be used to not only clean up task execution resources, * but can also trigger the deletion of task execution and job data in the persistence store. * - * @param ids The id of the {@link TaskExecution}s to clean up + * @param ids The id of the {@link TaskExecution}s to clean up * @param actions Defaults to "CLEANUP" if not specified */ @RequestMapping(value = "/{id}", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.OK) - // TODO add schemaVersionTarget queryParam - public void cleanup(@PathVariable("id") Set ids, - @RequestParam(defaultValue = "CLEANUP", name="action") TaskExecutionControllerDeleteAction[] actions) { + public void cleanup( + @PathVariable("id") Set ids, + @RequestParam(defaultValue = "CLEANUP", name = "action") TaskExecutionControllerDeleteAction[] actions, + @RequestParam(name = "schemaTarget", required = false) String schemaTarget + ) { final Set actionsAsSet = new HashSet<>(Arrays.asList(actions)); - this.taskDeleteService.cleanupExecutions(actionsAsSet, ids); + this.taskDeleteService.cleanupExecutions(actionsAsSet, ids, schemaTarget); } /** @@ -259,60 +337,70 @@ public void cleanup(@PathVariable("id") Set ids, * optional {@code actions} and {@code completed} parameters can be used to not only clean up task execution resources, * but can also trigger the deletion of task execution and job data in the persistence store. * - * @param actions Defaults to "CLEANUP" if not specified + * @param actions Defaults to "CLEANUP" if not specified * @param completed Defaults to cleanup only completed task executions - * @param taskName Optional name of task to clean up. + * @param taskName Optional name of task to clean up. 
*/ @RequestMapping(method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.OK) public void cleanupAll( - @RequestParam(defaultValue = "CLEANUP", name="action") TaskExecutionControllerDeleteAction[] actions, - @RequestParam(defaultValue = "false", name="completed") boolean completed, - @RequestParam(defaultValue = "", name="name") String taskName) { - - this.taskDeleteService.cleanupExecutions(new HashSet<>(Arrays.asList(actions)), - this.taskExecutionService.getAllTaskExecutionIds(completed, taskName)); + @RequestParam(defaultValue = "CLEANUP", name = "action") TaskExecutionControllerDeleteAction[] actions, + @RequestParam(defaultValue = "false", name = "completed") boolean completed, + @RequestParam(defaultValue = "", name = "name") String taskName + ) { + this.taskDeleteService.cleanupExecutions(new HashSet<>(Arrays.asList(actions)), taskName, completed); } /** * Stop a set of task executions. * - * @param ids the ids of the {@link TaskExecution}s to stop + * @param ids the ids of the {@link TaskExecution}s to stop * @param platform the platform name */ @RequestMapping(value = "/{id}", method = RequestMethod.POST) @ResponseStatus(HttpStatus.OK) - // TODO add schemaVersionTarget queryParam - public void stop(@PathVariable("id") Set ids, - @RequestParam(defaultValue = "", name="platform") String platform) { - this.taskExecutionService.stopTaskExecution(ids, platform); + public void stop( + @PathVariable("id") Set ids, + @RequestParam(defaultValue = "", name = "platform") String platform, + @RequestParam(name = "schemaTarget", required = false) String schemaTarget + ) { + this.taskExecutionService.stopTaskExecution(ids, schemaTarget, platform); } - private Page<TaskJobExecutionRel> getPageableRelationships(Page<TaskExecution> taskExecutions, Pageable pageable) { + private Page<TaskJobExecutionRel> getPageableRelationships(Page<AggregateTaskExecution> taskExecutions, Pageable pageable) { List<TaskJobExecutionRel> taskJobExecutionRels = new ArrayList<>(); - for (TaskExecution taskExecution : taskExecutions.getContent()) { - TaskManifest taskManifest = this.taskExecutionService.findTaskManifestById(taskExecution.getExecutionId()); + for (AggregateTaskExecution taskExecution : taskExecutions.getContent()) { + TaskManifest taskManifest = this.taskExecutionService.findTaskManifestById(taskExecution.getExecutionId(), taskExecution.getSchemaTarget()); taskManifest = this.taskSanitizer.sanitizeTaskManifest(taskManifest); List<Long> jobExecutionIds = new ArrayList<>( - this.explorer.getJobExecutionIdsByTaskExecutionId(taskExecution.getExecutionId())); + this.explorer.getJobExecutionIdsByTaskExecutionId(taskExecution.getExecutionId(), taskExecution.getSchemaTarget())); taskJobExecutionRels - .add(new TaskJobExecutionRel(this.taskSanitizer.sanitizeTaskExecutionArguments(taskExecution), + .add(new TaskJobExecutionRel(sanitizeTaskExecutionArguments(taskExecution), jobExecutionIds, taskManifest, getCtrTaskJobExecution(taskExecution, jobExecutionIds))); } return new PageImpl<>(taskJobExecutionRels, pageable, taskExecutions.getTotalElements()); } - private TaskJobExecution getCtrTaskJobExecution(TaskExecution taskExecution, List<Long> jobExecutionIds) { + + private AggregateTaskExecution sanitizeTaskExecutionArguments(AggregateTaskExecution taskExecution) { + if (taskExecution != null) { + List<String> args = taskExecution.getArguments().stream() + .map(this.argumentSanitizer::sanitize).collect(Collectors.toList()); + taskExecution.setArguments(args); + } + return taskExecution; + } + + private TaskJobExecution getCtrTaskJobExecution(AggregateTaskExecution taskExecution, List<Long> jobExecutionIds) { TaskJobExecution taskJobExecution = null; TaskDefinition taskDefinition = this.taskDefinitionRepository.findByTaskName(taskExecution.getTaskName()); - if(taskDefinition != null) { - TaskParser parser = new TaskParser(taskExecution.getTaskName(), taskDefinition.getDslText(),true, false); - if(jobExecutionIds.size() > 0 && parser.parse().isComposed()) { + if (taskDefinition != null) { + TaskParser parser = new TaskParser(taskExecution.getTaskName(), taskDefinition.getDslText(), true, false); + if (jobExecutionIds.size() > 0 && parser.parse().isComposed()) { try { - taskJobExecution = this.taskJobService.getJobExecution(jobExecutionIds.toArray(new Long[0])[0]); - } - catch(NoSuchJobExecutionException noSuchJobExecutionException) { + taskJobExecution = this.taskJobService.getJobExecution(jobExecutionIds.toArray(new Long[0])[0], taskExecution.getSchemaTarget()); + } catch (NoSuchJobExecutionException noSuchJobExecutionException) { this.logger.warn("Job Execution for Task Execution {} could not be found.", taskExecution.getExecutionId()); } @@ -347,20 +435,28 @@ public Assembler() { @Override public TaskExecutionResource toModel(TaskJobExecutionRel taskJobExecutionRel) { - // TODO add schemaVersionTarget queryParam to self - TaskExecutionResource resource = createModelWithId(taskJobExecutionRel.getTaskExecution().getExecutionId(), taskJobExecutionRel); - if(!resource.getLink("tasks/logs").isPresent()) { - resource.add(linkTo(methodOn(TaskLogsController.class).getLog(resource.getExternalExecutionId(), resource.getPlatformName())).withRel("tasks/logs")); - } + + TaskExecutionResource resource = new TaskExecutionResource(taskJobExecutionRel); + resource.add( + linkTo( + methodOn(TaskLogsController.class) + .getLog(resource.getExternalExecutionId(), resource.getPlatformName(), resource.getSchemaTarget()) + ).withRel("tasks/logs") + ); + + resource.add( + linkTo( + methodOn(TaskExecutionController.class) + .view(taskJobExecutionRel.getTaskExecution().getExecutionId(), taskJobExecutionRel.getTaskExecution().getSchemaTarget()) + ).withSelfRel()); return resource; } @Override public TaskExecutionResource instantiateModel(TaskJobExecutionRel taskJobExecutionRel) { - // TODO add schemaVersionTarget queryParam TaskExecutionResource resource = new TaskExecutionResource(taskJobExecutionRel); - if(!resource.getLink("tasks/logs").isPresent()) { - resource.add(linkTo(methodOn(TaskLogsController.class).getLog(resource.getExternalExecutionId(), resource.getPlatformName())).withRel("tasks/logs")); + if (!resource.getLink("tasks/logs").isPresent()) { + resource.add(linkTo(methodOn(TaskLogsController.class).getLog(resource.getExternalExecutionId(), resource.getPlatformName(), resource.getSchemaTarget())).withRel("tasks/logs")); } return resource; } @@ -389,5 +485,16 @@ public TaskExecutionsInfoResource instantiateModel(Integer totalExecutions) { return taskExecutionsInfoResource; } } + private static class LaunchResponseAssembler extends RepresentationModelAssemblerSupport<LaunchResponse, LaunchResponseResource> { + public LaunchResponseAssembler() { + super(TaskExecutionController.class, LaunchResponseResource.class); + } + @Override + public LaunchResponseResource toModel(LaunchResponse entity) { + LaunchResponseResource resource = new LaunchResponseResource(entity.getExecutionId(), entity.getSchemaTarget()); + resource.add(linkTo(methodOn(TaskExecutionController.class).view(entity.getExecutionId(), entity.getSchemaTarget())).withSelfRel()); + return resource; + } + } }
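The LaunchResponseAssembler above gives clients both identifiers they need to address an execution afterwards. A minimal sketch of launching through the new Boot 3-aware endpoint (host, port, and task name are placeholders; the response is read as a raw string rather than asserting resource accessor names):

import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

public class LaunchTaskExample {
    public static void main(String[] args) {
        RestTemplate rest = new RestTemplate();
        MultiValueMap<String, String> form = new LinkedMultiValueMap<>();
        form.add("name", "my-task"); // placeholder task definition name
        // POST /tasks/executions/launch handles Boot 2 and Boot 3 tasks;
        // the older POST /tasks/executions remains Boot 2 only.
        String response = rest.postForObject(
                "http://localhost:9393/tasks/executions/launch", form, String.class);
        // The body carries the executionId plus the schemaTarget that
        // subsequent view/cleanup/stop calls pass as a query parameter.
        System.out.println(response);
    }
}

diff --git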
a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskLogsController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskLogsController.java index 4a3e37801c..cb13849e73 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskLogsController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskLogsController.java @@ -57,7 +57,11 @@ public TaskLogsController(TaskExecutionService taskExecutionService) { */ @RequestMapping(value = "/{taskExternalExecutionId}", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public ResponseEntity getLog(@PathVariable String taskExternalExecutionId, @RequestParam(required = false, defaultValue = "default") String platformName) { - return new ResponseEntity<>(this.taskExecutionService.getLog(platformName, taskExternalExecutionId), HttpStatus.OK); + public ResponseEntity getLog( + @PathVariable String taskExternalExecutionId, + @RequestParam(name = "platformName", required = false, defaultValue = "default") String platformName, + @RequestParam(name = "schemaTarget", required = false) String schemaTarget + ) { + return new ResponseEntity<>(this.taskExecutionService.getLog(platformName, taskExternalExecutionId, schemaTarget), HttpStatus.OK); } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskPlatformController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskPlatformController.java index 40914bd94f..fec9fc079b 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskPlatformController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskPlatformController.java @@ -58,9 +58,11 @@ public TaskPlatformController(LauncherService launcherService) { */ @RequestMapping(value = "", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public PagedModel list(Pageable pageable, + public PagedModel list( + Pageable pageable, @RequestParam(value = "schedulesEnabled", required = false) String schedulesEnabled, - PagedResourcesAssembler assembler) { + PagedResourcesAssembler assembler + ) { PagedModel result; if(StringUtils.hasText(schedulesEnabled) && schedulesEnabled.toLowerCase().equals("true")) { result = assembler.toModel(this.launcherService.getLaunchersWithSchedules(pageable), this.launcherAssembler); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskSchedulerController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskSchedulerController.java index 71bfa8df47..800a214eaa 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskSchedulerController.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TaskSchedulerController.java @@ -81,9 +81,11 @@ public TaskSchedulerController(SchedulerService schedulerService) { */ @RequestMapping(value = "", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public PagedModel list(Pageable pageable, + public PagedModel list( + Pageable pageable, @RequestParam(value = 
"platform", required = false) String platform, - PagedResourcesAssembler assembler) { + PagedResourcesAssembler assembler + ) { List result = this.schedulerService.listForPlatform(platform); return assembler.toModel(new PageImpl<>(result, pageable, result.size()), taskAssembler); } @@ -97,8 +99,10 @@ public PagedModel list(Pageable pageable, */ @RequestMapping(value = "/{name}", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public ScheduleInfoResource getSchedule(@PathVariable("name") String scheduleName, - @RequestParam(value = "platform", required = false) String platform) { + public ScheduleInfoResource getSchedule( + @PathVariable("name") String scheduleName, + @RequestParam(value = "platform", required = false) String platform + ) { ScheduleInfo schedule = this.schedulerService.getSchedule(scheduleName, platform); if (schedule == null) { throw new NoSuchScheduleException(String.format("Schedule [%s] doesn't exist" , scheduleName)); @@ -116,9 +120,11 @@ public ScheduleInfoResource getSchedule(@PathVariable("name") String scheduleNam * @return a list of Schedules. */ @RequestMapping("/instances/{taskDefinitionName}") - public PagedModel filteredList(@PathVariable String taskDefinitionName, + public PagedModel filteredList( + @PathVariable String taskDefinitionName, @RequestParam(value = "platform", required = false) String platform, - PagedResourcesAssembler assembler) { + PagedResourcesAssembler assembler + ) { List result = this.schedulerService.list(taskDefinitionName, platform); int resultSize = result.size(); Pageable pageable = PageRequest.of(0, @@ -151,11 +157,13 @@ public void deleteSchedulesforDefinition(@PathVariable String taskDefinitionName */ @RequestMapping(value = "", method = RequestMethod.POST) @ResponseStatus(HttpStatus.CREATED) - public void save(@RequestParam("scheduleName") String scheduleName, + public void save( + @RequestParam("scheduleName") String scheduleName, @RequestParam("taskDefinitionName") String taskDefinitionName, @RequestParam String properties, @RequestParam(required = false) String arguments, - @RequestParam(value = "platform", required = false) String platform) { + @RequestParam(value = "platform", required = false) String platform + ) { Map propertiesToUse = DeploymentPropertiesUtils.parse(properties); List argumentsToUse = DeploymentPropertiesUtils.parseArgumentList(arguments, " "); this.schedulerService.schedule(StringUtils.trimWhitespace(scheduleName), taskDefinitionName, @@ -170,8 +178,10 @@ public void save(@RequestParam("scheduleName") String scheduleName, */ @RequestMapping(value = "/{scheduleName}", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.OK) - public void unschedule(@PathVariable("scheduleName") String scheduleName, - @RequestParam(value = "platform", required = false) String platform) { + public void unschedule( + @PathVariable("scheduleName") String scheduleName, + @RequestParam(value = "platform", required = false) String platform + ) { schedulerService.unschedule(scheduleName, platform); } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TasksInfoController.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TasksInfoController.java index c8d4ebbed8..fada8417d2 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TasksInfoController.java +++ 
b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/TasksInfoController.java @@ -60,9 +60,11 @@ public TasksInfoController(TaskExecutionService taskExecutionService) { @RequestMapping(value= "executions", method = RequestMethod.GET) @ResponseStatus(HttpStatus.OK) - public TaskExecutionsInfoResource getInfo(@RequestParam(required = false, defaultValue = "false", name="completed") String completed, - @RequestParam(required = false, defaultValue = "", name="name") String taskName) { - return this.taskExecutionsAssembler.toModel(this.taskExecutionService.getAllTaskExecutionsCount(Boolean.valueOf(completed), taskName)); + public TaskExecutionsInfoResource getInfo( + @RequestParam(required = false, defaultValue = "false", name="completed") String completed, + @RequestParam(required = false, defaultValue = "", name="name") String taskName + ) { + return this.taskExecutionsAssembler.toModel(this.taskExecutionService.getAllTaskExecutionsCount(Boolean.parseBoolean(completed), taskName)); } /** diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/VisibleProperties.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/VisibleProperties.java index b01ab79d2b..819f0e6417 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/VisibleProperties.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/VisibleProperties.java @@ -22,6 +22,8 @@ import java.util.Map; import java.util.Set; +import org.slf4j.LoggerFactory; + import org.springframework.boot.configurationmetadata.ConfigurationMetadataProperty; import org.springframework.cloud.dataflow.configuration.metadata.ApplicationConfigurationMetadataResolver; import org.springframework.cloud.dataflow.core.RelaxedNames; @@ -71,10 +73,18 @@ public Map qualifyProperties(Map properties, Res String provided = entry.getKey(); if (!allProps.contains(provided)) { List longForms = null; - for (String relaxed : new RelaxedNames(provided)) { - longForms = visible.get(relaxed); - if (longForms != null) { - break; + RelaxedNames relaxedNames = null; + try { + relaxedNames = new RelaxedNames(provided); + } catch (Exception x) { + LoggerFactory.getLogger(getClass()).error("Exception determining relaxed name for " + provided, x); + } + if (relaxedNames != null) { + for (String relaxed : relaxedNames) { + longForms = visible.get(relaxed); + if (longForms != null) { + break; + } } } if (longForms != null) {
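The guard above makes property qualification tolerant of keys that RelaxedNames cannot parse: the error is logged and the key simply fails to resolve, instead of aborting the whole request. A small sketch of what the relaxed iteration yields for a well-formed key (the variant list in the comment is illustrative; the exact set comes from RelaxedNames):

// Print the relaxed candidates tried against the visible-property index,
// e.g. camelCase, hyphenated, and underscored spellings of the same key.
for (String candidate : new RelaxedNames("fooBarBaz")) {
    System.out.println(candidate); // fooBarBaz, foo-bar-baz, foo_bar_baz, ...
}

diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssembler.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssembler.java index 96866b0d8c..1dded4e847 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssembler.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssembler.java @@ -15,7 +15,9 @@ */ package org.springframework.cloud.dataflow.server.controller.assembler; +import java.util.List; import java.util.Set; +import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -28,13 +30,14 @@ import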
org.springframework.cloud.dataflow.rest.resource.TaskExecutionResource; import org.springframework.cloud.dataflow.rest.util.ArgumentSanitizer; import org.springframework.cloud.dataflow.rest.util.TaskSanitizer; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; import org.springframework.cloud.dataflow.server.controller.TaskDefinitionController; import org.springframework.cloud.dataflow.server.controller.support.TaskExecutionAwareTaskDefinition; +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer; import org.springframework.cloud.dataflow.server.service.TaskExecutionService; import org.springframework.cloud.dataflow.server.service.TaskJobService; import org.springframework.cloud.dataflow.server.service.impl.TaskServiceUtils; -import org.springframework.cloud.task.repository.TaskExecution; -import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.hateoas.server.mvc.RepresentationModelAssemblerSupport; /** @@ -55,7 +58,7 @@ public class DefaultTaskDefinitionAssembler ex private final TaskJobService taskJobService; - private final TaskExplorer taskExplorer; + private final AggregateTaskExplorer taskExplorer; private final TaskSanitizer taskSanitizer = new TaskSanitizer(); @@ -63,34 +66,40 @@ public class DefaultTaskDefinitionAssembler ex private final ArgumentSanitizer argumentSanitizer = new ArgumentSanitizer(); - public DefaultTaskDefinitionAssembler(TaskExecutionService taskExecutionService, boolean enableManifest, - Class classType, TaskJobService taskJobService, TaskExplorer taskExplorer) { + private final AggregateExecutionSupport aggregateExecutionSupport; + + public DefaultTaskDefinitionAssembler( + TaskExecutionService taskExecutionService, + boolean enableManifest, + Class classType, + TaskJobService taskJobService, + AggregateTaskExplorer taskExplorer, + AggregateExecutionSupport aggregateExecutionSupport) { super(TaskDefinitionController.class, classType); this.taskExecutionService = taskExecutionService; this.enableManifest = enableManifest; this.taskJobService = taskJobService; this.taskExplorer = taskExplorer; + this.aggregateExecutionSupport = aggregateExecutionSupport; } TaskDefinitionResource updateTaskExecutionResource( TaskExecutionAwareTaskDefinition taskExecutionAwareTaskDefinition, TaskDefinitionResource taskDefinitionResource, boolean manifest) { - TaskExecution taskExecution = taskExecutionAwareTaskDefinition.getLatestTaskExecution(); - taskExecution = this.taskSanitizer.sanitizeTaskExecutionArguments(taskExecution); + AggregateTaskExecution taskExecution = this.sanitizeTaskExecutionArguments(taskExecutionAwareTaskDefinition.getLatestTaskExecution()); TaskManifest taskManifest = null; if (manifest) { - taskManifest = this.taskExecutionService.findTaskManifestById(taskExecution.getExecutionId()); + taskManifest = this.taskExecutionService.findTaskManifestById(taskExecution.getExecutionId(), taskExecution.getSchemaTarget()); taskManifest = this.taskSanitizer.sanitizeTaskManifest(taskManifest); } TaskJobExecution composedTaskJobExecution = null; if (taskExecution != null && taskDefinitionResource.isComposed()) { - Set jobExecutionIds = this.taskExplorer.getJobExecutionIdsByTaskExecutionId(taskExecution.getExecutionId()); + Set jobExecutionIds = this.taskExplorer.getJobExecutionIdsByTaskExecutionId(taskExecution.getExecutionId(), taskExecution.getSchemaTarget()); if(jobExecutionIds != null && 
jobExecutionIds.size() > 0) {
 			try {
-				composedTaskJobExecution = this.taskJobService.getJobExecution(jobExecutionIds.toArray(new Long[0])[0]);
-			}
-			catch(NoSuchJobExecutionException noSuchJobExecutionException) {
+				composedTaskJobExecution = this.taskJobService.getJobExecution(jobExecutionIds.toArray(new Long[0])[0], taskExecution.getSchemaTarget());
+			} catch (NoSuchJobExecutionException noSuchJobExecutionException) {
 				logger.warn("Job Execution for Task Execution {} could not be found.", taskExecution.getExecutionId());
 			}
@@ -102,7 +111,16 @@ TaskDefinitionResource updateTaskExecutionResource(
 		taskDefinitionResource.setLastTaskExecution(taskExecutionResource);
 		return taskDefinitionResource;
 	}
-
+	private AggregateTaskExecution sanitizeTaskExecutionArguments(AggregateTaskExecution taskExecution) {
+		// The latest task execution may be absent; guard before sanitizing its arguments,
+		// matching the null check performed by the caller.
+		if (taskExecution == null) {
+			return null;
+		}
+		List<String> args = taskExecution.getArguments().stream()
+				.map(this.argumentSanitizer::sanitize).collect(Collectors.toList());
+		taskExecution.setArguments(args);
+		return taskExecution;
+	}
 	@Override
 	public R toModel(TaskExecutionAwareTaskDefinition taskExecutionAwareTaskDefinition) {
 		return createModelWithId(taskExecutionAwareTaskDefinition.getTaskDefinition().getName(),
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssemblerProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssemblerProvider.java
index eb72a5f2b0..3a8274b83b 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssemblerProvider.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/assembler/DefaultTaskDefinitionAssemblerProvider.java
@@ -16,31 +16,47 @@ package org.springframework.cloud.dataflow.server.controller.assembler;

 import org.springframework.cloud.dataflow.rest.resource.TaskDefinitionResource;
+import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport;
+import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer;
 import org.springframework.cloud.dataflow.server.service.TaskExecutionService;
 import org.springframework.cloud.dataflow.server.service.TaskJobService;
-import org.springframework.cloud.task.repository.TaskExplorer;
+import org.springframework.util.Assert;

 /**
  * Default REST resource assembler that returns the {@link TaskDefinitionResource} type.
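+ * <p>
+ * For example (illustrative wiring only; the collaborator variables are hypothetical):
+ * <pre>
+ * new DefaultTaskDefinitionAssemblerProvider(
+ *         taskExecutionService, taskJobService, aggregateTaskExplorer, aggregateExecutionSupport);
+ * </pre>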
+ * * @author Ilayaperumal Gopinathan * @author Glenn Renfro */ public class DefaultTaskDefinitionAssemblerProvider implements TaskDefinitionAssemblerProvider { private final TaskExecutionService taskExecutionService; - private final TaskExplorer taskExplorer; + + private final AggregateTaskExplorer taskExplorer; + private final TaskJobService taskJobService; - public DefaultTaskDefinitionAssemblerProvider(TaskExecutionService taskExecutionService, - TaskJobService taskJobService, TaskExplorer taskExplorer) { + private final AggregateExecutionSupport aggregateExecutionSupport; + + public DefaultTaskDefinitionAssemblerProvider( + TaskExecutionService taskExecutionService, + TaskJobService taskJobService, + AggregateTaskExplorer taskExplorer, + AggregateExecutionSupport aggregateExecutionSupport + ) { + Assert.notNull(taskExecutionService, "taskExecutionService required"); + Assert.notNull(taskJobService, "taskJobService required"); + Assert.notNull(taskExplorer, "taskExplorer required"); + Assert.notNull(aggregateExecutionSupport, "aggregateExecutionSupport required"); this.taskExecutionService = taskExecutionService; this.taskJobService = taskJobService; this.taskExplorer = taskExplorer; + this.aggregateExecutionSupport = aggregateExecutionSupport; } @Override public DefaultTaskDefinitionAssembler getTaskDefinitionAssembler(boolean enableManifest) { return new DefaultTaskDefinitionAssembler(taskExecutionService, enableManifest, - TaskDefinitionResource.class, taskJobService, taskExplorer); + TaskDefinitionResource.class, taskJobService, taskExplorer, aggregateExecutionSupport); } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/support/TaskExecutionAwareTaskDefinition.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/support/TaskExecutionAwareTaskDefinition.java index 86874bed21..cba3c7f7f7 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/support/TaskExecutionAwareTaskDefinition.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/support/TaskExecutionAwareTaskDefinition.java @@ -16,6 +16,7 @@ package org.springframework.cloud.dataflow.server.controller.support; import org.springframework.cloud.dataflow.core.TaskDefinition; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.util.Assert; @@ -29,7 +30,7 @@ public class TaskExecutionAwareTaskDefinition { final TaskDefinition taskDefinition; - final TaskExecution latestTaskExecution; + final AggregateTaskExecution latestTaskExecution; /** * Initialized the {@link TaskExecutionAwareTaskDefinition} with the provided @@ -38,7 +39,7 @@ public class TaskExecutionAwareTaskDefinition { * @param taskDefinition Must not be null * @param latestTaskExecution Must not be null */ - public TaskExecutionAwareTaskDefinition(TaskDefinition taskDefinition, TaskExecution latestTaskExecution) { + public TaskExecutionAwareTaskDefinition(TaskDefinition taskDefinition, AggregateTaskExecution latestTaskExecution) { super(); Assert.notNull(taskDefinition, "The provided taskDefinition must not be null."); @@ -78,7 +79,7 @@ public TaskDefinition getTaskDefinition() { * * @return May return null */ - public TaskExecution getLatestTaskExecution() { + public AggregateTaskExecution getLatestTaskExecution() { return 
latestTaskExecution; } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/support/TaskExecutionControllerDeleteAction.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/support/TaskExecutionControllerDeleteAction.java index b25f756ea4..d5f75e36f5 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/support/TaskExecutionControllerDeleteAction.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/controller/support/TaskExecutionControllerDeleteAction.java @@ -15,10 +15,12 @@ */ package org.springframework.cloud.dataflow.server.controller.support; +import java.util.Set; + import org.springframework.cloud.dataflow.server.controller.TaskExecutionController; /** - * This enum is used by the {@link TaskExecutionController#cleanup(java.util.Set, TaskExecutionControllerDeleteAction[])}. + * This enum is used by the {@link TaskExecutionController#cleanup(Set, TaskExecutionControllerDeleteAction[], String)}. * * @author Gunnar Hillert * diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractAggregateViewMigration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractAggregateViewMigration.java new file mode 100644 index 0000000000..876555c91f --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractAggregateViewMigration.java @@ -0,0 +1,64 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.cloud.dataflow.server.db.migration; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; +import org.springframework.cloud.dataflow.common.flyway.SqlCommand; + +public abstract class AbstractAggregateViewMigration extends AbstractMigration { + public AbstractAggregateViewMigration() { + super(null); + } + + public final static String CREATE_AGGREGATE_TASK_EXECUTION_VIEW = "CREATE VIEW AGGREGATE_TASK_EXECUTION AS\n" + + " SELECT TASK_EXECUTION_ID, START_TIME, END_TIME, TASK_NAME, EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID, 'boot2' AS SCHEMA_TARGET FROM TASK_EXECUTION\n" + + "UNION ALL\n" + + " SELECT TASK_EXECUTION_ID, START_TIME, END_TIME, TASK_NAME, EXIT_CODE, EXIT_MESSAGE, ERROR_MESSAGE, LAST_UPDATED, EXTERNAL_EXECUTION_ID, PARENT_EXECUTION_ID, 'boot3' AS SCHEMA_TARGET FROM BOOT3_TASK_EXECUTION"; + + public final static String CREATE_AGGREGATE_TASK_EXECUTION_PARAMS_VIEW = "CREATE VIEW AGGREGATE_TASK_EXECUTION_PARAMS AS\n" + + " SELECT TASK_EXECUTION_ID, TASK_PARAM, 'boot2' AS SCHEMA_TARGET FROM TASK_EXECUTION_PARAMS\n" + + "UNION ALL\n" + + " SELECT TASK_EXECUTION_ID, TASK_PARAM, 'boot3' AS SCHEMA_TARGET FROM BOOT3_TASK_EXECUTION_PARAMS"; + public final static String CREATE_AGGREGATE_JOB_EXECUTION_VIEW = "CREATE VIEW AGGREGATE_JOB_EXECUTION AS\n" + + " SELECT JOB_EXECUTION_ID, VERSION, JOB_INSTANCE_ID, CREATE_TIME, START_TIME, END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, LAST_UPDATED, 'boot2' AS SCHEMA_TARGET FROM BATCH_JOB_EXECUTION\n" + + "UNION ALL\n" + + " SELECT JOB_EXECUTION_ID, VERSION, JOB_INSTANCE_ID, CREATE_TIME, START_TIME, END_TIME, STATUS, EXIT_CODE, EXIT_MESSAGE, LAST_UPDATED, 'boot3' AS SCHEMA_TARGET FROM BOOT3_BATCH_JOB_EXECUTION"; + public final static String CREATE_AGGREGATE_JOB_INSTANCE_VIEW = "CREATE VIEW AGGREGATE_JOB_INSTANCE AS\n" + + " SELECT JOB_INSTANCE_ID, VERSION, JOB_NAME, JOB_KEY, 'boot2' AS SCHEMA_TARGET FROM BATCH_JOB_INSTANCE\n" + + "UNION ALL\n" + + " SELECT JOB_INSTANCE_ID, VERSION, JOB_NAME, JOB_KEY, 'boot3' AS SCHEMA_TARGET FROM BOOT3_BATCH_JOB_INSTANCE"; + public final static String CREATE_AGGREGATE_TASK_BATCH_VIEW = "CREATE VIEW AGGREGATE_TASK_BATCH AS\n" + + " SELECT TASK_EXECUTION_ID, JOB_EXECUTION_ID, 'boot2' AS SCHEMA_TARGET FROM TASK_TASK_BATCH\n" + + "UNION ALL\n" + + " SELECT TASK_EXECUTION_ID, JOB_EXECUTION_ID, 'boot3' AS SCHEMA_TARGET FROM BOOT3_TASK_TASK_BATCH"; + public final static String CREATE_AGGREGATE_STEP_EXECUTION_VIEW = "CREATE VIEW AGGREGATE_STEP_EXECUTION AS\n" + + " SELECT STEP_EXECUTION_ID, VERSION, STEP_NAME, JOB_EXECUTION_ID, START_TIME, END_TIME, STATUS, COMMIT_COUNT, READ_COUNT, FILTER_COUNT, WRITE_COUNT, READ_SKIP_COUNT, WRITE_SKIP_COUNT, PROCESS_SKIP_COUNT, ROLLBACK_COUNT, EXIT_CODE, EXIT_MESSAGE, LAST_UPDATED, 'boot2' AS SCHEMA_TARGET FROM BATCH_STEP_EXECUTION\n" + + "UNION ALL\n" + + " SELECT STEP_EXECUTION_ID, VERSION, STEP_NAME, JOB_EXECUTION_ID, START_TIME, END_TIME, STATUS, COMMIT_COUNT, READ_COUNT, FILTER_COUNT, WRITE_COUNT, READ_SKIP_COUNT, WRITE_SKIP_COUNT, PROCESS_SKIP_COUNT, ROLLBACK_COUNT, EXIT_CODE, EXIT_MESSAGE, LAST_UPDATED, 'boot3' AS SCHEMA_TARGET FROM BOOT3_BATCH_STEP_EXECUTION"; + @Override + public List getCommands() { + return Arrays.asList( + SqlCommand.from(CREATE_AGGREGATE_TASK_EXECUTION_VIEW), + SqlCommand.from(CREATE_AGGREGATE_TASK_EXECUTION_PARAMS_VIEW), + SqlCommand.from(CREATE_AGGREGATE_TASK_BATCH_VIEW), + 
SqlCommand.from(CREATE_AGGREGATE_JOB_EXECUTION_VIEW),
+				SqlCommand.from(CREATE_AGGREGATE_JOB_INSTANCE_VIEW),
+				SqlCommand.from(CREATE_AGGREGATE_STEP_EXECUTION_VIEW));
+	}
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractBoot3InitialSetupMigration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractBoot3InitialSetupMigration.java
index 3dfd7e0243..77fc6bc0b4 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractBoot3InitialSetupMigration.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractBoot3InitialSetupMigration.java
@@ -27,7 +27,6 @@
  * @author Chris Bono
  */
 public abstract class AbstractBoot3InitialSetupMigration extends AbstractMigration {
-
 	public AbstractBoot3InitialSetupMigration() {
 		super(null);
 	}
@@ -53,4 +52,5 @@ public List<SqlCommand> getCommands() {
 	 * @return the list of sql commands
 	 */
 	public abstract List<SqlCommand> createBatch5Tables();
+
 }
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCaseSensitiveMigration.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCaseSensitiveMigration.java
new file mode 100644
index 0000000000..ed9b01321c
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractCaseSensitiveMigration.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration;
+
+import org.springframework.cloud.dataflow.common.flyway.AbstractMigration;
+
+/**
+ * Provides for converting lower-case table names to upper case so that the schema works correctly on a MariaDB or MySQL installation with case-sensitive table or column names.
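+ * <p>
+ * For example (illustrative only), a subclass applies the rename commands in order, as
+ * {@code V8__RenameLowerCaseTables} does for MariaDB:
+ * <pre>
+ * SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_LC);
+ * SqlCommand.from(RENAME_TASK_EXECUTION_METADATA);
+ * </pre>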
+ * @author Corneil du Plessis
+ */
+public abstract class AbstractCaseSensitiveMigration extends AbstractMigration {
+	protected final static String RENAME_TASK_EXECUTION_METADATA_LC = "alter table task_execution_metadata rename to task_execution_metadata_lc";
+
+	protected final static String RENAME_TASK_EXECUTION_METADATA = "alter table task_execution_metadata_lc rename to TASK_EXECUTION_METADATA";
+
+	protected final static String RENAME_TASK_EXECUTION_METADATA_SEQ_LC_TBL = "alter table task_execution_metadata_seq rename to task_execution_metadata_seq_lc";
+
+	protected final static String RENAME_TASK_EXECUTION_METADATA_SEQ_TBL = "alter table task_execution_metadata_seq_lc rename to TASK_EXECUTION_METADATA_SEQ";
+
+	protected final static String RENAME_TASK_EXECUTION_METADATA_SEQ_LC = "alter sequence task_execution_metadata_seq rename to task_execution_metadata_seq_lc";
+
+	protected final static String RENAME_TASK_EXECUTION_METADATA_SEQ = "alter sequence task_execution_metadata_seq_lc rename to TASK_EXECUTION_METADATA_SEQ";
+
+	protected final static String CREATE_SEQUENCE_TASK_EXECUTION_METADATA_SEQ_LC = "CREATE SEQUENCE task_execution_metadata_seq_lc";
+
+	protected final static String ALTER_SEQUENCE_TASK_EXECUTION_METADATA_SEQ_LC = "select setval(task_execution_metadata_seq_lc, (select nextval(task_execution_metadata_seq)), false)";
+
+	protected final static String DROP_SEQUENCE_TASK_EXECUTION_METADATA_SEQ = "drop sequence task_execution_metadata_seq";
+
+	protected final static String CREATE_SEQUENCE_TASK_EXECUTION_METADATA_SEQ = "create sequence TASK_EXECUTION_METADATA_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB";
+
+	protected final static String ALTER_SEQUENCE_TASK_EXECUTION_METADATA_SEQ = "select setval(TASK_EXECUTION_METADATA_SEQ, (select nextval(task_execution_metadata_seq_lc)), false)";
+
+	protected final static String DROP_SEQUENCE_TASK_EXECUTION_METADATA_SEQ_LC = "drop sequence task_execution_metadata_seq_lc";
+
+	public AbstractCaseSensitiveMigration() {
+		super(null);
+	}
+
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/PostgreSQLTextToOID.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/PostgreSQLTextToOID.java
new file mode 100644
index 0000000000..05a1adb08a
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/PostgreSQLTextToOID.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.cloud.dataflow.server.db.migration;
+
+import javax.sql.DataSource;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.jdbc.core.JdbcTemplate;
+
+/**
+ * Provides for converting text or longtext fields in PostgreSQL to OID.
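+ * <p>
+ * For example (illustrative call only; the table and column names here are hypothetical):
+ * <pre>
+ * PostgreSQLTextToOID.convertColumn("task_execution_metadata", "id", "task_execution_manifest", dataSource);
+ * </pre>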
+ * + * @author Corneil du Plessis + */ +public class PostgreSQLTextToOID { + private final static Logger logger = LoggerFactory.getLogger(PostgreSQLTextToOID.class); + + private final static String ADD_TMP_OID_COL = "alter table %s add column %s oid"; + + private final static String UPDATE_TMP_OID_COL = "update %s set %s = lo_from_bytea(0, %s::bytea), %s = null where %s in (select %s from %s where %s is null and %s is not null limit 100)"; + + private final static String DROP_ORIGINAL_COL = "alter table %s drop column %s"; + + private final static String RENAME_OID_COL = "alter table %s rename column %s to %s"; + + public static void convertColumn(String table, String id, String column, DataSource dataSource) { + JdbcTemplate template = new JdbcTemplate(dataSource); + final String tmp_col = column + "_tmp"; + String sqlTmp = String.format(ADD_TMP_OID_COL, table, tmp_col); + logger.debug("Executing:{}", sqlTmp); + template.update(sqlTmp); + int total = 0; + do { + String sql = String.format(UPDATE_TMP_OID_COL, table, tmp_col, column, column, id, id, table, tmp_col, column); + logger.debug("Executing:{}", sql); + int count = template.update(sql); + total += count; + if (count <= 0) { + logger.info("Updated {} rows of {} in {}", total, column, table); + break; + } + } while (true); + String sqlDrop = String.format(DROP_ORIGINAL_COL, table, column); + logger.debug("Executing:{}", sqlDrop); + template.update(sqlDrop); + String sqlRename = String.format(RENAME_OID_COL, table, tmp_col, column); + logger.debug("Executing:{}", sqlRename); + template.update(sqlRename); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V7__Boot3_Add_Task3_Batch5_Schema.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V7__Boot3_Add_Task3_Batch5_Schema.java index efda8ac276..e50476e369 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V7__Boot3_Add_Task3_Batch5_Schema.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V7__Boot3_Add_Task3_Batch5_Schema.java @@ -31,144 +31,146 @@ */ public class V7__Boot3_Add_Task3_Batch5_Schema extends AbstractBoot3InitialSetupMigration { - public final static String CREATE_TASK_EXECUTION_TABLE = - "CREATE TABLE BOOT3_TASK_EXECUTION\n" + - "(\n" + - " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " START_TIME TIMESTAMP(9) DEFAULT NULL,\n" + - " END_TIME TIMESTAMP(9) DEFAULT NULL,\n" + - " TASK_NAME VARCHAR(100),\n" + - " EXIT_CODE INTEGER,\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " ERROR_MESSAGE VARCHAR(2500),\n" + - " LAST_UPDATED TIMESTAMP(9),\n" + - " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + - " PARENT_EXECUTION_ID BIGINT\n" + - ")"; - - public final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = - "CREATE TABLE BOOT3_TASK_EXECUTION_PARAMS\n" + - "(\n" + - " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + - " TASK_PARAM VARCHAR(2500),\n" + - " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + - " references BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + - ")"; - - public final static String CREATE_TASK_TASK_BATCH = - "CREATE TABLE BOOT3_TASK_TASK_BATCH\n" + - "(\n" + - " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " constraint BOOT3_TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + - " references BOOT3_TASK_EXECUTION 
(TASK_EXECUTION_ID)\n" + - ")"; - - public final static String CREATE_TASK_LOCK_TABLE = - "CREATE TABLE BOOT3_TASK_LOCK\n" + - "(\n" + - " LOCK_KEY CHAR(36) NOT NULL,\n" + - " REGION VARCHAR(100) NOT NULL,\n" + - " CLIENT_ID CHAR(36),\n" + - " CREATED_DATE TIMESTAMP(9) NOT NULL,\n" + - " constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + - ")"; + private final static String CREATE_TASK_EXECUTION_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " START_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + " END_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + " TASK_NAME VARCHAR(100),\n" + + " EXIT_CODE INTEGER,\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " ERROR_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP(9),\n" + + " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + + " PARENT_EXECUTION_ID BIGINT\n" + + ")"; + + private final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_PARAMS (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_PARAM VARCHAR(2500),\n" + + " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ")"; + + private final static String CREATE_TASK_TASK_BATCH = + "CREATE TABLE BOOT3_TASK_TASK_BATCH (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " constraint BOOT3_TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ")"; + + private final static String CREATE_TASK_LOCK_TABLE = + "CREATE TABLE BOOT3_TASK_LOCK (\n" + + " LOCK_KEY CHAR(36) NOT NULL,\n" + + " REGION VARCHAR(100) NOT NULL,\n" + + " CLIENT_ID CHAR(36),\n" + + " CREATED_DATE TIMESTAMP(9) NOT NULL,\n" + + " constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ")"; private final static String CREATE_TASK_SEQ_SEQUENCE = "CREATE SEQUENCE BOOT3_TASK_SEQ AS BIGINT START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NOCACHE NOCYCLE"; - public final static String CREATE_BATCH_JOB_INSTANCE_TABLE = - "CREATE TABLE BOOT3_BATCH_JOB_INSTANCE\n" + - "(\n" + - " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT,\n" + - " JOB_NAME VARCHAR(100) NOT NULL,\n" + - " JOB_KEY VARCHAR(32) NOT NULL,\n" + - " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + - ")"; - - public final static String CREATE_BATCH_JOB_EXECUTION_TABLE = - "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION\n" + - "(\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT,\n" + - " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + - " CREATE_TIME TIMESTAMP(9) NOT NULL,\n" + - " START_TIME TIMESTAMP(9) DEFAULT NULL,\n" + - " END_TIME TIMESTAMP(9) DEFAULT NULL,\n" + - " STATUS VARCHAR(10),\n" + - " EXIT_CODE VARCHAR(2500),\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " LAST_UPDATED TIMESTAMP(9),\n" + - " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + - " references BOOT3_BATCH_JOB_INSTANCE (JOB_INSTANCE_ID)\n" + - ")"; - - public final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = - "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS\n" + - "(\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " PARAMETER_NAME VARCHAR(100) NOT NULL,\n" + - " PARAMETER_TYPE VARCHAR(100) NOT NULL,\n" + - " PARAMETER_VALUE VARCHAR(2500),\n" + - " IDENTIFYING CHAR(1) NOT NULL,\n" + - " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" + - ")"; - - 
public final static String CREATE_BATCH_STEP_EXECUTION_TABLE = - "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION\n" + - "(\n" + - " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT NOT NULL,\n" + - " STEP_NAME VARCHAR(100) NOT NULL,\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " CREATE_TIME TIMESTAMP(9) NOT NULL,\n" + - " START_TIME TIMESTAMP(9) DEFAULT NULL,\n" + - " END_TIME TIMESTAMP(9) DEFAULT NULL,\n" + - " STATUS VARCHAR(10),\n" + - " COMMIT_COUNT BIGINT,\n" + - " READ_COUNT BIGINT,\n" + - " FILTER_COUNT BIGINT,\n" + - " WRITE_COUNT BIGINT,\n" + - " READ_SKIP_COUNT BIGINT,\n" + - " WRITE_SKIP_COUNT BIGINT,\n" + - " PROCESS_SKIP_COUNT BIGINT,\n" + - " ROLLBACK_COUNT BIGINT,\n" + - " EXIT_CODE VARCHAR(2500),\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " LAST_UPDATED TIMESTAMP(9),\n" + - " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" + - ")"; - - public final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = - "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT\n" + - "(\n" + - " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + - " SERIALIZED_CONTEXT CLOB,\n" + - " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + - " references BOOT3_BATCH_STEP_EXECUTION (STEP_EXECUTION_ID)\n" + - ")"; - - public final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = - "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT\n" + - "(\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + - " SERIALIZED_CONTEXT CLOB,\n" + - " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" + - ")"; - - public final static String CREATE_BATCH_STEP_EXECUTION_SEQUENCE = + private final static String CREATE_TASK_EXECUTION_METADATA_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_METADATA (\n" + + " ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_MANIFEST CLOB,\n" + + " primary key (ID),\n" + + " CONSTRAINT BOOT3_TASK_METADATA_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ")"; + + private final static String CREATE_TASK_EXECUTION_METADATA_SEQ = + "CREATE SEQUENCE BOOT3_TASK_EXECUTION_METADATA_SEQ AS BIGINT MAXVALUE 9223372036854775807 NO CYCLE"; + + private final static String CREATE_BATCH_JOB_INSTANCE_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_INSTANCE (\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_KEY VARCHAR(32) NOT NULL,\n" + + " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ")"; + + private final static String CREATE_BATCH_JOB_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + + " CREATE_TIME TIMESTAMP(9) NOT NULL,\n" + + " START_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + " END_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP(9),\n" + + " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + " references BOOT3_BATCH_JOB_INSTANCE (JOB_INSTANCE_ID)\n" + + ")"; + + private final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = + "CREATE TABLE 
BOOT3_BATCH_JOB_EXECUTION_PARAMS (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " PARAMETER_NAME VARCHAR(100) NOT NULL,\n" + + " PARAMETER_TYPE VARCHAR(100) NOT NULL,\n" + + " PARAMETER_VALUE VARCHAR(2500),\n" + + " IDENTIFYING CHAR(1) NOT NULL,\n" + + " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" + + ")"; + + private final static String CREATE_BATCH_STEP_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION (\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT NOT NULL,\n" + + " STEP_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " CREATE_TIME TIMESTAMP(9) NOT NULL,\n" + + " START_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + " END_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " COMMIT_COUNT BIGINT,\n" + + " READ_COUNT BIGINT,\n" + + " FILTER_COUNT BIGINT,\n" + + " WRITE_COUNT BIGINT,\n" + + " READ_SKIP_COUNT BIGINT,\n" + + " WRITE_SKIP_COUNT BIGINT,\n" + + " PROCESS_SKIP_COUNT BIGINT,\n" + + " ROLLBACK_COUNT BIGINT,\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP(9),\n" + + " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" + + ")"; + + private final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT (\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT CLOB,\n" + + " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + + " references BOOT3_BATCH_STEP_EXECUTION (STEP_EXECUTION_ID)\n" + + ")"; + + private final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT CLOB,\n" + + " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" + + ")"; + + private final static String CREATE_BATCH_STEP_EXECUTION_SEQUENCE = "CREATE SEQUENCE BOOT3_BATCH_STEP_EXECUTION_SEQ AS BIGINT MAXVALUE 9223372036854775807 NO CYCLE"; - public final static String CREATE_BATCH_JOB_EXECUTION_SEQUENCE = + private final static String CREATE_BATCH_JOB_EXECUTION_SEQUENCE = "CREATE SEQUENCE BOOT3_BATCH_JOB_EXECUTION_SEQ AS BIGINT MAXVALUE 9223372036854775807 NO CYCLE"; - public final static String CREATE_BATCH_JOB_SEQUENCE = + private final static String CREATE_BATCH_JOB_SEQUENCE = "CREATE SEQUENCE BOOT3_BATCH_JOB_SEQ AS BIGINT MAXVALUE 9223372036854775807 NO CYCLE"; @Override @@ -178,7 +180,10 @@ public List createTask3Tables() { SqlCommand.from(CREATE_TASK_EXECUTION_PARAMS_TABLE), SqlCommand.from(CREATE_TASK_TASK_BATCH), SqlCommand.from(CREATE_TASK_SEQ_SEQUENCE), - SqlCommand.from(CREATE_TASK_LOCK_TABLE)); + SqlCommand.from(CREATE_TASK_LOCK_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_SEQ) + ); } @Override @@ -192,6 +197,8 @@ public List createBatch5Tables() { SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE), SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_SEQUENCE), SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_SEQUENCE), - SqlCommand.from(CREATE_BATCH_JOB_SEQUENCE)); + SqlCommand.from(CREATE_BATCH_JOB_SEQUENCE) + ); } + } diff 
--git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V8__AddAggregateViews.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V8__AddAggregateViews.java new file mode 100644 index 0000000000..bb0b309056 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/db2/V8__AddAggregateViews.java @@ -0,0 +1,7 @@ +package org.springframework.cloud.dataflow.server.db.migration.db2; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractAggregateViewMigration; + +public class V8__AddAggregateViews extends AbstractAggregateViewMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V6__Boot3_Add_Task3_Batch5_Schema.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V6__Boot3_Add_Task3_Batch5_Schema.java index 0ff179bac5..de3d5cd3f3 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V6__Boot3_Add_Task3_Batch5_Schema.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V6__Boot3_Add_Task3_Batch5_Schema.java @@ -31,136 +31,149 @@ */ public class V6__Boot3_Add_Task3_Batch5_Schema extends AbstractBoot3InitialSetupMigration { - public final static String CREATE_TASK_EXECUTION_TABLE = + private final static String CREATE_TASK_EXECUTION_TABLE = "CREATE TABLE BOOT3_TASK_EXECUTION (\n" + - " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " START_TIME DATETIME(6) DEFAULT NULL,\n" + - " END_TIME DATETIME(6) DEFAULT NULL,\n" + - " TASK_NAME VARCHAR(100),\n" + - " EXIT_CODE INTEGER,\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " ERROR_MESSAGE VARCHAR(2500),\n" + - " LAST_UPDATED TIMESTAMP,\n" + - " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + - " PARENT_EXECUTION_ID BIGINT\n" + - ")"; - - public final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = + " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " START_TIME DATETIME(6) DEFAULT NULL,\n" + + " END_TIME DATETIME(6) DEFAULT NULL,\n" + + " TASK_NAME VARCHAR(100),\n" + + " EXIT_CODE INTEGER,\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " ERROR_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP,\n" + + " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + + " PARENT_EXECUTION_ID BIGINT\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = "CREATE TABLE BOOT3_TASK_EXECUTION_PARAMS (\n" + - " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + - " TASK_PARAM VARCHAR(2500),\n" + - " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + - " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + - ")"; + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_PARAM VARCHAR(2500),\n" + + " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; - public final static String CREATE_TASK_TASK_BATCH = + private final static String CREATE_TASK_TASK_BATCH = "CREATE TABLE BOOT3_TASK_TASK_BATCH (\n" + - " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " constraint BOOT3_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + - " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + - ")"; + " 
TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " constraint BOOT3_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; - public final static String CREATE_TASK_LOCK_TABLE = + private final static String CREATE_TASK_LOCK_TABLE = "CREATE TABLE BOOT3_TASK_LOCK (\n" + - " LOCK_KEY CHAR(36) NOT NULL,\n" + - " REGION VARCHAR(100) NOT NULL,\n" + - " CLIENT_ID CHAR(36),\n" + - " CREATED_DATE DATETIME(6) NOT NULL,\n" + - " constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + - ")"; - - private final static String CREATE_TASK_SEQ_SEQUENCE = - "CREATE SEQUENCE BOOT3_TASK_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE"; - - public final static String CREATE_BATCH_JOB_INSTANCE_TABLE = + " LOCK_KEY CHAR(36) NOT NULL,\n" + + " REGION VARCHAR(100) NOT NULL,\n" + + " CLIENT_ID CHAR(36),\n" + + " CREATED_DATE DATETIME(6) NOT NULL,\n" + + " constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_SEQ = + "CREATE SEQUENCE BOOT3_TASK_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB"; + + private final static String CREATE_TASK_EXECUTION_METADATA_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_METADATA (\n" + + " ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_MANIFEST LONGTEXT,\n" + + " primary key (ID),\n" + + " CONSTRAINT BOOT3_TASK_METADATA_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_EXECUTION_METADATA_SEQ = + "CREATE SEQUENCE BOOT3_TASK_EXECUTION_METADATA_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_INSTANCE_TABLE = "CREATE TABLE BOOT3_BATCH_JOB_INSTANCE (\n" + - " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT,\n" + - " JOB_NAME VARCHAR(100) NOT NULL,\n" + - " JOB_KEY VARCHAR(32) NOT NULL,\n" + - " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + - ")"; - - public final static String CREATE_BATCH_JOB_EXECUTION_TABLE = + " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_KEY VARCHAR(32) NOT NULL,\n" + + " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_EXECUTION_TABLE = "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION (\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT,\n" + - " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + - " CREATE_TIME DATETIME(6) NOT NULL,\n" + - " START_TIME DATETIME(6) DEFAULT NULL,\n" + - " END_TIME DATETIME(6) DEFAULT NULL,\n" + - " STATUS VARCHAR(10),\n" + - " EXIT_CODE VARCHAR(2500),\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " LAST_UPDATED DATETIME(6),\n" + - " JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,\n" + - " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + - " references BOOT3_BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + - ")"; - - public final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + + " CREATE_TIME DATETIME(6) NOT NULL,\n" + + " START_TIME DATETIME(6) DEFAULT NULL,\n" + + " 
END_TIME DATETIME(6) DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME(6),\n" + + " JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,\n" + + " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + " references BOOT3_BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS (\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " PARAMETER_NAME VARCHAR(100) NOT NULL,\n" + - " PARAMETER_TYPE VARCHAR(100) NOT NULL,\n" + - " PARAMETER_VALUE VARCHAR(2500),\n" + - " IDENTIFYING CHAR(1) NOT NULL,\n" + - " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + - ")"; - - public final static String CREATE_BATCH_STEP_EXECUTION_TABLE = + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " PARAMETER_NAME VARCHAR(100) NOT NULL,\n" + + " PARAMETER_TYPE VARCHAR(100) NOT NULL,\n" + + " PARAMETER_VALUE VARCHAR(2500),\n" + + " IDENTIFYING CHAR(1) NOT NULL,\n" + + " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_STEP_EXECUTION_TABLE = "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION (\n" + - " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT NOT NULL,\n" + - " STEP_NAME VARCHAR(100) NOT NULL,\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " CREATE_TIME DATETIME(6) NOT NULL,\n" + - " START_TIME DATETIME(6) NOT NULL,\n" + - " END_TIME DATETIME(6) DEFAULT NULL,\n" + - " STATUS VARCHAR(10),\n" + - " COMMIT_COUNT BIGINT,\n" + - " READ_COUNT BIGINT,\n" + - " FILTER_COUNT BIGINT,\n" + - " WRITE_COUNT BIGINT,\n" + - " READ_SKIP_COUNT BIGINT,\n" + - " WRITE_SKIP_COUNT BIGINT,\n" + - " PROCESS_SKIP_COUNT BIGINT,\n" + - " ROLLBACK_COUNT BIGINT,\n" + - " EXIT_CODE VARCHAR(2500),\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " LAST_UPDATED DATETIME(6),\n" + - " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + - ")"; - - public final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = + "STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY ,\n" + + "VERSION BIGINT NOT NULL,\n" + + "STEP_NAME VARCHAR(100) NOT NULL,\n" + + "JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + "CREATE_TIME DATETIME(6) NOT NULL,\n" + + "START_TIME DATETIME(6) DEFAULT NULL ,\n" + + "END_TIME DATETIME(6) DEFAULT NULL ,\n" + + "STATUS VARCHAR(10) ,\n" + + "COMMIT_COUNT BIGINT ,\n" + + "READ_COUNT BIGINT ,\n" + + "FILTER_COUNT BIGINT ,\n" + + "WRITE_COUNT BIGINT ,\n" + + "READ_SKIP_COUNT BIGINT ,\n" + + "WRITE_SKIP_COUNT BIGINT ,\n" + + "PROCESS_SKIP_COUNT BIGINT ,\n" + + "ROLLBACK_COUNT BIGINT ,\n" + + "EXIT_CODE VARCHAR(2500) ,\n" + + "EXIT_MESSAGE VARCHAR(2500) ,\n" + + "LAST_UPDATED DATETIME(6),\n" + + " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT (\n" + - " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + - " SERIALIZED_CONTEXT TEXT,\n" + - " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + - " 
references BOOT3_BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + - ")"; - - public final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + + " references BOOT3_BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT (\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + - " SERIALIZED_CONTEXT TEXT,\n" + - " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + - ")"; + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; - public final static String CREATE_BATCH_STEP_EXECUTION_SEQUENCE = - "CREATE SEQUENCE BOOT3_BATCH_STEP_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE"; + private final static String CREATE_BATCH_STEP_EXECUTION_SEQ = + "CREATE SEQUENCE BOOT3_BATCH_STEP_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB"; - public final static String CREATE_BATCH_JOB_EXECUTION_SEQUENCE = - "CREATE SEQUENCE BOOT3_BATCH_JOB_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE"; + private final static String CREATE_BATCH_JOB_EXECUTION_SEQ = + "CREATE SEQUENCE BOOT3_BATCH_JOB_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_SEQ = + "CREATE SEQUENCE BOOT3_BATCH_JOB_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE ENGINE=InnoDB"; - public final static String CREATE_BATCH_JOB_SEQUENCE = - "CREATE SEQUENCE BOOT3_BATCH_JOB_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE"; @Override public List createTask3Tables() { @@ -168,8 +181,11 @@ public List createTask3Tables() { SqlCommand.from(CREATE_TASK_EXECUTION_TABLE), SqlCommand.from(CREATE_TASK_EXECUTION_PARAMS_TABLE), SqlCommand.from(CREATE_TASK_TASK_BATCH), - SqlCommand.from(CREATE_TASK_SEQ_SEQUENCE), - SqlCommand.from(CREATE_TASK_LOCK_TABLE)); + SqlCommand.from(CREATE_TASK_SEQ), + SqlCommand.from(CREATE_TASK_LOCK_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_SEQ) + ); } @Override @@ -181,8 +197,10 @@ public List createBatch5Tables() { SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_TABLE), SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE), SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE), - SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_SEQUENCE), - SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_SEQUENCE), - SqlCommand.from(CREATE_BATCH_JOB_SEQUENCE)); + SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_SEQ), + SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_SEQ), + SqlCommand.from(CREATE_BATCH_JOB_SEQ) + ); } + } diff --git 
a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V8__RenameLowerCaseTables.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V8__RenameLowerCaseTables.java
new file mode 100644
index 0000000000..503f1db50f
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V8__RenameLowerCaseTables.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration.mariadb;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
+import org.springframework.cloud.dataflow.server.db.migration.AbstractCaseSensitiveMigration;
+
+/**
+ * Since MariaDB can operate in a case-sensitive mode for table and column names, the task metadata tables created with lower-case names need to be renamed to upper case so that prefixed references resolve correctly.
+ *
+ * @author Corneil du Plessis
+ */
+public class V8__RenameLowerCaseTables extends AbstractCaseSensitiveMigration {
+
+	@Override
+	public List<SqlCommand> getCommands() {
+		return Arrays.asList(
+				SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_LC),
+				SqlCommand.from(RENAME_TASK_EXECUTION_METADATA),
+				SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_SEQ_LC_TBL),
+				SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_SEQ_TBL)
+		);
+	}
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V9__AddAggregateViews.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V9__AddAggregateViews.java
new file mode 100644
index 0000000000..7152079689
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mariadb/V9__AddAggregateViews.java
@@ -0,0 +1,7 @@
+package org.springframework.cloud.dataflow.server.db.migration.mariadb;
+
+import org.springframework.cloud.dataflow.server.db.migration.AbstractAggregateViewMigration;
+
+public class V9__AddAggregateViews extends AbstractAggregateViewMigration {
+
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V7__Boot3_Add_Task3_Batch5_Schema.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V7__Boot3_Add_Task3_Batch5_Schema.java
index b1a628f91d..03a2af1b4e 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V7__Boot3_Add_Task3_Batch5_Schema.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V7__Boot3_Add_Task3_Batch5_Schema.java
@@ -32,187 
+32,201 @@ */ public class V7__Boot3_Add_Task3_Batch5_Schema extends AbstractBoot3InitialSetupMigration { - public final static String CREATE_TASK_EXECUTION_TABLE = - "CREATE TABLE BOOT3_TASK_EXECUTION\n" + - "(\n" + - " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " START_TIME DATETIME(6) DEFAULT NULL,\n" + - " END_TIME DATETIME(6) DEFAULT NULL,\n" + - " TASK_NAME VARCHAR(100),\n" + - " EXIT_CODE INTEGER,\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " ERROR_MESSAGE VARCHAR(2500),\n" + - " LAST_UPDATED TIMESTAMP,\n" + - " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + - " PARENT_EXECUTION_ID BIGINT\n" + - ") ENGINE=InnoDB"; - - public final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = - "CREATE TABLE BOOT3_TASK_EXECUTION_PARAMS\n" + - "(\n" + - " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + - " TASK_PARAM VARCHAR(2500),\n" + - " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + - " references BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + - ") ENGINE=InnoDB"; - - public final static String CREATE_TASK_TASK_BATCH = - "CREATE TABLE BOOT3_TASK_TASK_BATCH\n" + - "(\n" + - " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " constraint BOOT3_TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + - " references BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + - ") ENGINE=InnoDB"; - - public final static String CREATE_TASK_LOCK_TABLE = - "CREATE TABLE BOOT3_TASK_LOCK\n" + - "(\n" + - " LOCK_KEY CHAR(36) NOT NULL,\n" + - " REGION VARCHAR(100) NOT NULL,\n" + - " CLIENT_ID CHAR(36),\n" + - " CREATED_DATE DATETIME(6) NOT NULL,\n" + - " constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + - ") ENGINE=InnoDB"; + private final static String CREATE_TASK_EXECUTION_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " START_TIME DATETIME(6) DEFAULT NULL,\n" + + " END_TIME DATETIME(6) DEFAULT NULL,\n" + + " TASK_NAME VARCHAR(100),\n" + + " EXIT_CODE INTEGER,\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " ERROR_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP,\n" + + " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + + " PARENT_EXECUTION_ID BIGINT\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_PARAMS (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_PARAM VARCHAR(2500),\n" + + " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_TASK_BATCH = + "CREATE TABLE BOOT3_TASK_TASK_BATCH (\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " constraint BOOT3_TASK_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_TASK_LOCK_TABLE = + "CREATE TABLE BOOT3_TASK_LOCK (\n" + + " LOCK_KEY CHAR(36) NOT NULL,\n" + + " REGION VARCHAR(100) NOT NULL,\n" + + " CLIENT_ID CHAR(36),\n" + + " CREATED_DATE DATETIME(6) NOT NULL,\n" + + " constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ") ENGINE=InnoDB"; private final static String CREATE_TASK_SEQ_SEQUENCE = - "CREATE TABLE BOOT3_TASK_SEQ\n" + - "(\n" + - " ID BIGINT NOT NULL,\n" + - " UNIQUE_KEY CHAR(1) NOT NULL,\n" + - " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + - ") ENGINE=InnoDB"; + "CREATE TABLE BOOT3_TASK_SEQ (\n" + + " ID BIGINT NOT 
NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ") ENGINE=InnoDB"; + private final static String INIT_TASK_SEQ = "INSERT INTO BOOT3_TASK_SEQ (ID, UNIQUE_KEY)\n" + - "select *\n" + - "from (select 0 as ID, '0' as UNIQUE_KEY) as tmp"; + "select *\n" + + "from (select 0 as ID, '0' as UNIQUE_KEY) as tmp"; + + private final static String CREATE_TASK_EXECUTION_METADATA_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_METADATA (\n" + + " ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_MANIFEST TEXT,\n" + + " primary key (ID),\n" + + " CONSTRAINT BOOT3_TASK_METADATA_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ")"; - public final static String CREATE_BATCH_JOB_INSTANCE_TABLE = + private final static String CREATE_TASK_EXECUTION_METADATA_SEQ_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_METADATA_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ")"; + + private final static String INIT_TASK_EXECUTION_METADATA_SEQ = + "INSERT INTO BOOT3_TASK_EXECUTION_METADATA_SEQ (ID, UNIQUE_KEY)\n" + + "select *\n" + + "from (select 0 as ID, '0' as UNIQUE_KEY) as tmp\n" + + "where not exists(select * from BOOT3_TASK_EXECUTION_METADATA_SEQ)"; + + private final static String CREATE_BATCH_JOB_INSTANCE_TABLE = "CREATE TABLE BOOT3_BATCH_JOB_INSTANCE\n" + - "(\n" + - " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT,\n" + - " JOB_NAME VARCHAR(100) NOT NULL,\n" + - " JOB_KEY VARCHAR(32) NOT NULL,\n" + - " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + - ") ENGINE=InnoDB"; - - public final static String CREATE_BATCH_JOB_EXECUTION_TABLE = + "(\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_KEY VARCHAR(32) NOT NULL,\n" + + " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_EXECUTION_TABLE = "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION\n" + - "(\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT,\n" + - " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + - " CREATE_TIME DATETIME(6) NOT NULL,\n" + - " START_TIME DATETIME(6) DEFAULT NULL,\n" + - " END_TIME DATETIME(6) DEFAULT NULL,\n" + - " STATUS VARCHAR(10),\n" + - " EXIT_CODE VARCHAR(2500),\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " LAST_UPDATED DATETIME(6),\n" + - " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + - " references BOOT3_BATCH_JOB_INSTANCE (JOB_INSTANCE_ID)\n" + - ") ENGINE=InnoDB"; - - public final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = - "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS\n" + - "(\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " PARAMETER_NAME VARCHAR(100) NOT NULL,\n" + - " PARAMETER_TYPE VARCHAR(100) NOT NULL,\n" + - " PARAMETER_VALUE VARCHAR(2500),\n" + - " IDENTIFYING CHAR(1) NOT NULL,\n" + - " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" + - ") ENGINE=InnoDB"; - - public final static String CREATE_BATCH_STEP_EXECUTION_TABLE = - "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION\n" + - "(\n" + - " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT NOT NULL,\n" + - " STEP_NAME VARCHAR(100) NOT NULL,\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " CREATE_TIME 
DATETIME(6) NOT NULL,\n" + - " START_TIME DATETIME(6) DEFAULT NULL,\n" + - " END_TIME DATETIME(6) DEFAULT NULL,\n" + - " STATUS VARCHAR(10),\n" + - " COMMIT_COUNT BIGINT,\n" + - " READ_COUNT BIGINT,\n" + - " FILTER_COUNT BIGINT,\n" + - " WRITE_COUNT BIGINT,\n" + - " READ_SKIP_COUNT BIGINT,\n" + - " WRITE_SKIP_COUNT BIGINT,\n" + - " PROCESS_SKIP_COUNT BIGINT,\n" + - " ROLLBACK_COUNT BIGINT,\n" + - " EXIT_CODE VARCHAR(2500),\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " LAST_UPDATED DATETIME(6),\n" + - " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" + - ") ENGINE=InnoDB"; - - public final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = + "(\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + + " CREATE_TIME DATETIME(6) NOT NULL,\n" + + " START_TIME DATETIME(6) DEFAULT NULL,\n" + + " END_TIME DATETIME(6) DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME(6),\n" + + " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + " references BOOT3_BATCH_JOB_INSTANCE (JOB_INSTANCE_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS (\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " PARAMETER_NAME VARCHAR(100) NOT NULL,\n" + + " PARAMETER_TYPE VARCHAR(100) NOT NULL,\n" + + " PARAMETER_VALUE VARCHAR(2500),\n" + + " IDENTIFYING CHAR(1) NOT NULL,\n" + + " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_STEP_EXECUTION_TABLE = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION (\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT NOT NULL,\n" + + " STEP_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " CREATE_TIME DATETIME(6) NOT NULL,\n" + + " START_TIME DATETIME(6) DEFAULT NULL,\n" + + " END_TIME DATETIME(6) DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " COMMIT_COUNT BIGINT,\n" + + " READ_COUNT BIGINT,\n" + + " FILTER_COUNT BIGINT,\n" + + " WRITE_COUNT BIGINT,\n" + + " READ_SKIP_COUNT BIGINT,\n" + + " WRITE_SKIP_COUNT BIGINT,\n" + + " PROCESS_SKIP_COUNT BIGINT,\n" + + " ROLLBACK_COUNT BIGINT,\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME(6),\n" + + " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT\n" + - "(\n" + - " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + - " SERIALIZED_CONTEXT TEXT,\n" + - " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + - " references BOOT3_BATCH_STEP_EXECUTION (STEP_EXECUTION_ID)\n" + - ") ENGINE=InnoDB"; - - public final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = + "(\n" + + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + + " references BOOT3_BATCH_STEP_EXECUTION 
(STEP_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT\n" + - "(\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + - " SERIALIZED_CONTEXT TEXT,\n" + - " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" + - ") ENGINE=InnoDB"; - - public final static String CREATE_BATCH_STEP_EXECUTION_SEQUENCE = - "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_SEQ\n" + - "(\n" + - " ID BIGINT NOT NULL,\n" + - " UNIQUE_KEY CHAR(1) NOT NULL,\n" + - " constraint BOOT3_UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + - ") ENGINE=InnoDB"; - - public final static String INIT_BATCH_STEP_EXECUTION_SEQ = + "(\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION (JOB_EXECUTION_ID)\n" + + ") ENGINE=InnoDB"; + + private final static String CREATE_BATCH_STEP_EXECUTION_SEQ = + "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ") ENGINE=InnoDB"; + + private final static String INIT_BATCH_STEP_EXECUTION_SEQ = "INSERT INTO BOOT3_BATCH_STEP_EXECUTION_SEQ (ID, UNIQUE_KEY)\n" + - "select *\n" + - "from (select 0 as ID, '0' as UNIQUE_KEY) as tmp\n" + - "where not exists(select * from BOOT3_BATCH_STEP_EXECUTION_SEQ)"; - - public final static String CREATE_BATCH_JOB_EXECUTION_SEQUENCE = - "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_SEQ\n" + - "(\n" + - " ID BIGINT NOT NULL,\n" + - " UNIQUE_KEY CHAR(1) NOT NULL,\n" + - " constraint BOOT3_UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + - ") ENGINE=InnoDB"; - - public final static String INIT_BATCH_JOB_EXECUTION_SEQ = + "select *\n" + + "from (select 0 as ID, '0' as UNIQUE_KEY) as tmp\n" + + "where not exists(select * from BOOT3_BATCH_STEP_EXECUTION_SEQ)"; + + private final static String CREATE_BATCH_JOB_EXECUTION_SEQ = + "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ") ENGINE=InnoDB"; + + private final static String INIT_BATCH_JOB_EXECUTION_SEQ = "INSERT INTO BOOT3_BATCH_JOB_EXECUTION_SEQ (ID, UNIQUE_KEY)\n" + - "select *\n" + - "from (select 0 as ID, '0' as UNIQUE_KEY) as tmp\n" + - "where not exists(select * from BOOT3_BATCH_JOB_EXECUTION_SEQ)"; - - public final static String CREATE_BATCH_JOB_SEQUENCE = - "CREATE TABLE BOOT3_BATCH_JOB_SEQ\n" + - "(\n" + - " ID BIGINT NOT NULL,\n" + - " UNIQUE_KEY CHAR(1) NOT NULL,\n" + - " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + - ") ENGINE=InnoDB"; - - public final static String INIT_BATCH_JOB_SEQ = + "select *\n" + + "from (select 0 as ID, '0' as UNIQUE_KEY) as tmp\n" + + "where not exists(select * from BOOT3_BATCH_JOB_EXECUTION_SEQ)"; + + private final static String CREATE_BATCH_JOB_SEQ = + "CREATE TABLE BOOT3_BATCH_JOB_SEQ (\n" + + " ID BIGINT NOT NULL,\n" + + " UNIQUE_KEY CHAR(1) NOT NULL,\n" + + " constraint UNIQUE_KEY_UN unique (UNIQUE_KEY)\n" + + ") ENGINE=InnoDB"; + + private final static String INIT_BATCH_JOB_SEQ = "INSERT INTO BOOT3_BATCH_JOB_SEQ (ID, UNIQUE_KEY)\n" + - "select *\n" + - "from (select 0 as ID, '0' as UNIQUE_KEY) as tmp\n" + - "where not 
exists(select * from BOOT3_BATCH_JOB_SEQ)";
+ "select *\n" +
+ "from (select 0 as ID, '0' as UNIQUE_KEY) as tmp\n" +
+ "where not exists(select * from BOOT3_BATCH_JOB_SEQ)";
+
@Override
public List<SqlCommand> createTask3Tables() {
return Arrays.asList(
@@ -221,7 +235,10 @@ public List<SqlCommand> createTask3Tables() {
SqlCommand.from(CREATE_TASK_TASK_BATCH),
SqlCommand.from(CREATE_TASK_SEQ_SEQUENCE),
SqlCommand.from(CREATE_TASK_LOCK_TABLE),
- SqlCommand.from(INIT_TASK_SEQ));
+ SqlCommand.from(INIT_TASK_SEQ),
+ SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_TABLE),
+ SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_SEQ_TABLE),
+ SqlCommand.from(INIT_TASK_EXECUTION_METADATA_SEQ));
}
@Override
@@ -233,11 +250,13 @@ public List<SqlCommand> createBatch5Tables() {
SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_TABLE),
SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE),
SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE),
- SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_SEQUENCE),
- SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_SEQUENCE),
- SqlCommand.from(CREATE_BATCH_JOB_SEQUENCE),
+ SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_SEQ),
SqlCommand.from(INIT_BATCH_STEP_EXECUTION_SEQ),
+ SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_SEQ),
SqlCommand.from(INIT_BATCH_JOB_EXECUTION_SEQ),
- SqlCommand.from(INIT_BATCH_JOB_SEQ));
+ SqlCommand.from(CREATE_BATCH_JOB_SEQ),
+ SqlCommand.from(INIT_BATCH_JOB_SEQ)
+ );
}
+
}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V8__RenameLowerCaseTables.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V8__RenameLowerCaseTables.java
new file mode 100644 index 0000000000..4d003291dd
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V8__RenameLowerCaseTables.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration.mysql;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
+import org.springframework.cloud.dataflow.server.db.migration.AbstractCaseSensitiveMigration;
+/**
+ * Since MySQL and MariaDB can operate in case-sensitive mode for table and column names, the TASK_-prefixed tables must be renamed to uppercase.
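+ * <p>
+ * Illustration only: the actual statements come from the {@code RENAME_*} constants
+ * inherited from {@code AbstractCaseSensitiveMigration} (not shown in this diff), but
+ * they are expected to be plain renames of the form:
+ * <pre>
+ * RENAME TABLE boot3_task_execution_metadata TO BOOT3_TASK_EXECUTION_METADATA;
+ * RENAME TABLE boot3_task_execution_metadata_seq TO BOOT3_TASK_EXECUTION_METADATA_SEQ;
+ * </pre>
+ *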
+ * @author Corneil du Plessis
+ */
+public class V8__RenameLowerCaseTables extends AbstractCaseSensitiveMigration {
+ @Override
+ public List<SqlCommand> getCommands() {
+ return Arrays.asList(
+ SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_LC),
+ SqlCommand.from(RENAME_TASK_EXECUTION_METADATA),
+ SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_SEQ_LC_TBL),
+ SqlCommand.from(RENAME_TASK_EXECUTION_METADATA_SEQ_TBL)
+ );
+ }
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V9__AddAggregateViews.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V9__AddAggregateViews.java
new file mode 100644 index 0000000000..c410878f66
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/mysql/V9__AddAggregateViews.java
@@ -0,0 +1,7 @@
+package org.springframework.cloud.dataflow.server.db.migration.mysql;
+
+import org.springframework.cloud.dataflow.server.db.migration.AbstractAggregateViewMigration;
+
+public class V9__AddAggregateViews extends AbstractAggregateViewMigration {
+
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V7__Boot3_Add_Task3_Batch5_Schema.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V7__Boot3_Add_Task3_Batch5_Schema.java
index ae421ad7be..1c27ef910b 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V7__Boot3_Add_Task3_Batch5_Schema.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V7__Boot3_Add_Task3_Batch5_Schema.java
@@ -33,125 +33,138 @@ public class V7__Boot3_Add_Task3_Batch5_Schema extends AbstractBoot3InitialSetup
public final static String CREATE_TASK_EXECUTION_TABLE =
"CREATE TABLE BOOT3_TASK_EXECUTION (\n" +
- " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" +
- " START_TIME DATETIME(6) DEFAULT NULL,\n" +
- " END_TIME DATETIME(6) DEFAULT NULL,\n" +
- " TASK_NAME VARCHAR(100),\n" +
- " EXIT_CODE INTEGER,\n" +
- " EXIT_MESSAGE VARCHAR(2500),\n" +
- " ERROR_MESSAGE VARCHAR(2500),\n" +
- " LAST_UPDATED TIMESTAMP,\n" +
- " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" +
- " PARENT_EXECUTION_ID BIGINT\n" +
- ")";
+ "TASK_EXECUTION_ID NUMBER NOT NULL PRIMARY KEY ,\n" +
+ "START_TIME TIMESTAMP(9) DEFAULT NULL,\n" +
+ "END_TIME TIMESTAMP(9) DEFAULT NULL,\n" +
+ "TASK_NAME VARCHAR2(100),\n" +
+ "EXIT_CODE INTEGER,\n" +
+ "EXIT_MESSAGE VARCHAR2(2500),\n" +
+ "ERROR_MESSAGE VARCHAR2(2500),\n" +
+ "LAST_UPDATED TIMESTAMP(9),\n" +
+ "EXTERNAL_EXECUTION_ID VARCHAR2(255),\n" +
+ "PARENT_EXECUTION_ID NUMBER\n" +
+ ")SEGMENT CREATION IMMEDIATE";
public final static String CREATE_TASK_EXECUTION_PARAMS_TABLE =
"CREATE TABLE BOOT3_TASK_EXECUTION_PARAMS (\n" +
- " TASK_EXECUTION_ID BIGINT NOT NULL,\n" +
- " TASK_PARAM VARCHAR(2500),\n" +
- " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" +
- " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" +
- ")";
+ "TASK_EXECUTION_ID NUMBER NOT NULL,\n" +
+ "TASK_PARAM VARCHAR2(2500),\n" +
+ " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" +
+ " references BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" +
+ ")SEGMENT CREATION IMMEDIATE";
public final static String CREATE_TASK_TASK_BATCH =
"CREATE TABLE BOOT3_TASK_TASK_BATCH (\n" +
- "
TASK_EXECUTION_ID BIGINT NOT NULL,\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " constraint BOOT3_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + - " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + - ")"; + "TASK_EXECUTION_ID NUMBER NOT NULL,\n" + + "JOB_EXECUTION_ID NUMBER NOT NULL,\n" + + " constraint BOOT3_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ") SEGMENT CREATION IMMEDIATE"; public final static String CREATE_TASK_LOCK_TABLE = "CREATE TABLE BOOT3_TASK_LOCK (\n" + - " LOCK_KEY CHAR(36) NOT NULL,\n" + - " REGION VARCHAR(100) NOT NULL,\n" + - " CLIENT_ID CHAR(36),\n" + - " CREATED_DATE DATETIME(6) NOT NULL,\n" + - " constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + - ")"; + "LOCK_KEY VARCHAR2(36) NOT NULL,\n" + + "REGION VARCHAR2(100) NOT NULL,\n" + + "CLIENT_ID VARCHAR2(36),\n" + + "CREATED_DATE TIMESTAMP(9) NOT NULL,\n" + + "constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ")SEGMENT CREATION IMMEDIATE"; private final static String CREATE_TASK_SEQ_SEQUENCE = "CREATE SEQUENCE BOOT3_TASK_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE"; + private final static String CREATE_TASK_EXECUTION_METADATA_TABLE = "CREATE TABLE BOOT3_TASK_EXECUTION_METADATA (\n" + + " ID NUMBER NOT NULL,\n" + + " TASK_EXECUTION_ID NUMBER NOT NULL,\n" + + " TASK_EXECUTION_MANIFEST CLOB,\n" + + " primary key (ID),\n" + + " CONSTRAINT BOOT3_TASK_METADATA_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ") SEGMENT CREATION IMMEDIATE"; + + private final static String CREATE_TASK_EXECUTION_METADATA_SEQ = "CREATE SEQUENCE BOOT3_TASK_EXECUTION_METADATA_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 ORDER NOCYCLE"; + public final static String CREATE_BATCH_JOB_INSTANCE_TABLE = "CREATE TABLE BOOT3_BATCH_JOB_INSTANCE (\n" + - " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT,\n" + - " JOB_NAME VARCHAR(100) NOT NULL,\n" + - " JOB_KEY VARCHAR(32) NOT NULL,\n" + - " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + - ")"; + "JOB_INSTANCE_ID NUMBER(19,0) NOT NULL PRIMARY KEY ,\n" + + "VERSION NUMBER(19,0) ,\n" + + "JOB_NAME VARCHAR2(100 char) NOT NULL,\n" + + "JOB_KEY VARCHAR2(32 char) NOT NULL,\n" + + " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ") SEGMENT CREATION IMMEDIATE"; public final static String CREATE_BATCH_JOB_EXECUTION_TABLE = "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION (\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT,\n" + - " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + - " CREATE_TIME DATETIME(6) NOT NULL,\n" + - " START_TIME DATETIME(6) DEFAULT NULL,\n" + - " END_TIME DATETIME(6) DEFAULT NULL,\n" + - " STATUS VARCHAR(10),\n" + - " EXIT_CODE VARCHAR(2500),\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " LAST_UPDATED DATETIME(6),\n" + - " JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,\n" + - " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + - " references BOOT3_BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + - ")"; + "JOB_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY,\n" + + "VERSION NUMBER(19,0),\n" + + "JOB_INSTANCE_ID NUMBER(19,0) NOT NULL,\n" + + "CREATE_TIME TIMESTAMP(9) NOT NULL,\n" + + "START_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + "END_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + "STATUS VARCHAR2(10 char),\n" + + "EXIT_CODE VARCHAR2(2500 char),\n" + + "EXIT_MESSAGE VARCHAR2(2500 char),\n" + + "LAST_UPDATED TIMESTAMP(9),\n" + 
+ "JOB_CONFIGURATION_LOCATION VARCHAR(2500 char) NULL,\n" + + " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + "references BOOT3_BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + + ") SEGMENT CREATION IMMEDIATE"; public final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS (\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " PARAMETER_NAME VARCHAR(100) NOT NULL,\n" + - " PARAMETER_TYPE VARCHAR(100) NOT NULL,\n" + - " PARAMETER_VALUE VARCHAR(2500),\n" + - " IDENTIFYING CHAR(1) NOT NULL,\n" + - " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + - ")"; + "JOB_EXECUTION_ID NUMBER(19,0) NOT NULL,\n" + + "TYPE_CD VARCHAR2(6 char) NOT NULL,\n" + + "KEY_NAME VARCHAR2(100 char) NOT NULL,\n" + + "STRING_VAL VARCHAR2(250 char),\n" + + "DATE_VAL TIMESTAMP(9) DEFAULT NULL,\n" + + "LONG_VAL NUMBER(19,0),\n" + + "DOUBLE_VAL NUMBER,\n" + + "IDENTIFYING CHAR(1) NOT NULL,\n" + + " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ") SEGMENT CREATION IMMEDIATE"; public final static String CREATE_BATCH_STEP_EXECUTION_TABLE = "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION (\n" + - " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT NOT NULL,\n" + - " STEP_NAME VARCHAR(100) NOT NULL,\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " CREATE_TIME DATETIME(6) NOT NULL,\n" + - " START_TIME DATETIME(6) NOT NULL,\n" + - " END_TIME DATETIME(6) DEFAULT NULL,\n" + - " STATUS VARCHAR(10),\n" + - " COMMIT_COUNT BIGINT,\n" + - " READ_COUNT BIGINT,\n" + - " FILTER_COUNT BIGINT,\n" + - " WRITE_COUNT BIGINT,\n" + - " READ_SKIP_COUNT BIGINT,\n" + - " WRITE_SKIP_COUNT BIGINT,\n" + - " PROCESS_SKIP_COUNT BIGINT,\n" + - " ROLLBACK_COUNT BIGINT,\n" + - " EXIT_CODE VARCHAR(2500),\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " LAST_UPDATED DATETIME(6),\n" + - " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + - ")"; + "STEP_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY,\n" + + "VERSION NUMBER(19,0) NOT NULL,\n" + + "STEP_NAME VARCHAR2(100 char) NOT NULL,\n" + + "JOB_EXECUTION_ID NUMBER(19,0) NOT NULL,\n" + + "CREATE_TIME TIMESTAMP(9) NOT NULL,\n" + + "START_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + "END_TIME TIMESTAMP(9) DEFAULT NULL,\n" + + "STATUS VARCHAR2(10 char),\n" + + "COMMIT_COUNT NUMBER(19,0),\n" + + "READ_COUNT NUMBER(19,0),\n" + + "FILTER_COUNT NUMBER(19,0),\n" + + "WRITE_COUNT NUMBER(19,0),\n" + + "READ_SKIP_COUNT NUMBER(19,0),\n" + + "WRITE_SKIP_COUNT NUMBER(19,0),\n" + + "PROCESS_SKIP_COUNT NUMBER(19,0),\n" + + "ROLLBACK_COUNT NUMBER(19,0),\n" + + "EXIT_CODE VARCHAR2(2500 char),\n" + + "EXIT_MESSAGE VARCHAR2(2500 char),\n" + + "LAST_UPDATED TIMESTAMP(9),\n" + + " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ") SEGMENT CREATION IMMEDIATE"; public final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT (\n" + - " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + - " SERIALIZED_CONTEXT TEXT,\n" + - " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + - " references BOOT3_BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + - ")"; + "STEP_EXECUTION_ID NUMBER(19,0) NOT NULL 
PRIMARY KEY,\n" + + "SHORT_CONTEXT VARCHAR2(2500 char) NOT NULL,\n" + + "SERIALIZED_CONTEXT CLOB,\n" + + " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + + " references BOOT3_BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + + ") SEGMENT CREATION IMMEDIATE"; public final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT (\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + - " SERIALIZED_CONTEXT TEXT,\n" + - " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + - ")"; + "JOB_EXECUTION_ID NUMBER(19,0) NOT NULL PRIMARY KEY,\n" + + "SHORT_CONTEXT VARCHAR2(2500 char) NOT NULL,\n" + + "SERIALIZED_CONTEXT CLOB,\n" + + " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ") SEGMENT CREATION IMMEDIATE"; public final static String CREATE_BATCH_STEP_EXECUTION_SEQUENCE = "CREATE SEQUENCE BOOT3_BATCH_STEP_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE"; @@ -169,7 +182,9 @@ public List createTask3Tables() { SqlCommand.from(CREATE_TASK_EXECUTION_PARAMS_TABLE), SqlCommand.from(CREATE_TASK_TASK_BATCH), SqlCommand.from(CREATE_TASK_SEQ_SEQUENCE), - SqlCommand.from(CREATE_TASK_LOCK_TABLE)); + SqlCommand.from(CREATE_TASK_LOCK_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_SEQ)); } @Override diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V8__AddAggregateViews.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V8__AddAggregateViews.java new file mode 100644 index 0000000000..75f9af4815 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/oracle/V8__AddAggregateViews.java @@ -0,0 +1,7 @@ +package org.springframework.cloud.dataflow.server.db.migration.oracle; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractAggregateViewMigration; + +public class V8__AddAggregateViews extends AbstractAggregateViewMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V7__Boot3_Add_Task3_Batch5_Schema.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V7__Boot3_Add_Task3_Batch5_Schema.java index 622b6fd45a..1898bab5d3 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V7__Boot3_Add_Task3_Batch5_Schema.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V7__Boot3_Add_Task3_Batch5_Schema.java @@ -33,125 +33,137 @@ public class V7__Boot3_Add_Task3_Batch5_Schema extends AbstractBoot3InitialSetup public final static String CREATE_TASK_EXECUTION_TABLE = "CREATE TABLE BOOT3_TASK_EXECUTION (\n" + - " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " START_TIME TIMESTAMP DEFAULT NULL,\n" + - " END_TIME TIMESTAMP DEFAULT NULL,\n" + - " TASK_NAME VARCHAR(100),\n" + - " EXIT_CODE INTEGER,\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " ERROR_MESSAGE VARCHAR(2500),\n" + - " LAST_UPDATED TIMESTAMP,\n" + 
- " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + - " PARENT_EXECUTION_ID BIGINT\n" + - ")"; + " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " START_TIME TIMESTAMP DEFAULT NULL,\n" + + " END_TIME TIMESTAMP DEFAULT NULL,\n" + + " TASK_NAME VARCHAR(100),\n" + + " EXIT_CODE INTEGER,\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " ERROR_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP,\n" + + " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + + " PARENT_EXECUTION_ID BIGINT\n" + + ")"; public final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = "CREATE TABLE BOOT3_TASK_EXECUTION_PARAMS (\n" + - " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + - " TASK_PARAM VARCHAR(2500),\n" + - " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + - " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + - ")"; + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_PARAM VARCHAR(2500),\n" + + " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; public final static String CREATE_TASK_TASK_BATCH = "CREATE TABLE BOOT3_TASK_TASK_BATCH (\n" + - " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " constraint BOOT3_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + - " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + - ")"; + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " constraint BOOT3_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; public final static String CREATE_TASK_LOCK_TABLE = "CREATE TABLE BOOT3_TASK_LOCK (\n" + - " LOCK_KEY CHAR(36) NOT NULL,\n" + - " REGION VARCHAR(100) NOT NULL,\n" + - " CLIENT_ID CHAR(36),\n" + - " CREATED_DATE TIMESTAMP NOT NULL,\n" + - " constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + - ")"; + " LOCK_KEY CHAR(36) NOT NULL,\n" + + " REGION VARCHAR(100) NOT NULL,\n" + + " CLIENT_ID CHAR(36),\n" + + " CREATED_DATE TIMESTAMP NOT NULL,\n" + + " constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ")"; private final static String CREATE_TASK_SEQ_SEQUENCE = "CREATE SEQUENCE BOOT3_TASK_SEQ MAXVALUE 9223372036854775807 NO CYCLE"; + private final static String CREATE_TASK_EXECUTION_METADATA_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_METADATA (\n" + + " ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_MANIFEST TEXT,\n" + + " primary key (ID),\n" + + " CONSTRAINT BOOT3_TASK_METADATA_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ");"; + + private final static String CREATE_TASK_EXECUTION_METADATA_SEQ = + "CREATE SEQUENCE BOOT3_TASK_EXECUTION_METADATA_SEQ MAXVALUE 9223372036854775807 NO CYCLE"; + public final static String CREATE_BATCH_JOB_INSTANCE_TABLE = "CREATE TABLE BOOT3_BATCH_JOB_INSTANCE (\n" + - " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT,\n" + - " JOB_NAME VARCHAR(100) NOT NULL,\n" + - " JOB_KEY VARCHAR(32) NOT NULL,\n" + - " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + - ")"; + " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_KEY VARCHAR(32) NOT NULL,\n" + + " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ")"; public final static String CREATE_BATCH_JOB_EXECUTION_TABLE = "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION (\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION 
BIGINT,\n" + - " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + - " CREATE_TIME TIMESTAMP NOT NULL,\n" + - " START_TIME TIMESTAMP DEFAULT NULL,\n" + - " END_TIME TIMESTAMP DEFAULT NULL,\n" + - " STATUS VARCHAR(10),\n" + - " EXIT_CODE VARCHAR(2500),\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " LAST_UPDATED TIMESTAMP,\n" + - " JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,\n" + - " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + - " references BOOT3_BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + - ")"; + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + + " CREATE_TIME TIMESTAMP NOT NULL,\n" + + " START_TIME TIMESTAMP DEFAULT NULL,\n" + + " END_TIME TIMESTAMP DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED TIMESTAMP,\n" + + " JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,\n" + + " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + " references BOOT3_BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + + ")"; public final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS (\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " PARAMETER_NAME VARCHAR(100) NOT NULL,\n" + - " PARAMETER_TYPE VARCHAR(100) NOT NULL,\n" + - " PARAMETER_VALUE VARCHAR(2500),\n" + - " IDENTIFYING CHAR(1) NOT NULL,\n" + - " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + - ")"; + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " PARAMETER_NAME VARCHAR(100) NOT NULL,\n" + + " PARAMETER_TYPE VARCHAR(100) NOT NULL,\n" + + " PARAMETER_VALUE VARCHAR(2500),\n" + + " IDENTIFYING CHAR(1) NOT NULL,\n" + + " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; public final static String CREATE_BATCH_STEP_EXECUTION_TABLE = "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION (\n" + - " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT NOT NULL,\n" + - " STEP_NAME VARCHAR(100) NOT NULL,\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " CREATE_TIME TIMESTAMP NOT NULL,\n" + - " START_TIME TIMESTAMP NOT NULL,\n" + - " END_TIME TIMESTAMP DEFAULT NULL,\n" + - " STATUS VARCHAR(10),\n" + - " COMMIT_COUNT BIGINT,\n" + - " READ_COUNT BIGINT,\n" + - " FILTER_COUNT BIGINT,\n" + - " WRITE_COUNT BIGINT,\n" + - " READ_SKIP_COUNT BIGINT,\n" + - " WRITE_SKIP_COUNT BIGINT,\n" + - " PROCESS_SKIP_COUNT BIGINT,\n" + - " ROLLBACK_COUNT BIGINT,\n" + - " EXIT_CODE VARCHAR(2500),\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " LAST_UPDATED TIMESTAMP,\n" + - " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + - ")"; + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT NOT NULL,\n" + + " STEP_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " CREATE_TIME TIMESTAMP NOT NULL,\n" + + " START_TIME TIMESTAMP DEFAULT NULL,\n" + + " END_TIME TIMESTAMP DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " COMMIT_COUNT BIGINT,\n" + + " READ_COUNT BIGINT,\n" + + " FILTER_COUNT BIGINT,\n" + + " WRITE_COUNT BIGINT,\n" + + " READ_SKIP_COUNT BIGINT,\n" + + " WRITE_SKIP_COUNT BIGINT,\n" + + " PROCESS_SKIP_COUNT BIGINT,\n" + + " ROLLBACK_COUNT BIGINT,\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED 
TIMESTAMP,\n" + + " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; public final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT (\n" + - " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + - " SERIALIZED_CONTEXT TEXT,\n" + - " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + - " references BOOT3_BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + - ")"; + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + + " references BOOT3_BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + + ")"; public final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT (\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + - " SERIALIZED_CONTEXT TEXT,\n" + - " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + - ")"; + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; public final static String CREATE_BATCH_STEP_EXECUTION_SEQUENCE = "CREATE SEQUENCE BOOT3_BATCH_STEP_EXECUTION_SEQ MAXVALUE 9223372036854775807 NO CYCLE"; @@ -169,7 +181,10 @@ public List createTask3Tables() { SqlCommand.from(CREATE_TASK_EXECUTION_PARAMS_TABLE), SqlCommand.from(CREATE_TASK_TASK_BATCH), SqlCommand.from(CREATE_TASK_SEQ_SEQUENCE), - SqlCommand.from(CREATE_TASK_LOCK_TABLE)); + SqlCommand.from(CREATE_TASK_LOCK_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_SEQ) + ); } @Override @@ -183,6 +198,8 @@ public List createBatch5Tables() { SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE), SqlCommand.from(CREATE_BATCH_STEP_EXECUTION_SEQUENCE), SqlCommand.from(CREATE_BATCH_JOB_EXECUTION_SEQUENCE), - SqlCommand.from(CREATE_BATCH_JOB_SEQUENCE)); + SqlCommand.from(CREATE_BATCH_JOB_SEQUENCE) + ); } + } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V8__AddAggregateViews.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V8__AddAggregateViews.java new file mode 100644 index 0000000000..f85e45b606 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V8__AddAggregateViews.java @@ -0,0 +1,22 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.postgresql; + +import org.springframework.cloud.dataflow.server.db.migration.AbstractAggregateViewMigration; + +public class V8__AddAggregateViews extends AbstractAggregateViewMigration { + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V9__ChangeTextTypes.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V9__ChangeTextTypes.java new file mode 100644 index 0000000000..6b084cce61 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/postgresql/V9__ChangeTextTypes.java @@ -0,0 +1,39 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.cloud.dataflow.server.db.migration.postgresql; + +import org.flywaydb.core.api.migration.Context; + +import org.springframework.cloud.dataflow.common.flyway.AbstractMigration; + +import static org.springframework.cloud.dataflow.server.db.migration.PostgreSQLTextToOID.convertColumn; +public class V9__ChangeTextTypes extends AbstractMigration { + + public V9__ChangeTextTypes() { + super(null); + } + + + @Override + public void migrate(Context context) throws Exception { + convertColumn("app_registration", "id", "uri", context.getConfiguration().getDataSource()); + convertColumn("app_registration", "id", "metadata_uri", context.getConfiguration().getDataSource()); + //convertColumn("audit_records", "id", "audit_data", context.getConfiguration().getDataSource()); + convertColumn("stream_definitions", "definition_name", "definition", context.getConfiguration().getDataSource()); + convertColumn("stream_definitions", "definition_name", "original_definition", context.getConfiguration().getDataSource()); + convertColumn("task_definitions", "definition_name", "definition", context.getConfiguration().getDataSource()); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V7__Boot3_Add_Task3_Batch5_Schema.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V7__Boot3_Add_Task3_Batch5_Schema.java index 168c3ee9b5..4b812668a2 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V7__Boot3_Add_Task3_Batch5_Schema.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V7__Boot3_Add_Task3_Batch5_Schema.java @@ -33,134 +33,146 @@ public class V7__Boot3_Add_Task3_Batch5_Schema extends AbstractBoot3InitialSetup public final static String CREATE_TASK_EXECUTION_TABLE = "CREATE TABLE BOOT3_TASK_EXECUTION (\n" + - " 
TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " START_TIME DATETIME(6) DEFAULT NULL,\n" + - " END_TIME DATETIME(6) DEFAULT NULL,\n" + - " TASK_NAME VARCHAR(100),\n" + - " EXIT_CODE INTEGER,\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " ERROR_MESSAGE VARCHAR(2500),\n" + - " LAST_UPDATED TIMESTAMP,\n" + - " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + - " PARENT_EXECUTION_ID BIGINT\n" + - ")"; + " TASK_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " START_TIME DATETIME2 DEFAULT NULL,\n" + + " END_TIME DATETIME2 DEFAULT NULL,\n" + + " TASK_NAME VARCHAR(100),\n" + + " EXIT_CODE INTEGER,\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " ERROR_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME2,\n" + + " EXTERNAL_EXECUTION_ID VARCHAR(255),\n" + + " PARENT_EXECUTION_ID BIGINT\n" + + ")"; public final static String CREATE_TASK_EXECUTION_PARAMS_TABLE = "CREATE TABLE BOOT3_TASK_EXECUTION_PARAMS (\n" + - " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + - " TASK_PARAM VARCHAR(2500),\n" + - " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + - " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + - ")"; + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_PARAM VARCHAR(2500),\n" + + " constraint BOOT3_TASK_EXEC_PARAMS_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; public final static String CREATE_TASK_TASK_BATCH = "CREATE TABLE BOOT3_TASK_TASK_BATCH (\n" + - " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " constraint BOOT3_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + - " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + - ")"; + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " constraint BOOT3_EXEC_BATCH_FK foreign key (TASK_EXECUTION_ID)\n" + + " references BOOT3_TASK_EXECUTION(TASK_EXECUTION_ID)\n" + + ")"; public final static String CREATE_TASK_LOCK_TABLE = "CREATE TABLE BOOT3_TASK_LOCK (\n" + - " LOCK_KEY CHAR(36) NOT NULL,\n" + - " REGION VARCHAR(100) NOT NULL,\n" + - " CLIENT_ID CHAR(36),\n" + - " CREATED_DATE DATETIME(6) NOT NULL,\n" + - " constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + - ")"; + " LOCK_KEY CHAR(36) NOT NULL,\n" + + " REGION VARCHAR(100) NOT NULL,\n" + + " CLIENT_ID CHAR(36),\n" + + " CREATED_DATE DATETIME2 NOT NULL,\n" + + " constraint BOOT3_LOCK_PK primary key (LOCK_KEY, REGION)\n" + + ")"; + + public final static String CREATE_TASK_EXECUTION_METADATA_TABLE = + "CREATE TABLE BOOT3_TASK_EXECUTION_METADATA (\n" + + " ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_ID BIGINT NOT NULL,\n" + + " TASK_EXECUTION_MANIFEST VARCHAR(MAX) NULL,\n" + + " primary key (ID),\n" + + " CONSTRAINT BOOT3_TASK_METADATA_FK FOREIGN KEY (TASK_EXECUTION_ID) REFERENCES BOOT3_TASK_EXECUTION (TASK_EXECUTION_ID)\n" + + ")"; + + public final static String CREATE_TASK_EXECUTION_METADATA_SEQ = + "CREATE SEQUENCE BOOT3_TASK_EXECUTION_METADATA_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NO CACHE NO CYCLE"; private final static String CREATE_TASK_SEQ_SEQUENCE = - "CREATE SEQUENCE BOOT3_TASK_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE"; + "CREATE SEQUENCE BOOT3_TASK_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NO CACHE NO CYCLE"; public final static String CREATE_BATCH_JOB_INSTANCE_TABLE = "CREATE TABLE BOOT3_BATCH_JOB_INSTANCE (\n" + - " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT,\n" + - " JOB_NAME VARCHAR(100) NOT NULL,\n" + - " 
JOB_KEY VARCHAR(32) NOT NULL,\n" + - " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + - ")"; + " JOB_INSTANCE_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_NAME VARCHAR(100) NOT NULL,\n" + + " JOB_KEY VARCHAR(32) NOT NULL,\n" + + " constraint BOOT3_JOB_INST_UN unique (JOB_NAME, JOB_KEY)\n" + + ")"; public final static String CREATE_BATCH_JOB_EXECUTION_TABLE = "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION (\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT,\n" + - " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + - " CREATE_TIME DATETIME(6) NOT NULL,\n" + - " START_TIME DATETIME(6) DEFAULT NULL,\n" + - " END_TIME DATETIME(6) DEFAULT NULL,\n" + - " STATUS VARCHAR(10),\n" + - " EXIT_CODE VARCHAR(2500),\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " LAST_UPDATED DATETIME(6),\n" + - " JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,\n" + - " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + - " references BOOT3_BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + - ")"; + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT,\n" + + " JOB_INSTANCE_ID BIGINT NOT NULL,\n" + + " CREATE_TIME DATETIME2 NOT NULL,\n" + + " START_TIME DATETIME2 DEFAULT NULL,\n" + + " END_TIME DATETIME2 DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME2,\n" + + " JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL,\n" + + " constraint BOOT3_JOB_INST_EXEC_FK foreign key (JOB_INSTANCE_ID)\n" + + " references BOOT3_BATCH_JOB_INSTANCE(JOB_INSTANCE_ID)\n" + + ")"; public final static String CREATE_BATCH_JOB_EXECUTION_PARAMS_TABLE = "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_PARAMS (\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " PARAMETER_NAME VARCHAR(100) NOT NULL,\n" + - " PARAMETER_TYPE VARCHAR(100) NOT NULL,\n" + - " PARAMETER_VALUE VARCHAR(2500),\n" + - " IDENTIFYING CHAR(1) NOT NULL,\n" + - " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + - ")"; + " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " PARAMETER_NAME VARCHAR(100) NOT NULL,\n" + + " PARAMETER_TYPE VARCHAR(100) NOT NULL,\n" + + " PARAMETER_VALUE VARCHAR(2500),\n" + + " IDENTIFYING CHAR(1) NOT NULL,\n" + + " constraint BOOT3_JOB_EXEC_PARAMS_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; public final static String CREATE_BATCH_STEP_EXECUTION_TABLE = "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION (\n" + - " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " VERSION BIGINT NOT NULL,\n" + - " STEP_NAME VARCHAR(100) NOT NULL,\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL,\n" + - " CREATE_TIME DATETIME(6) NOT NULL,\n" + - " START_TIME DATETIME(6) NOT NULL,\n" + - " END_TIME DATETIME(6) DEFAULT NULL,\n" + - " STATUS VARCHAR(10),\n" + - " COMMIT_COUNT BIGINT,\n" + - " READ_COUNT BIGINT,\n" + - " FILTER_COUNT BIGINT,\n" + - " WRITE_COUNT BIGINT,\n" + - " READ_SKIP_COUNT BIGINT,\n" + - " WRITE_SKIP_COUNT BIGINT,\n" + - " PROCESS_SKIP_COUNT BIGINT,\n" + - " ROLLBACK_COUNT BIGINT,\n" + - " EXIT_CODE VARCHAR(2500),\n" + - " EXIT_MESSAGE VARCHAR(2500),\n" + - " LAST_UPDATED DATETIME(6),\n" + - " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + - ")"; + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " VERSION BIGINT NOT NULL,\n" + + " STEP_NAME VARCHAR(100) NOT NULL,\n" + + " 
JOB_EXECUTION_ID BIGINT NOT NULL,\n" + + " CREATE_TIME DATETIME2 NOT NULL,\n" + + " START_TIME DATETIME2 NOT NULL,\n" + + " END_TIME DATETIME2 DEFAULT NULL,\n" + + " STATUS VARCHAR(10),\n" + + " COMMIT_COUNT BIGINT,\n" + + " READ_COUNT BIGINT,\n" + + " FILTER_COUNT BIGINT,\n" + + " WRITE_COUNT BIGINT,\n" + + " READ_SKIP_COUNT BIGINT,\n" + + " WRITE_SKIP_COUNT BIGINT,\n" + + " PROCESS_SKIP_COUNT BIGINT,\n" + + " ROLLBACK_COUNT BIGINT,\n" + + " EXIT_CODE VARCHAR(2500),\n" + + " EXIT_MESSAGE VARCHAR(2500),\n" + + " LAST_UPDATED DATETIME2,\n" + + " constraint BOOT3_JOB_EXEC_STEP_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; public final static String CREATE_BATCH_STEP_EXECUTION_CONTEXT_TABLE = "CREATE TABLE BOOT3_BATCH_STEP_EXECUTION_CONTEXT (\n" + - " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + - " SERIALIZED_CONTEXT TEXT,\n" + - " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + - " references BOOT3_BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + - ")"; + " STEP_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_STEP_EXEC_CTX_FK foreign key (STEP_EXECUTION_ID)\n" + + " references BOOT3_BATCH_STEP_EXECUTION(STEP_EXECUTION_ID)\n" + + ")"; public final static String CREATE_BATCH_JOB_EXECUTION_CONTEXT_TABLE = "CREATE TABLE BOOT3_BATCH_JOB_EXECUTION_CONTEXT (\n" + - " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + - " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + - " SERIALIZED_CONTEXT TEXT,\n" + - " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + - " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + - ")"; + " JOB_EXECUTION_ID BIGINT NOT NULL PRIMARY KEY,\n" + + " SHORT_CONTEXT VARCHAR(2500) NOT NULL,\n" + + " SERIALIZED_CONTEXT TEXT,\n" + + " constraint BOOT3_JOB_EXEC_CTX_FK foreign key (JOB_EXECUTION_ID)\n" + + " references BOOT3_BATCH_JOB_EXECUTION(JOB_EXECUTION_ID)\n" + + ")"; public final static String CREATE_BATCH_STEP_EXECUTION_SEQUENCE = - "CREATE SEQUENCE BOOT3_BATCH_STEP_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE"; + "CREATE SEQUENCE BOOT3_BATCH_STEP_EXECUTION_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NO CACHE NO CYCLE"; public final static String CREATE_BATCH_JOB_EXECUTION_SEQUENCE = - "CREATE SEQUENCE BOOT3_BATCH_JOB_EXECUTION_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE"; + "CREATE SEQUENCE BOOT3_BATCH_JOB_EXECUTION_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NO CACHE NO CYCLE"; public final static String CREATE_BATCH_JOB_SEQUENCE = - "CREATE SEQUENCE BOOT3_BATCH_JOB_SEQ START WITH 1 MINVALUE 1 MAXVALUE 9223372036854775806 INCREMENT BY 1 NOCACHE NOCYCLE"; + "CREATE SEQUENCE BOOT3_BATCH_JOB_SEQ START WITH 0 MINVALUE 0 MAXVALUE 9223372036854775807 NO CACHE NO CYCLE"; @Override public List createTask3Tables() { @@ -169,7 +181,9 @@ public List createTask3Tables() { SqlCommand.from(CREATE_TASK_EXECUTION_PARAMS_TABLE), SqlCommand.from(CREATE_TASK_TASK_BATCH), SqlCommand.from(CREATE_TASK_SEQ_SEQUENCE), - SqlCommand.from(CREATE_TASK_LOCK_TABLE)); + SqlCommand.from(CREATE_TASK_LOCK_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_TABLE), + SqlCommand.from(CREATE_TASK_EXECUTION_METADATA_SEQ)); } @Override diff --git 
a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V8__AddAggregateViews.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V8__AddAggregateViews.java
new file mode 100644 index 0000000000..d5f32a2f27
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/sqlserver/V8__AddAggregateViews.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.db.migration.sqlserver;
+
+import org.springframework.cloud.dataflow.server.db.migration.AbstractAggregateViewMigration;
+
+public class V8__AddAggregateViews extends AbstractAggregateViewMigration {
+
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/TaskExplorerFactoryBean.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/TaskExplorerFactoryBean.java
index f4f0af2734..b282141601 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/TaskExplorerFactoryBean.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/TaskExplorerFactoryBean.java
@@ -19,9 +19,9 @@ import javax.sql.DataSource;
import org.springframework.beans.factory.FactoryBean;
+import org.springframework.cloud.dataflow.core.database.support.MultiSchemaTaskExecutionDaoFactoryBean;
import org.springframework.cloud.task.repository.TaskExplorer;
import org.springframework.cloud.task.repository.support.SimpleTaskExplorer;
-import org.springframework.cloud.task.repository.support.TaskExecutionDaoFactoryBean;
import org.springframework.util.Assert;
/**
@@ -31,19 +31,19 @@ */
public class TaskExplorerFactoryBean implements FactoryBean<TaskExplorer> {
- private DataSource dataSource;
-
+ private final DataSource dataSource;
private TaskExplorer taskExplorer;
-
- public TaskExplorerFactoryBean(DataSource dataSource) {
+ private final String tablePrefix;
+ public TaskExplorerFactoryBean(DataSource dataSource, String tablePrefix) {
Assert.notNull(dataSource, "dataSource must not be null");
this.dataSource = dataSource;
+ this.tablePrefix = tablePrefix;
}
@Override
public TaskExplorer getObject() throws Exception {
if (taskExplorer == null) {
- taskExplorer = new SimpleTaskExplorer(new TaskExecutionDaoFactoryBean(dataSource));
+ taskExplorer = new SimpleTaskExplorer(new MultiSchemaTaskExecutionDaoFactoryBean(dataSource, tablePrefix));
}
return taskExplorer;
}
@@ -53,9 +53,4 @@ public Class<?> getObjectType() {
return TaskExplorer.class;
}
- @Override
- public boolean isSingleton() {
- return true;
- }
-
}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/support/StepExecutionResourceBuilder.java
b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/support/StepExecutionResourceBuilder.java index 889ca09c2c..7939e2679b 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/support/StepExecutionResourceBuilder.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/job/support/StepExecutionResourceBuilder.java @@ -18,12 +18,19 @@ import org.springframework.batch.core.Step; import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.launch.NoSuchJobExecutionException; import org.springframework.batch.core.step.tasklet.TaskletStep; import org.springframework.cloud.dataflow.rest.job.support.StepType; import org.springframework.cloud.dataflow.rest.job.support.TaskletType; import org.springframework.cloud.dataflow.rest.resource.StepExecutionResource; +import org.springframework.cloud.dataflow.server.batch.NoSuchStepExecutionException; +import org.springframework.cloud.dataflow.server.controller.JobStepExecutionController; +import org.springframework.cloud.dataflow.server.controller.JobStepExecutionProgressController; import org.springframework.util.Assert; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + /** * Knows how to build a StepExecutionResource out of our domain model * {@link StepExecution}. @@ -33,8 +40,25 @@ */ public class StepExecutionResourceBuilder { - static public StepExecutionResource toModel(StepExecution entity) { - return new StepExecutionResource(entity.getJobExecution().getId(), entity, generateStepType(entity)); + static public StepExecutionResource toModel(StepExecution entity, String schemaTarget) { + StepExecutionResource resource = new StepExecutionResource(entity.getJobExecution().getId(), entity, generateStepType(entity), schemaTarget); + try { + resource.add( + linkTo( + methodOn(JobStepExecutionController.class) + .getStepExecution(resource.getStepExecution().getJobExecutionId(), resource.getStepExecution().getId(), schemaTarget) + ).withSelfRel() + ); + resource.add( + linkTo( + methodOn(JobStepExecutionProgressController.class) + .progress(resource.getStepExecution().getJobExecutionId(), resource.getStepExecution().getId(), schemaTarget) + ).withRel("progress") + ); + } catch (NoSuchStepExecutionException | NoSuchJobExecutionException e) { + throw new RuntimeException(e); + } + return resource; } private static String generateStepType(StepExecution stepExecution) { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/AggregateJobQueryDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/AggregateJobQueryDao.java new file mode 100644 index 0000000000..f29e0fe03c --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/AggregateJobQueryDao.java @@ -0,0 +1,57 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.repository;
+
+
+import java.util.Date;
+import java.util.List;
+
+import org.springframework.batch.core.BatchStatus;
+import org.springframework.batch.core.launch.NoSuchJobException;
+import org.springframework.batch.core.launch.NoSuchJobExecutionException;
+import org.springframework.cloud.dataflow.rest.job.JobInstanceExecutions;
+import org.springframework.cloud.dataflow.rest.job.TaskJobExecution;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.Pageable;
+
+/**
+ * Provides read access to job execution data across the Batch 4 and Batch 5 schema versions.
+ *
+ * @author Corneil du Plessis
+ */
+public interface AggregateJobQueryDao {
+    Page<JobInstanceExecutions> listJobInstances(String jobName, Pageable pageable);
+
+    Page<TaskJobExecution> listJobExecutions(String jobName, BatchStatus status, Pageable pageable) throws NoSuchJobExecutionException;
+
+    Page<TaskJobExecution> listJobExecutionsBetween(Date fromDate, Date toDate, Pageable pageable);
+
+    Page<TaskJobExecution> listJobExecutionsWithSteps(Pageable pageable);
+
+    Page<TaskJobExecution> listJobExecutionsWithStepCount(Pageable pageable);
+
+    Page<TaskJobExecution> listJobExecutionsForJobWithStepCountFilteredByJobInstanceId(int jobInstanceId, String schemaTarget, Pageable pageable);
+
+    Page<TaskJobExecution> listJobExecutionsForJobWithStepCountFilteredByTaskExecutionId(int taskExecutionId, String schemaTarget, Pageable pageable);
+
+    Page<TaskJobExecution> listJobExecutionsForJobWithStepCount(String jobName, Pageable pageable) throws NoSuchJobException;
+
+    TaskJobExecution getJobExecution(long id, String schemaTarget) throws NoSuchJobExecutionException;
+
+    JobInstanceExecutions getJobInstanceExecution(String jobName, long instanceId);
+
+    JobInstanceExecutions getJobInstanceExecutions(long id, String schemaTarget);
+}
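A hedged usage sketch of the interface above: assuming an injected AggregateJobQueryDao named dao, this is how a caller could page the completed executions of one job across schema targets. getSchemaTarget() is used elsewhere in this patch; getTaskId() and the surrounding wiring are assumptions, and the call site must declare the checked NoSuchJobExecutionException.

    // Illustrative only, not part of the patch.
    Page<TaskJobExecution> page = dao.listJobExecutions("myJob", BatchStatus.COMPLETED, PageRequest.of(0, 20));
    for (TaskJobExecution execution : page) {
        // Each row carries its schema target ("boot2" or "boot3") next to the Batch JobExecution.
        logger.info("task={} schemaTarget={}", execution.getTaskId(), execution.getSchemaTarget());
    }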
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowJobExecutionDaoContainer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowJobExecutionDaoContainer.java
new file mode 100644
index 0000000000..17bd4a9aac
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowJobExecutionDaoContainer.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.cloud.dataflow.server.repository;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.springframework.cloud.dataflow.schema.SchemaVersionTarget;
+import org.springframework.util.Assert;
+import org.springframework.util.StringUtils;
+
+/**
+ * Provides a container of {@link DataflowJobExecutionDao} instances, one per schema target.
+ */
+public class DataflowJobExecutionDaoContainer {
+    private final Map<String, DataflowJobExecutionDao> jobExecutionDaos = new HashMap<>();
+
+    public DataflowJobExecutionDaoContainer() {
+    }
+
+    public void add(String name, DataflowJobExecutionDao jobExecutionDao) {
+        jobExecutionDaos.put(name, jobExecutionDao);
+    }
+
+    public DataflowJobExecutionDao get(String schemaTarget) {
+        if(!StringUtils.hasText(schemaTarget)) {
+            schemaTarget = SchemaVersionTarget.defaultTarget().getName();
+        }
+        DataflowJobExecutionDao result = jobExecutionDaos.get(schemaTarget);
+        Assert.notNull(result, "Expected to find jobExecutionDao for " + schemaTarget);
+        return result;
+    }
+}
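The container just added resolves one DAO per schema target: a blank or null target falls back to the default SchemaVersionTarget, and an unregistered target fails the Assert. A minimal sketch of that lookup contract; the two DAO variables are placeholders:

    // Illustrative only, not part of the patch.
    DataflowJobExecutionDaoContainer container = new DataflowJobExecutionDaoContainer();
    container.add("boot2", boot2Dao); // placeholder DAO for the Batch 4 (Boot 2) schema
    container.add("boot3", boot3Dao); // placeholder DAO for the Batch 5 (Boot 3) schema
    DataflowJobExecutionDao byDefault = container.get(null);  // resolves SchemaVersionTarget.defaultTarget()
    DataflowJobExecutionDao boot3 = container.get("boot3");   // explicit schema target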
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionDaoContainer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionDaoContainer.java
new file mode 100644
index 0000000000..7badea92bf
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionDaoContainer.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.cloud.dataflow.server.repository;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.springframework.cloud.dataflow.schema.SchemaVersionTarget;
+import org.springframework.util.Assert;
+import org.springframework.util.StringUtils;
+
+/**
+ * Provides a container of {@link DataflowTaskExecutionDao} instances, one per schema target.
+ */
+public class DataflowTaskExecutionDaoContainer {
+    private final Map<String, DataflowTaskExecutionDao> taskExecutionContainer = new HashMap<>();
+
+    public DataflowTaskExecutionDaoContainer() {
+    }
+
+    public void add(String schemaTarget, DataflowTaskExecutionDao dataflowTaskExecutionDao) {
+        taskExecutionContainer.put(schemaTarget, dataflowTaskExecutionDao);
+    }
+
+    public DataflowTaskExecutionDao get(String schemaTarget) {
+        if(!StringUtils.hasText(schemaTarget)) {
+            schemaTarget = SchemaVersionTarget.defaultTarget().getName();
+        }
+        DataflowTaskExecutionDao result = taskExecutionContainer.get(schemaTarget);
+        Assert.notNull(result, "Expected DataflowTaskExecutionDao for " + schemaTarget);
+        return result;
+    }
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionMetadataDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionMetadataDao.java
index c94bcb79e1..58bf0de99c 100644
--- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionMetadataDao.java
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionMetadataDao.java
@@ -28,6 +28,7 @@
  */
 public interface DataflowTaskExecutionMetadataDao {
+
 	/**
 	 * Saves a {@code TaskManifest} related to the supplied {@code TaskExecution}
 	 *
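The same add/get pattern repeats for every per-schema DAO type; presumably the server configuration iterates the registered schema targets and registers one DAO per target. A sketch under that assumption: the SchemaService iteration and the getTaskPrefix() accessor are assumed here, while the JdbcDataflowTaskExecutionDao constructor matches the signature shown later in this patch.

    // Hypothetical wiring, not the patch's actual configuration class.
    DataflowTaskExecutionDaoContainer container = new DataflowTaskExecutionDaoContainer();
    for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) {
        TaskProperties taskProperties = new TaskProperties();
        taskProperties.setTablePrefix(target.getTaskPrefix()); // assumed prefix accessor
        container.add(target.getName(), new JdbcDataflowTaskExecutionDao(dataSource, taskProperties));
    }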
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionMetadataDaoContainer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionMetadataDaoContainer.java
new file mode 100644
index 0000000000..194a75663d
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DataflowTaskExecutionMetadataDaoContainer.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.repository;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.springframework.cloud.dataflow.schema.SchemaVersionTarget;
+import org.springframework.util.Assert;
+import org.springframework.util.StringUtils;
+
+/**
+ * Provides a container of {@link DataflowTaskExecutionMetadataDao} instances, one per schema target.
+ *
+ * @author Corneil du Plessis
+ */
+public class DataflowTaskExecutionMetadataDaoContainer {
+    private final Map<String, DataflowTaskExecutionMetadataDao> dataflowTaskExecutionMetadataDaos = new HashMap<>();
+
+    public DataflowTaskExecutionMetadataDaoContainer() {
+    }
+
+    public void add(String schemaTarget, DataflowTaskExecutionMetadataDao dao) {
+        dataflowTaskExecutionMetadataDaos.put(schemaTarget, dao);
+    }
+
+    public DataflowTaskExecutionMetadataDao get(String schemaTarget) {
+        if(!StringUtils.hasText(schemaTarget)) {
+            schemaTarget = SchemaVersionTarget.defaultTarget().getName();
+        }
+        DataflowTaskExecutionMetadataDao result = dataflowTaskExecutionMetadataDaos.get(schemaTarget);
+        Assert.notNull(result, "Expected DataflowTaskExecutionMetadataDao for " + schemaTarget);
+        return result;
+    }
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DefaultTaskDefinitionReader.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DefaultTaskDefinitionReader.java
new file mode 100644
index 0000000000..bbe6ebfedc
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DefaultTaskDefinitionReader.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.repository;
+
+import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader;
+import org.springframework.cloud.dataflow.core.TaskDefinition;
+
+/**
+ * Default implementation for reading task definitions on behalf of the aggregate task explorer.
+ *
+ * @author Corneil du Plessis
+ */
+public class DefaultTaskDefinitionReader implements TaskDefinitionReader {
+    private final TaskDefinitionRepository taskDefinitionRepository;
+
+    public DefaultTaskDefinitionReader(TaskDefinitionRepository taskDefinitionRepository) {
+        this.taskDefinitionRepository = taskDefinitionRepository;
+    }
+
+    @Override
+    public TaskDefinition findTaskDefinition(String taskName) {
+        return taskDefinitionRepository.findByTaskName(taskName);
+    }
+}
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DefaultTaskDeploymentReader.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DefaultTaskDeploymentReader.java
new file mode 100644
index 0000000000..0806660aeb
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/DefaultTaskDeploymentReader.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.cloud.dataflow.server.repository;
+
+import org.springframework.cloud.dataflow.aggregate.task.TaskDeploymentReader;
+import org.springframework.cloud.dataflow.core.TaskDeployment;
+
+/**
+ * Default implementation for reading task deployments on behalf of the aggregate task explorer.
+ *
+ * @author Corneil du Plessis
+ */
+public class DefaultTaskDeploymentReader implements TaskDeploymentReader {
+    private final TaskDeploymentRepository taskDeploymentRepository;
+
+    public DefaultTaskDeploymentReader(TaskDeploymentRepository taskDeploymentRepository) {
+        this.taskDeploymentRepository = taskDeploymentRepository;
+    }
+
+    @Override
+    public TaskDeployment getDeployment(String externalTaskId) {
+        return taskDeploymentRepository.findByTaskDeploymentId(externalTaskId);
+    }
+
+    @Override
+    public TaskDeployment getDeployment(String externalTaskId, String platform) {
+        return taskDeploymentRepository.findByTaskDeploymentIdAndPlatformName(externalTaskId, platform);
+    }
+
+    @Override
+    public TaskDeployment findByDefinitionName(String definitionName) {
+        return taskDeploymentRepository.findTopByTaskDefinitionNameOrderByCreatedOnAsc(definitionName);
+    }
+}
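Both default readers are thin adapters over the existing Spring Data repositories. A sketch of how they might be exposed as beans for the aggregate task explorer to consume; the @Bean wiring is an assumption rather than this patch's actual configuration class:

    // Hypothetical configuration, for illustration.
    @Bean
    public TaskDefinitionReader taskDefinitionReader(TaskDefinitionRepository repository) {
        return new DefaultTaskDefinitionReader(repository);
    }

    @Bean
    public TaskDeploymentReader taskDeploymentReader(TaskDeploymentRepository repository) {
        return new DefaultTaskDeploymentReader(repository);
    }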
diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDao.java
new file mode 100644
index 0000000000..bceca6457b
--- /dev/null
+++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcAggregateJobQueryDao.java
@@ -0,0 +1,774 @@
+/*
+ * Copyright 2019-2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.cloud.dataflow.server.repository;
+
+import javax.sql.DataSource;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.time.Instant;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+import java.util.stream.Collectors;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.batch.core.BatchStatus;
+import org.springframework.batch.core.ExitStatus;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobInstance;
+import org.springframework.batch.core.JobParameter;
+import org.springframework.batch.core.JobParameters;
+import org.springframework.batch.core.launch.NoSuchJobException;
+import org.springframework.batch.core.launch.NoSuchJobExecutionException;
+import org.springframework.batch.core.repository.dao.JdbcJobExecutionDao;
+import org.springframework.batch.core.repository.dao.StepExecutionDao;
+import org.springframework.batch.item.database.Order;
+import org.springframework.batch.item.database.PagingQueryProvider;
+import org.springframework.batch.item.database.support.SqlPagingQueryProviderFactoryBean;
+import org.springframework.cloud.dataflow.rest.job.JobInstanceExecutions;
+import org.springframework.cloud.dataflow.rest.job.TaskJobExecution;
+import org.springframework.cloud.dataflow.schema.AppBootSchemaVersion;
+import org.springframework.cloud.dataflow.schema.SchemaVersionTarget;
+import org.springframework.cloud.dataflow.schema.service.SchemaService;
+import org.springframework.cloud.dataflow.server.batch.JobService;
+import org.springframework.cloud.dataflow.server.service.JobServiceContainer;
+import org.springframework.cloud.dataflow.server.service.impl.OffsetOutOfBoundsException;
+import org.springframework.core.convert.support.ConfigurableConversionService;
+import org.springframework.core.convert.support.DefaultConversionService;
+import org.springframework.dao.DataAccessException;
+import org.springframework.dao.IncorrectResultSizeDataAccessException;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.PageImpl;
+import org.springframework.data.domain.Pageable;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.jdbc.core.ResultSetExtractor;
+import org.springframework.jdbc.core.RowCallbackHandler;
+import org.springframework.jdbc.core.RowMapper;
+import org.springframework.util.Assert;
+import org.springframework.util.StringUtils;
+
+/**
+ * Reads job execution information from a JDBC DataSource, aggregated across schema targets.
+ * Mirrors the {@link JdbcJobExecutionDao} but contains Spring Cloud Data Flow specific
+ * operations. This functionality might be migrated to Spring Batch itself eventually.
+ *
+ * @author Gunnar Hillert
+ * @author Corneil du Plessis
+ */
+public class JdbcAggregateJobQueryDao implements AggregateJobQueryDao {
+    private final static Logger logger = LoggerFactory.getLogger(JdbcAggregateJobQueryDao.class);
+
+    private static final String GET_COUNT = "SELECT COUNT(1) from AGGREGATE_JOB_EXECUTION";
+
+    private static final String GET_COUNT_BY_DATE = "SELECT COUNT(1) from AGGREGATE_JOB_EXECUTION WHERE START_TIME BETWEEN ? AND ?";
+
+    private static final String GET_COUNT_BY_JOB_NAME = "SELECT COUNT(E.JOB_EXECUTION_ID) from AGGREGATE_JOB_INSTANCE I" +
+            " JOIN AGGREGATE_JOB_EXECUTION E ON I.JOB_INSTANCE_ID=E.JOB_INSTANCE_ID AND I.SCHEMA_TARGET=E.SCHEMA_TARGET" +
+            " JOIN AGGREGATE_TASK_BATCH B ON E.JOB_EXECUTION_ID = B.JOB_EXECUTION_ID AND E.SCHEMA_TARGET = B.SCHEMA_TARGET" +
+            " JOIN AGGREGATE_TASK_EXECUTION T ON B.TASK_EXECUTION_ID = T.TASK_EXECUTION_ID AND B.SCHEMA_TARGET = T.SCHEMA_TARGET" +
+            " WHERE I.JOB_NAME LIKE ?";
+
+    private static final String GET_COUNT_BY_STATUS = "SELECT COUNT(E.JOB_EXECUTION_ID) from AGGREGATE_JOB_INSTANCE I" +
+            " JOIN AGGREGATE_JOB_EXECUTION E ON I.JOB_INSTANCE_ID=E.JOB_INSTANCE_ID AND I.SCHEMA_TARGET=E.SCHEMA_TARGET" +
+            " JOIN AGGREGATE_TASK_BATCH B ON E.JOB_EXECUTION_ID = B.JOB_EXECUTION_ID AND E.SCHEMA_TARGET = B.SCHEMA_TARGET" +
+            " JOIN AGGREGATE_TASK_EXECUTION T ON B.TASK_EXECUTION_ID = T.TASK_EXECUTION_ID AND B.SCHEMA_TARGET = T.SCHEMA_TARGET" +
+            " WHERE E.STATUS = ?";
+
+    private static final String GET_COUNT_BY_JOB_INSTANCE_ID = "SELECT COUNT(E.JOB_EXECUTION_ID) from AGGREGATE_JOB_INSTANCE I" +
+            " JOIN AGGREGATE_JOB_EXECUTION E ON I.JOB_INSTANCE_ID=E.JOB_INSTANCE_ID AND I.SCHEMA_TARGET=E.SCHEMA_TARGET" +
+            " WHERE I.JOB_INSTANCE_ID = ? AND I.SCHEMA_TARGET = ?";
+
+    private static final String GET_COUNT_BY_TASK_EXECUTION_ID = "SELECT COUNT(T.TASK_EXECUTION_ID) FROM AGGREGATE_JOB_EXECUTION E" +
+            " JOIN AGGREGATE_TASK_BATCH B ON E.JOB_EXECUTION_ID = B.JOB_EXECUTION_ID AND E.SCHEMA_TARGET = B.SCHEMA_TARGET" +
+            " JOIN AGGREGATE_TASK_EXECUTION T ON B.TASK_EXECUTION_ID = T.TASK_EXECUTION_ID AND B.SCHEMA_TARGET = T.SCHEMA_TARGET" +
+            " WHERE T.TASK_EXECUTION_ID = ? AND T.SCHEMA_TARGET = ?";
+
+    private static final String GET_COUNT_BY_JOB_NAME_AND_STATUS = "SELECT COUNT(E.JOB_EXECUTION_ID) FROM AGGREGATE_JOB_INSTANCE I" +
+            " JOIN AGGREGATE_JOB_EXECUTION E ON I.JOB_INSTANCE_ID = E.JOB_INSTANCE_ID AND I.SCHEMA_TARGET = E.SCHEMA_TARGET" +
+            " JOIN AGGREGATE_TASK_BATCH B ON E.JOB_EXECUTION_ID = B.JOB_EXECUTION_ID AND E.SCHEMA_TARGET = B.SCHEMA_TARGET" +
+            " JOIN AGGREGATE_TASK_EXECUTION T ON B.TASK_EXECUTION_ID = T.TASK_EXECUTION_ID AND B.SCHEMA_TARGET = T.SCHEMA_TARGET" +
+            " WHERE I.JOB_NAME LIKE ? AND E.STATUS = ?";
+
+    private static final String FIELDS = "E.JOB_EXECUTION_ID as JOB_EXECUTION_ID, E.START_TIME as START_TIME," +
+            " E.END_TIME as END_TIME, E.STATUS as STATUS, E.EXIT_CODE as EXIT_CODE, E.EXIT_MESSAGE as EXIT_MESSAGE," +
+            " E.CREATE_TIME as CREATE_TIME, E.LAST_UPDATED as LAST_UPDATED, E.VERSION as VERSION," +
+            " I.JOB_INSTANCE_ID as JOB_INSTANCE_ID, I.JOB_NAME as JOB_NAME, T.TASK_EXECUTION_ID as TASK_EXECUTION_ID," +
+            " E.SCHEMA_TARGET as SCHEMA_TARGET";
+
+    private static final String FIELDS_WITH_STEP_COUNT = FIELDS +
+            ", (SELECT COUNT(*) FROM AGGREGATE_STEP_EXECUTION S WHERE S.JOB_EXECUTION_ID = E.JOB_EXECUTION_ID AND S.SCHEMA_TARGET = E.SCHEMA_TARGET) as STEP_COUNT";
+
+    private static final String GET_RUNNING_EXECUTIONS = "SELECT " + FIELDS +
+            " from AGGREGATE_JOB_EXECUTION E" +
+            " join AGGREGATE_JOB_INSTANCE I ON E.JOB_INSTANCE_ID = I.JOB_INSTANCE_ID AND E.SCHEMA_TARGET = I.SCHEMA_TARGET" +
+            " where E.END_TIME is NULL";
+
+    private static final String NAME_FILTER = "I.JOB_NAME LIKE ?";
+
+    private static final String DATE_RANGE_FILTER = "E.START_TIME BETWEEN ? AND ?";
+
+    private static final String JOB_INSTANCE_ID_FILTER = "I.JOB_INSTANCE_ID = ?
AND I.SCHEMA_TARGET = ?"; + + private static final String STATUS_FILTER = "E.STATUS = ?"; + + private static final String NAME_AND_STATUS_FILTER = "I.JOB_NAME LIKE ? AND E.STATUS = ?"; + + private static final String TASK_EXECUTION_ID_FILTER = + "B.JOB_EXECUTION_ID = E.JOB_EXECUTION_ID AND B.SCHEMA_TARGET = E.SCHEMA_TARGET AND B.TASK_EXECUTION_ID = ? AND E.SCHEMA_TARGET = ?"; + + + private static final String FROM_CLAUSE_TASK_EXEC_BATCH = "JOIN AGGREGATE_TASK_BATCH B ON E.JOB_EXECUTION_ID = B.JOB_EXECUTION_ID AND E.SCHEMA_TARGET = B.SCHEMA_TARGET" + + " JOIN AGGREGATE_TASK_EXECUTION T ON B.TASK_EXECUTION_ID = T.TASK_EXECUTION_ID AND B.SCHEMA_TARGET = T.SCHEMA_TARGET"; + + private static final String FIND_PARAMS_FROM_ID2 = "SELECT JOB_EXECUTION_ID, KEY_NAME, TYPE_CD, " + + "STRING_VAL, DATE_VAL, LONG_VAL, DOUBLE_VAL, IDENTIFYING, 'boot2' as SCHEMA_TARGET from %PREFIX%JOB_EXECUTION_PARAMS where JOB_EXECUTION_ID = ?"; + + private static final String FIND_PARAMS_FROM_ID3 = "SELECT JOB_EXECUTION_ID, PARAMETER_NAME, PARAMETER_TYPE, PARAMETER_VALUE, IDENTIFYING, 'boot3' as SCHEMA_TARGET" + + " from %PREFIX%JOB_EXECUTION_PARAMS where JOB_EXECUTION_ID = ?"; + + private static final String FIND_JOB_BY = "SELECT I.JOB_INSTANCE_ID as JOB_INSTANCE_ID, I.JOB_NAME as JOB_NAME, I.SCHEMA_TARGET as SCHEMA_TARGET," + + " E.JOB_EXECUTION_ID as JOB_EXECUTION_ID, E.START_TIME as START_TIME, E.END_TIME as END_TIME, E.STATUS as STATUS, E.EXIT_CODE as EXIT_CODE, E.EXIT_MESSAGE as EXIT_MESSAGE, E.CREATE_TIME as CREATE_TIME," + + " E.LAST_UPDATED as LAST_UPDATED, E.VERSION as VERSION, T.TASK_EXECUTION_ID as TASK_EXECUTION_ID," + + " (SELECT COUNT(*) FROM AGGREGATE_STEP_EXECUTION S WHERE S.JOB_EXECUTION_ID = E.JOB_EXECUTION_ID AND S.SCHEMA_TARGET = E.SCHEMA_TARGET) as STEP_COUNT" + + " from AGGREGATE_JOB_INSTANCE I" + + " JOIN AGGREGATE_JOB_EXECUTION E ON I.JOB_INSTANCE_ID = E.JOB_INSTANCE_ID AND I.SCHEMA_TARGET = E.SCHEMA_TARGET" + + " LEFT OUTER JOIN AGGREGATE_TASK_BATCH TT ON E.JOB_EXECUTION_ID = TT.JOB_EXECUTION_ID AND E.SCHEMA_TARGET = TT.SCHEMA_TARGET" + + " LEFT OUTER JOIN AGGREGATE_TASK_EXECUTION T ON TT.TASK_EXECUTION_ID = T.TASK_EXECUTION_ID AND TT.SCHEMA_TARGET = T.SCHEMA_TARGET"; + + private static final String FIND_JOB_BY_NAME_INSTANCE_ID = FIND_JOB_BY + + " where I.JOB_NAME = ? AND I.JOB_INSTANCE_ID = ?"; + + private static final String FIND_JOB_BY_INSTANCE_ID_SCHEMA = FIND_JOB_BY + + " where I.JOB_INSTANCE_ID = ? 
AND I.SCHEMA_TARGET = ?"; + + private static final String FIND_JOBS_FIELDS = "I.JOB_INSTANCE_ID as JOB_INSTANCE_ID, I.JOB_NAME as JOB_NAME, I.SCHEMA_TARGET as SCHEMA_TARGET," + + " E.JOB_EXECUTION_ID as JOB_EXECUTION_ID, E.START_TIME as START_TIME, E.END_TIME as END_TIME, E.STATUS as STATUS, E.EXIT_CODE as EXIT_CODE, E.EXIT_MESSAGE as EXIT_MESSAGE, E.CREATE_TIME as CREATE_TIME," + + " E.LAST_UPDATED as LAST_UPDATED, E.VERSION as VERSION, T.TASK_EXECUTION_ID as TASK_EXECUTION_ID"; + + private static final String FIND_JOBS_FIELDS_WITH_STEP_COUNT = FIND_JOBS_FIELDS + + ", (SELECT COUNT(*) FROM AGGREGATE_STEP_EXECUTION S WHERE S.JOB_EXECUTION_ID = E.JOB_EXECUTION_ID AND S.SCHEMA_TARGET = E.SCHEMA_TARGET) as STEP_COUNT"; + + private static final String FIND_JOBS_FROM = "LEFT OUTER JOIN AGGREGATE_TASK_BATCH TT ON E.JOB_EXECUTION_ID = TT.JOB_EXECUTION_ID AND E.SCHEMA_TARGET = TT.SCHEMA_TARGET" + + " LEFT OUTER JOIN AGGREGATE_TASK_EXECUTION T ON TT.TASK_EXECUTION_ID = T.TASK_EXECUTION_ID AND TT.SCHEMA_TARGET = T.SCHEMA_TARGET"; + + private static final String FIND_JOBS_WHERE = "I.JOB_NAME LIKE ?"; + + private static final String FIND_BY_ID_SCHEMA = "E.JOB_EXECUTION_ID = ? AND E.SCHEMA_TARGET = ?"; + + private final PagingQueryProvider allExecutionsPagingQueryProvider; + + + private final PagingQueryProvider byJobNameAndStatusPagingQueryProvider; + + private final PagingQueryProvider byStatusPagingQueryProvider; + + private final PagingQueryProvider byJobNameWithStepCountPagingQueryProvider; + + + private final PagingQueryProvider executionsByDateRangeWithStepCountPagingQueryProvider; + + private final PagingQueryProvider byJobInstanceIdWithStepCountPagingQueryProvider; + + private final PagingQueryProvider byTaskExecutionIdWithStepCountPagingQueryProvider; + + private final PagingQueryProvider jobExecutionsPagingQueryProviderByName; + + private final PagingQueryProvider allExecutionsPagingQueryProviderNoStepCount; + + private final PagingQueryProvider byJobNamePagingQueryProvider; + + private final PagingQueryProvider byJobExecutionIdAndSchemaPagingQueryProvider; + + + private final DataSource dataSource; + + private final JdbcTemplate jdbcTemplate; + + private final SchemaService schemaService; + + private final JobServiceContainer jobServiceContainer; + + private final ConfigurableConversionService conversionService = new DefaultConversionService(); + + private final Map stepExecutionDaoContainer = new HashMap<>(); + + public JdbcAggregateJobQueryDao(DataSource dataSource, SchemaService schemaService, JobServiceContainer jobServiceContainer) throws Exception { + this.dataSource = dataSource; + this.jdbcTemplate = new JdbcTemplate(dataSource); + this.schemaService = schemaService; + this.jobServiceContainer = jobServiceContainer; + + allExecutionsPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, null); + + executionsByDateRangeWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, DATE_RANGE_FILTER); + allExecutionsPagingQueryProviderNoStepCount = getPagingQueryProvider(FROM_CLAUSE_TASK_EXEC_BATCH, null); + byStatusPagingQueryProvider = getPagingQueryProvider(FROM_CLAUSE_TASK_EXEC_BATCH, STATUS_FILTER); + byJobNameAndStatusPagingQueryProvider = getPagingQueryProvider(FROM_CLAUSE_TASK_EXEC_BATCH, NAME_AND_STATUS_FILTER); + byJobNamePagingQueryProvider = getPagingQueryProvider(FROM_CLAUSE_TASK_EXEC_BATCH, NAME_FILTER); + byJobNameWithStepCountPagingQueryProvider = 
getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, NAME_FILTER); + + byJobInstanceIdWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, JOB_INSTANCE_ID_FILTER); + byTaskExecutionIdWithStepCountPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, TASK_EXECUTION_ID_FILTER); + jobExecutionsPagingQueryProviderByName = getPagingQueryProvider(FIND_JOBS_FIELDS, FIND_JOBS_FROM, FIND_JOBS_WHERE, Collections.singletonMap("JOB_INSTANCE_ID", Order.DESCENDING)); + byJobExecutionIdAndSchemaPagingQueryProvider = getPagingQueryProvider(FIELDS_WITH_STEP_COUNT, FROM_CLAUSE_TASK_EXEC_BATCH, FIND_BY_ID_SCHEMA); + + } + + @Override + public Page listJobInstances(String jobName, Pageable pageable) { + int total = countJobExecutions(jobName); + List taskJobInstancesForJobName = total > 0 + ? getTaskJobInstancesForJobName(jobName, pageable) + : Collections.emptyList(); + return new PageImpl<>(taskJobInstancesForJobName, pageable, total); + + } + + @Override + public JobInstanceExecutions getJobInstanceExecution(String jobName, long instanceId) { + logger.debug("getJobInstanceExecution:{}:{}:{}", jobName, instanceId, FIND_JOB_BY_NAME_INSTANCE_ID); + List executions = jdbcTemplate.query(FIND_JOB_BY_NAME_INSTANCE_ID, new JobInstanceExecutionsExtractor(true), jobName, instanceId); + if (executions == null || executions.isEmpty()) { + return null; + } else if (executions.size() > 1) { + throw new RuntimeException("Expected a single JobInstanceExecutions not " + executions.size()); + } + return executions.get(0); + } + + @Override + public JobInstanceExecutions getJobInstanceExecutions(long jobInstanceId, String schemaTarget) { + List executions = jdbcTemplate.query(FIND_JOB_BY_INSTANCE_ID_SCHEMA, new JobInstanceExecutionsExtractor(true), jobInstanceId, schemaTarget); + if (executions == null || executions.isEmpty()) { + return null; + } else if (executions.size() > 1) { + throw new RuntimeException("Expected a single JobInstanceExecutions not " + executions.size()); + } + return executions.get(0); + } + + @Override + public Page listJobExecutions(String jobName, BatchStatus status, Pageable pageable) throws NoSuchJobExecutionException { + int total = countJobExecutions(jobName, status); + List executions = getJobExecutions(jobName, status, getPageOffset(pageable), pageable.getPageSize()); + Assert.isTrue(total >= executions.size(), () -> "Expected total at least " + executions.size() + " not " + total); + return new PageImpl<>(executions, pageable, total); + } + + @Override + public Page listJobExecutionsBetween(Date fromDate, Date toDate, Pageable pageable) { + int total = countJobExecutionsByDate(fromDate, toDate); + List executions = total > 0 + ? getTaskJobExecutionsByDate(fromDate, toDate, getPageOffset(pageable), pageable.getPageSize()) + : Collections.emptyList(); + return new PageImpl<>(executions, pageable, total); + } + + + @Override + public Page listJobExecutionsWithSteps(Pageable pageable) { + int total = countJobExecutions(); + List jobExecutions = total > 0 + ? getJobExecutionsWithStepCount(getPageOffset(pageable), pageable.getPageSize()) + : Collections.emptyList(); + return new PageImpl<>(jobExecutions, pageable, total); + } + + @Override + public Page listJobExecutionsWithStepCount(Pageable pageable) { + int total = countJobExecutions(); + List jobExecutions = total > 0 + ? 
getJobExecutionsWithStepCount(getPageOffset(pageable), pageable.getPageSize())
+                : Collections.emptyList();
+        return new PageImpl<>(jobExecutions, pageable, total);
+    }
+
+    @Override
+    public Page<TaskJobExecution> listJobExecutionsForJobWithStepCountFilteredByJobInstanceId(int jobInstanceId, String schemaTarget, Pageable pageable) {
+        int total = countJobExecutionsByInstanceId(jobInstanceId, schemaTarget);
+        List<TaskJobExecution> jobExecutions = total > 0
+                ? getJobExecutionsWithStepCountFilteredByJobInstanceId(jobInstanceId, schemaTarget, getPageOffset(pageable), pageable.getPageSize())
+                : Collections.emptyList();
+        return new PageImpl<>(jobExecutions, pageable, total);
+    }
+
+    @Override
+    public Page<TaskJobExecution> listJobExecutionsForJobWithStepCountFilteredByTaskExecutionId(int taskExecutionId, String schemaTarget, Pageable pageable) {
+        int total = countJobExecutionsByTaskExecutionId(taskExecutionId, schemaTarget);
+        List<TaskJobExecution> jobExecutions = total > 0
+                ? getJobExecutionsWithStepCountFilteredByTaskExecutionId(taskExecutionId, schemaTarget, getPageOffset(pageable), pageable.getPageSize())
+                : Collections.emptyList();
+        return new PageImpl<>(jobExecutions, pageable, total);
+    }
+
+    @Override
+    public Page<TaskJobExecution> listJobExecutionsForJobWithStepCount(String jobName, Pageable pageable) throws NoSuchJobException {
+        int total = countJobExecutions(jobName);
+        if(total == 0) {
+            throw new NoSuchJobException("No Job with that name either current or historic: [" + jobName + "]");
+        }
+        List<TaskJobExecution> jobExecutions = getJobExecutionsWithStepCount(jobName, getPageOffset(pageable), pageable.getPageSize());
+        return new PageImpl<>(jobExecutions, pageable, total);
+    }
+
+    @Override
+    public TaskJobExecution getJobExecution(long jobExecutionId, String schemaTarget) throws NoSuchJobExecutionException {
+        List<TaskJobExecution> jobExecutions = getJobExecutionPage(jobExecutionId, schemaTarget);
+        if (jobExecutions.isEmpty()) {
+            throw new NoSuchJobExecutionException(String.format("Job id %s for schema target %s not found", jobExecutionId, schemaTarget));
+        }
+        if (jobExecutions.size() > 1) {
+            logger.debug("Too many job executions:{}", jobExecutions);
+            logger.warn("Expected only 1 job for {}: not {}", jobExecutionId, jobExecutions.size());
+        }
+        TaskJobExecution taskJobExecution = jobExecutions.get(0);
+        JobService jobService = jobServiceContainer.get(taskJobExecution.getSchemaTarget());
+        jobService.addStepExecutions(taskJobExecution.getJobExecution());
+        return taskJobExecution;
+    }
+
+    private List<TaskJobExecution> getJobExecutionPage(long jobExecutionId, String schemaTarget) {
+        return queryForProvider(
+                byJobExecutionIdAndSchemaPagingQueryProvider,
+                new JobExecutionRowMapper(true),
+                0,
+                2,
+                jobExecutionId,
+                schemaTarget
+        );
+    }
+
+    private int countJobExecutions() {
+        logger.debug("countJobExecutions:{}", GET_COUNT);
+        Integer count = jdbcTemplate.queryForObject(GET_COUNT, Integer.class);
+        return count != null ? count : 0;
+    }
+
+    private int countJobExecutionsByDate(Date fromDate, Date toDate) {
+        Assert.notNull(fromDate, "fromDate must not be null");
+        Assert.notNull(toDate, "toDate must not be null");
+        logger.debug("countJobExecutionsByDate:{}:{}:{}", fromDate, toDate, GET_COUNT_BY_DATE);
+        Integer count = jdbcTemplate.queryForObject(GET_COUNT_BY_DATE, Integer.class, fromDate, toDate);
+        return count != null ?
count : 0; + } + + private int countJobExecutions(String jobName) { + logger.debug("countJobExecutions:{}:{}", jobName, GET_COUNT_BY_JOB_NAME); + Integer count = jdbcTemplate.queryForObject(GET_COUNT_BY_JOB_NAME, Integer.class, jobName); + return count != null ? count : 0; + } + + private int countJobExecutions(BatchStatus status) { + logger.debug("countJobExecutions:{}:{}", status, GET_COUNT_BY_STATUS); + Integer count = jdbcTemplate.queryForObject(GET_COUNT_BY_STATUS, Integer.class, status.name()); + return count != null ? count : 0; + } + + private int countJobExecutions(String jobName, BatchStatus status) { + logger.debug("countJobExecutions:{}:{}", jobName, status); + Integer count; + if (StringUtils.hasText(jobName) && status != null) { + logger.debug("countJobExecutions:{}:{}:{}", jobName, status, GET_COUNT_BY_JOB_NAME_AND_STATUS); + count = jdbcTemplate.queryForObject(GET_COUNT_BY_JOB_NAME_AND_STATUS, Integer.class, jobName, status.name()); + } else if (status != null) { + logger.debug("countJobExecutions:{}:{}", status, GET_COUNT_BY_STATUS); + count = jdbcTemplate.queryForObject(GET_COUNT_BY_STATUS, Integer.class, status.name()); + } else if (StringUtils.hasText(jobName)) { + logger.debug("countJobExecutions:{}:{}", jobName, GET_COUNT_BY_JOB_NAME); + count = jdbcTemplate.queryForObject(GET_COUNT_BY_JOB_NAME, Integer.class, jobName); + } else { + count = jdbcTemplate.queryForObject(GET_COUNT, Integer.class); + } + return count != null ? count : 0; + } + + private int countJobExecutionsByInstanceId(int jobInstanceId, String schemaTarget) { + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + logger.debug("countJobExecutionsByInstanceId:{}:{}:{}", jobInstanceId, schemaTarget, GET_COUNT_BY_JOB_INSTANCE_ID); + Integer count = jdbcTemplate.queryForObject(GET_COUNT_BY_JOB_INSTANCE_ID, Integer.class, jobInstanceId, schemaTarget); + return count != null ? count : 0; + } + + private int countJobExecutionsByTaskExecutionId(int taskExecutionId, String schemaTarget) { + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + logger.debug("countJobExecutionsByTaskExecutionId:{}:{}:{}", taskExecutionId, schemaTarget, GET_COUNT_BY_TASK_EXECUTION_ID); + Integer count = jdbcTemplate.queryForObject(GET_COUNT_BY_TASK_EXECUTION_ID, Integer.class, taskExecutionId, schemaTarget); + return count != null ? 
count : 0; + } + + private List getJobExecutionsWithStepCountFilteredByJobInstanceId( + int jobInstanceId, + String schemaTarget, + int start, + int count + ) { + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + return queryForProvider( + byJobInstanceIdWithStepCountPagingQueryProvider, + new JobExecutionRowMapper(true), + start, + count, + jobInstanceId, + schemaTarget + ); + } + + private List getJobExecutionsWithStepCountFilteredByTaskExecutionId( + int taskExecutionId, + String schemaTarget, + int start, + int count + ) { + if (!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + return queryForProvider( + byTaskExecutionIdWithStepCountPagingQueryProvider, + new JobExecutionRowMapper(true), + start, + count, + taskExecutionId, + schemaTarget + ); + } + + private List getJobExecutions(String jobName, BatchStatus status, int start, int count) throws NoSuchJobExecutionException { + if (StringUtils.hasText(jobName) && status != null) { + return queryForProvider(byJobNameAndStatusPagingQueryProvider, new JobExecutionRowMapper(false), start, count, jobName, status.name()); + } else if (status != null) { + return queryForProvider(byStatusPagingQueryProvider, new JobExecutionRowMapper(false), start, count, status.name()); + } else if (StringUtils.hasText(jobName)) { + return queryForProvider(byJobNamePagingQueryProvider, new JobExecutionRowMapper(false), start, count, jobName); + } + return queryForProvider(allExecutionsPagingQueryProviderNoStepCount, new JobExecutionRowMapper(false), start, count); + } + + private List getJobExecutionsWithStepCount(String jobName, int start, int count) { + return queryForProvider(byJobNameWithStepCountPagingQueryProvider, new JobExecutionRowMapper(true), start, count, jobName); + } + + public List getJobExecutionsWithStepCount(int start, int count) { + return queryForProvider(allExecutionsPagingQueryProvider, new JobExecutionRowMapper(true), start, count); + } + + protected JobParameters getJobParameters(Long executionId, String schemaTarget) { + final Map map = new HashMap<>(); + final SchemaVersionTarget schemaVersionTarget = schemaService.getTarget(schemaTarget); + boolean boot2 = AppBootSchemaVersion.BOOT2 == schemaVersionTarget.getSchemaVersion(); + RowCallbackHandler handler; + if (boot2) { + handler = rs -> { + String keyName = rs.getString("KEY_NAME"); + JobParameter.ParameterType type = JobParameter.ParameterType.valueOf(rs.getString("TYPE_CD")); + boolean identifying = rs.getString("IDENTIFYING").equalsIgnoreCase("Y"); + JobParameter value; + switch (type) { + case STRING: + value = new JobParameter(rs.getString("STRING_VAL"), identifying); + break; + case LONG: + long longValue = rs.getLong("LONG_VAL"); + value = new JobParameter(rs.wasNull() ? null : longValue, identifying); + break; + case DOUBLE: + double doubleValue = rs.getDouble("DOUBLE_VAL"); + value = new JobParameter(rs.wasNull() ? 
null : doubleValue, identifying); + break; + case DATE: + value = new JobParameter(rs.getTimestamp("DATE_VAL"), identifying); + break; + default: + logger.error("Unknown type:{} for {}", type, keyName); + return; + } + map.put(keyName, value); + }; + } else { + handler = rs -> { + String parameterName = rs.getString("PARAMETER_NAME"); + Class parameterType = null; + try { + parameterType = Class.forName(rs.getString("PARAMETER_TYPE")); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + String stringValue = rs.getString("PARAMETER_VALUE"); + boolean identifying = rs.getString("IDENTIFYING").equalsIgnoreCase("Y"); + Object typedValue = conversionService.convert(stringValue, parameterType); + JobParameter value; + if (typedValue instanceof String) { + value = new JobParameter((String) typedValue, identifying); + } else if (typedValue instanceof Date) { + value = new JobParameter((Date) typedValue, identifying); + } else if (typedValue instanceof Double) { + value = new JobParameter((Double) typedValue, identifying); + } else if (typedValue instanceof Number) { + value = new JobParameter(((Number) typedValue).doubleValue(), identifying); + } else if (typedValue instanceof Instant) { + value = new JobParameter(new Date(((Instant) typedValue).toEpochMilli()), identifying); + } else { + + value = new JobParameter(typedValue != null ? typedValue.toString() : null, identifying); + } + map.put(parameterName, value); + }; + } + + jdbcTemplate.query( + getQuery( + boot2 ? FIND_PARAMS_FROM_ID2 : FIND_PARAMS_FROM_ID3, + schemaVersionTarget.getBatchPrefix() + ), + handler, + executionId + ); + return new JobParameters(map); + } + + private > List queryForProvider(P pagingQueryProvider, M mapper, int start, int count, Object... arguments) { + if (start <= 0) { + String sql = pagingQueryProvider.generateFirstPageQuery(count); + if (logger.isDebugEnabled()) { + logger.debug("queryFirstPage:{}:{}:{}:{}", sql, start, count, Arrays.asList(arguments)); + } + return jdbcTemplate.query(sql, mapper, arguments); + } else { + try { + String sqlJump = pagingQueryProvider.generateJumpToItemQuery(start, count); + if (logger.isDebugEnabled()) { + logger.debug("queryJumpToItem:{}:{}:{}:{}", sqlJump, start, count, Arrays.asList(arguments)); + } + Long startValue; + startValue = jdbcTemplate.queryForObject(sqlJump, Long.class, arguments); + List args = new ArrayList<>(Arrays.asList(arguments)); + args.add(startValue); + String sql = pagingQueryProvider.generateRemainingPagesQuery(count); + if (logger.isDebugEnabled()) { + logger.debug("queryRemaining:{}:{}:{}:{}", sql, start, count, args); + } + return jdbcTemplate.query(sql, mapper, args.toArray()); + } catch (IncorrectResultSizeDataAccessException x) { + return Collections.emptyList(); + } + } + } + + private >> List queryForProvider(P pagingQueryProvider, R extractor, int start, int count, Object... 
arguments) { + if (start <= 0) { + String sql = pagingQueryProvider.generateFirstPageQuery(count); + if (logger.isDebugEnabled()) { + logger.debug("queryFirstPage:{}:{}:{}:{}", sql, start, count, Arrays.asList(arguments)); + } + return jdbcTemplate.query(sql, extractor, arguments); + } else { + String sqlJump = pagingQueryProvider.generateJumpToItemQuery(start, count); + if (logger.isDebugEnabled()) { + logger.debug("queryJumpToItem:{}:{}:{}:{}", sqlJump, start, count, Arrays.asList(arguments)); + } + Long startValue = jdbcTemplate.queryForObject(sqlJump, Long.class, arguments); + List args = new ArrayList<>(Arrays.asList(arguments)); + args.add(startValue); + String sql = pagingQueryProvider.generateRemainingPagesQuery(count); + if (logger.isDebugEnabled()) { + logger.debug("queryRemaining:{}:{}:{}:{}", sql, start, count, args); + } + return jdbcTemplate.query(sql, extractor, args.toArray()); + } + } + + private List getTaskJobInstancesForJobName(String jobName, Pageable pageable) { + Assert.notNull(pageable, "pageable must not be null"); + Assert.notNull(jobName, "jobName must not be null"); + int start = getPageOffset(pageable); + int count = pageable.getPageSize(); + return queryForProvider(jobExecutionsPagingQueryProviderByName, new JobInstanceExecutionsExtractor(false), start, count, jobName); + } + + private TaskJobExecution createJobExecutionFromResultSet(ResultSet rs, int row, boolean readStepCount) throws SQLException { + long taskExecutionId = rs.getLong("TASK_EXECUTION_ID"); + Long jobExecutionId = rs.getLong("JOB_EXECUTION_ID"); + JobExecution jobExecution; + String schemaTarget = rs.getString("SCHEMA_TARGET"); + JobParameters jobParameters = getJobParameters(jobExecutionId, schemaTarget); + + JobInstance jobInstance = new JobInstance(rs.getLong("JOB_INSTANCE_ID"), rs.getString("JOB_NAME")); + jobExecution = new JobExecution(jobInstance, jobParameters); + jobExecution.setId(jobExecutionId); + + jobExecution.setStartTime(rs.getTimestamp("START_TIME")); + jobExecution.setEndTime(rs.getTimestamp("END_TIME")); + jobExecution.setStatus(BatchStatus.valueOf(rs.getString("STATUS"))); + jobExecution.setExitStatus(new ExitStatus(rs.getString("EXIT_CODE"), rs.getString("EXIT_MESSAGE"))); + jobExecution.setCreateTime(rs.getTimestamp("CREATE_TIME")); + jobExecution.setLastUpdated(rs.getTimestamp("LAST_UPDATED")); + jobExecution.setVersion(rs.getInt("VERSION")); + + return readStepCount ? 
+ new TaskJobExecution(taskExecutionId, jobExecution, true, rs.getInt("STEP_COUNT"), schemaTarget) : + new TaskJobExecution(taskExecutionId, jobExecution, true, schemaTarget); + } + + private List getTaskJobExecutionsByDate(Date startDate, Date endDate, int start, int count) { + return queryForProvider( + executionsByDateRangeWithStepCountPagingQueryProvider, + new JobExecutionRowMapper(true), + start, + count, + startDate, + endDate + ); + } + + private class JobInstanceExecutionsExtractor implements ResultSetExtractor> { + final boolean readStepCount; + + public JobInstanceExecutionsExtractor(boolean readStepCount) { + this.readStepCount = readStepCount; + } + + @Override + public List extractData(ResultSet rs) throws SQLException, + DataAccessException { + final Map> taskJobExecutions = new HashMap<>(); + final Map jobInstances = new TreeMap<>(); + + while (rs.next()) { + Long id = rs.getLong("JOB_INSTANCE_ID"); + JobInstance jobInstance; + if (!jobInstances.containsKey(id)) { + jobInstance = new JobInstance(id, rs.getString("JOB_NAME")); + jobInstances.put(id, jobInstance); + } else { + jobInstance = jobInstances.get(id); + } + long taskId = rs.getLong("TASK_EXECUTION_ID"); + if (!rs.wasNull()) { + String schemaTarget = rs.getString("SCHEMA_TARGET"); + List executions = taskJobExecutions.computeIfAbsent(id, k -> new ArrayList<>()); + long jobExecutionId = rs.getLong("JOB_EXECUTION_ID"); + JobParameters jobParameters = getJobParameters(jobExecutionId, schemaTarget); + JobExecution jobExecution = new JobExecution(jobInstance, jobExecutionId, jobParameters, null); + + int stepCount = readStepCount ? rs.getInt("STEP_COUNT") : 0; + TaskJobExecution execution = new TaskJobExecution(taskId, jobExecution, true, stepCount, schemaTarget); + executions.add(execution); + } + } + return jobInstances.values() + .stream() + .map(jobInstance -> new JobInstanceExecutions(jobInstance, taskJobExecutions.get(jobInstance.getInstanceId()))) + .collect(Collectors.toList()); + } + + } + + class JobExecutionRowMapper implements RowMapper { + boolean readStepCount; + + JobExecutionRowMapper(boolean readStepCount) { + this.readStepCount = readStepCount; + } + + @Override + public TaskJobExecution mapRow(ResultSet rs, int rowNum) throws SQLException { + return createJobExecutionFromResultSet(rs, rowNum, readStepCount); + } + + } + + protected String getQuery(String base, String tablePrefix) { + return StringUtils.replace(base, "%PREFIX%", tablePrefix); + } + + private int getPageOffset(Pageable pageable) { + if (pageable.getOffset() > (long) Integer.MAX_VALUE) { + throw new OffsetOutOfBoundsException("The pageable offset requested for this query is greater than MAX_INT."); + } + return (int) pageable.getOffset(); + } + + /** + * @return a {@link PagingQueryProvider} for all job executions + * @throws Exception if page provider is not created. + */ + private PagingQueryProvider getPagingQueryProvider() throws Exception { + return getPagingQueryProvider(null, null, null, Collections.emptyMap()); + } + + /** + * @return a {@link PagingQueryProvider} for all job executions with the + * provided where clause + * @throws Exception if page provider is not created. + */ + private PagingQueryProvider getPagingQueryProvider(String whereClause) throws Exception { + return getPagingQueryProvider(null, null, whereClause, Collections.emptyMap()); + } + + /** + * @return a {@link PagingQueryProvider} with a where clause to narrow the + * query + * @throws Exception if page provider is not created. 
+ */ + private PagingQueryProvider getPagingQueryProvider(String fromClause, String whereClause) throws Exception { + return getPagingQueryProvider(null, fromClause, whereClause, Collections.emptyMap()); + } + + private PagingQueryProvider getPagingQueryProvider(String fields, String fromClause, String whereClause) throws Exception { + return getPagingQueryProvider(fields, fromClause, whereClause, Collections.emptyMap()); + } + + /** + * @return a {@link PagingQueryProvider} with a where clause to narrow the + * query + * @throws Exception if page provider is not created. + */ + private PagingQueryProvider getPagingQueryProvider(String fields, String fromClause, String whereClause, Map sortKeys) throws Exception { + SqlPagingQueryProviderFactoryBean factory = new SqlPagingQueryProviderFactoryBean(); + factory.setDataSource(dataSource); + fromClause = "AGGREGATE_JOB_INSTANCE I JOIN AGGREGATE_JOB_EXECUTION E ON I.JOB_INSTANCE_ID=E.JOB_INSTANCE_ID AND I.SCHEMA_TARGET=E.SCHEMA_TARGET" + (fromClause == null ? "" : " " + fromClause); + factory.setFromClause(fromClause); + if (fields == null) { + fields = FIELDS; + } + factory.setSelectClause(fields); + if (sortKeys.isEmpty()) { + sortKeys = Collections.singletonMap("E.JOB_EXECUTION_ID", Order.DESCENDING); + } + factory.setSortKeys(sortKeys); + factory.setWhereClause(whereClause); + + return factory.getObject(); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowJobExecutionDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowJobExecutionDao.java index bd05d51e59..4aecb3b514 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowJobExecutionDao.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowJobExecutionDao.java @@ -38,6 +38,7 @@ * be migrated to Spring Batch itself eventually. 
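To make the composed paging SQL concrete: a sketch that builds a provider the same way the getPagingQueryProvider overloads above do, inside a method declaring throws Exception as the constructor does. The rendered SQL in the comment is indicative of Spring Batch's dialect-specific output, not captured server output.

    // Illustrative only: how the aggregate-view paging query is composed.
    SqlPagingQueryProviderFactoryBean factory = new SqlPagingQueryProviderFactoryBean();
    factory.setDataSource(dataSource);
    factory.setSelectClause(FIELDS);
    factory.setFromClause("AGGREGATE_JOB_INSTANCE I JOIN AGGREGATE_JOB_EXECUTION E"
            + " ON I.JOB_INSTANCE_ID=E.JOB_INSTANCE_ID AND I.SCHEMA_TARGET=E.SCHEMA_TARGET");
    factory.setSortKeys(Collections.singletonMap("E.JOB_EXECUTION_ID", Order.DESCENDING));
    PagingQueryProvider provider = factory.getObject();
    // generateFirstPageQuery(20) then yields, roughly:
    //   SELECT <fields> FROM <from clause> ORDER BY E.JOB_EXECUTION_ID DESC  -- limited to 20 rows, dialect specific
    String firstPage = provider.generateFirstPageQuery(20);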
* * @author Gunnar Hillert + * @author Corneil du Plessis */ public class JdbcDataflowJobExecutionDao implements DataflowJobExecutionDao { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionDao.java index 02c9de1537..35a374eaf8 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionDao.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionDao.java @@ -16,19 +16,21 @@ package org.springframework.cloud.dataflow.server.repository; -import java.sql.ResultSet; -import java.sql.SQLException; import java.sql.Types; import java.util.Collections; +import java.util.HashSet; import java.util.Set; import java.util.TreeSet; import javax.sql.DataSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.cloud.dataflow.server.repository.support.SchemaUtilities; import org.springframework.cloud.task.configuration.TaskProperties; import org.springframework.cloud.task.repository.dao.JdbcTaskExecutionDao; import org.springframework.dao.DataAccessException; -import org.springframework.jdbc.core.ResultSetExtractor; import org.springframework.jdbc.core.namedparam.MapSqlParameterSource; import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; import org.springframework.util.Assert; @@ -42,22 +44,23 @@ * @author Gunnar Hillert * @author Glenn Renfro * @author Ilayaperumal Gopinathan + * @author Corneil du Plessis */ public class JdbcDataflowTaskExecutionDao implements DataflowTaskExecutionDao { - + private final static Logger logger = LoggerFactory.getLogger(JdbcDataflowTaskExecutionDao.class); private final NamedParameterJdbcTemplate jdbcTemplate; private static final String DELETE_TASK_EXECUTIONS = "DELETE FROM %PREFIX%EXECUTION " - + "WHERE task_execution_id in (:taskExecutionIds)"; + + "WHERE TASK_EXECUTION_ID in (:taskExecutionIds)"; private static final String DELETE_TASK_EXECUTION_PARAMS = "DELETE FROM %PREFIX%EXECUTION_PARAMS " - + "WHERE task_execution_id in (:taskExecutionIds)"; + + "WHERE TASK_EXECUTION_ID in (:taskExecutionIds)"; private static final String DELETE_TASK_TASK_BATCH = "DELETE FROM %PREFIX%TASK_BATCH " - + "WHERE task_execution_id in (:taskExecutionIds)"; + + "WHERE TASK_EXECUTION_ID in (:taskExecutionIds)"; - private static final String SELECT_CHILD_TASK_EXECUTION_IDS = "SELECT task_execution_id FROM %PREFIX%EXECUTION " - + "WHERE parent_execution_id in (:parentTaskExecutionIds)"; + private static final String SELECT_CHILD_TASK_EXECUTION_IDS = "SELECT TASK_EXECUTION_ID FROM %PREFIX%EXECUTION " + + "WHERE PARENT_EXECUTION_ID in (:parentTaskExecutionIds)"; private static final String FIND_TASK_EXECUTION_IDS_BY_TASK_NAME = "SELECT TASK_EXECUTION_ID " + "from %PREFIX%EXECUTION where TASK_NAME = :taskName"; @@ -87,7 +90,7 @@ public class JdbcDataflowTaskExecutionDao implements DataflowTaskExecutionDao { + "from %PREFIX%EXECUTION where TASK_NAME = :taskName"; - private TaskProperties taskProperties; + private final TaskProperties taskProperties; /** * @param dataSource used by the dao to execute queries and updates the tables. 
@@ -104,7 +107,7 @@ public JdbcDataflowTaskExecutionDao(DataSource dataSource, TaskProperties taskPr public int deleteTaskExecutionsByTaskExecutionIds(Set taskExecutionIds) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource() .addValue("taskExecutionIds", taskExecutionIds); - final String query = getQuery(DELETE_TASK_EXECUTIONS); + final String query = SchemaUtilities.getQuery(DELETE_TASK_EXECUTIONS, this.taskProperties.getTablePrefix()); return this.jdbcTemplate.update(query, queryParameters); } @@ -112,7 +115,7 @@ public int deleteTaskExecutionsByTaskExecutionIds(Set taskExecutionIds) { public int deleteTaskExecutionParamsByTaskExecutionIds(Set taskExecutionIds) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource() .addValue("taskExecutionIds", taskExecutionIds); - final String query = getQuery(DELETE_TASK_EXECUTION_PARAMS); + final String query = SchemaUtilities.getQuery(DELETE_TASK_EXECUTION_PARAMS, this.taskProperties.getTablePrefix()); return this.jdbcTemplate.update(query, queryParameters); } @@ -120,27 +123,24 @@ public int deleteTaskExecutionParamsByTaskExecutionIds(Set taskExecutionId public int deleteTaskTaskBatchRelationshipsByTaskExecutionIds(Set taskExecutionIds) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource() .addValue("taskExecutionIds", taskExecutionIds); - final String query = getQuery(DELETE_TASK_TASK_BATCH); + final String query = SchemaUtilities.getQuery(DELETE_TASK_TASK_BATCH, this.taskProperties.getTablePrefix()); return this.jdbcTemplate.update(query, queryParameters); } - private String getQuery(String base) { - return StringUtils.replace(base, "%PREFIX%", this.taskProperties.getTablePrefix()); - } + @Override public Set findChildTaskExecutionIds(Set taskExecutionIds) { + logger.debug("findChildTaskExecutionIds:{}", taskExecutionIds); final MapSqlParameterSource queryParameters = new MapSqlParameterSource() .addValue("parentTaskExecutionIds", taskExecutionIds); Set childTaskExecutionIds; try { childTaskExecutionIds = this.jdbcTemplate.query( - getQuery(SELECT_CHILD_TASK_EXECUTION_IDS), queryParameters, - new ResultSetExtractor>() { - @Override - public Set extractData(ResultSet resultSet) - throws SQLException, DataAccessException { + SchemaUtilities.getQuery(SELECT_CHILD_TASK_EXECUTION_IDS, this.taskProperties.getTablePrefix()), + queryParameters, + resultSet -> { Set jobExecutionIds = new TreeSet<>(); @@ -150,19 +150,21 @@ public Set extractData(ResultSet resultSet) } return jobExecutionIds; - } }); + Assert.notNull(childTaskExecutionIds, "Expected childTaskExecutionIds"); } catch (DataAccessException e) { childTaskExecutionIds = Collections.emptySet(); } - if (!childTaskExecutionIds.isEmpty()) { - childTaskExecutionIds.addAll(this.findChildTaskExecutionIds(childTaskExecutionIds)); + Set newChildren = new HashSet<>(childTaskExecutionIds); + newChildren.removeAll(taskExecutionIds); + if(!newChildren.isEmpty()) { + childTaskExecutionIds.addAll(this.findChildTaskExecutionIds(newChildren)); + } } - + logger.debug("findChildTaskExecutionIds:childTaskExecutionIds={}", childTaskExecutionIds); return childTaskExecutionIds; - } @Override @@ -171,11 +173,10 @@ public Set getTaskExecutionIdsByTaskName(String taskName) { .addValue("taskName", taskName, Types.VARCHAR); try { - return this.jdbcTemplate.query(getQuery(FIND_TASK_EXECUTION_IDS_BY_TASK_NAME), - queryParameters, new ResultSetExtractor>() { - @Override - public Set extractData(ResultSet resultSet) - throws SQLException, DataAccessException { + 
return this.jdbcTemplate.query( + SchemaUtilities.getQuery(FIND_TASK_EXECUTION_IDS_BY_TASK_NAME, this.taskProperties.getTablePrefix()), + queryParameters, + resultSet -> { Set taskExecutionIds = new TreeSet<>(); while (resultSet.next()) { @@ -183,7 +184,6 @@ public Set extractData(ResultSet resultSet) .add(resultSet.getLong("TASK_EXECUTION_ID")); } return taskExecutionIds; - } }); } catch (DataAccessException e) { @@ -193,60 +193,54 @@ public Set extractData(ResultSet resultSet) @Override public Integer getAllTaskExecutionsCount(boolean onlyCompleted, String taskName) { - String QUERY = null; - MapSqlParameterSource queryParameters = null; + String QUERY; + MapSqlParameterSource queryParameters = new MapSqlParameterSource(); if (StringUtils.hasText(taskName)) { - queryParameters = new MapSqlParameterSource() - .addValue("taskName", taskName, Types.VARCHAR); + queryParameters.addValue("taskName", taskName, Types.VARCHAR); QUERY = (onlyCompleted) ? GET_COMPLETED_TASK_EXECUTIONS_COUNT_BY_TASK_NAME : GET_ALL_TASK_EXECUTIONS_COUNT_BY_TASK_NAME; } else { QUERY = (onlyCompleted) ? GET_COMPLETED_TASK_EXECUTIONS_COUNT: GET_ALL_TASK_EXECUTIONS_COUNT; } try { - return this.jdbcTemplate.query(getQuery(QUERY), - queryParameters, new ResultSetExtractor() { - @Override - public Integer extractData(ResultSet resultSet) - throws SQLException, DataAccessException { + return this.jdbcTemplate.query( + SchemaUtilities.getQuery(QUERY, this.taskProperties.getTablePrefix()), + queryParameters, + resultSet -> { if (resultSet.next()) { return resultSet.getInt("count"); } - return Integer.valueOf(0); - } + return 0; }); } catch (DataAccessException e) { - return Integer.valueOf(0); + return 0; } } @Override public Set getAllTaskExecutionIds(boolean onlyCompleted, String taskName) { - String QUERY = null; - MapSqlParameterSource queryParameters = null; + String QUERY; + MapSqlParameterSource queryParameters = new MapSqlParameterSource(); if (StringUtils.hasText(taskName)) { - queryParameters = new MapSqlParameterSource() - .addValue("taskName", taskName, Types.VARCHAR); + queryParameters.addValue("taskName", taskName, Types.VARCHAR); QUERY = (onlyCompleted) ? FIND_ALL_COMPLETED_TASK_EXECUTION_IDS_BY_TASK_NAME : FIND_ALL_TASK_EXECUTION_IDS_BY_TASK_NAME; } else { QUERY = (onlyCompleted) ? 
FIND_ALL_COMPLETED_TASK_EXECUTION_IDS : FIND_ALL_TASK_EXECUTION_IDS; } try { - return this.jdbcTemplate.query(getQuery(QUERY), queryParameters, new ResultSetExtractor>() { - @Override - public Set extractData(ResultSet resultSet) - throws SQLException, DataAccessException { + return this.jdbcTemplate.query( + SchemaUtilities.getQuery(QUERY, this.taskProperties.getTablePrefix()), + queryParameters, + resultSet -> { Set taskExecutionIds = new TreeSet<>(); while (resultSet.next()) { - taskExecutionIds - .add(resultSet.getLong("TASK_EXECUTION_ID")); + taskExecutionIds.add(resultSet.getLong("TASK_EXECUTION_ID")); } return taskExecutionIds; - } }); } catch (DataAccessException e) { diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionMetadataDao.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionMetadataDao.java index 878875c93d..0f85163dbb 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionMetadataDao.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JdbcDataflowTaskExecutionMetadataDao.java @@ -33,6 +33,7 @@ import org.springframework.cloud.dataflow.server.repository.support.AppDeploymentRequestMixin; import org.springframework.cloud.dataflow.server.repository.support.Order; import org.springframework.cloud.dataflow.server.repository.support.PagingQueryProvider; +import org.springframework.cloud.dataflow.server.repository.support.SchemaUtilities; import org.springframework.cloud.dataflow.server.repository.support.SqlPagingQueryProviderFactoryBean; import org.springframework.cloud.dataflow.server.service.impl.ResourceDeserializer; import org.springframework.cloud.dataflow.server.service.impl.ResourceMixin; @@ -52,23 +53,21 @@ * JDBC implementation for the {@code DataflowTaskExecutionMetadataDao} * * @author Michael Minella - * @since 2.3 + * @author Corneil du Plessis * @see DataflowTaskExecutionMetadataDao + * @since 2.3 */ public class JdbcDataflowTaskExecutionMetadataDao implements DataflowTaskExecutionMetadataDao { - private static final String INSERT_SQL = "INSERT INTO task_execution_metadata (id, task_execution_id, " + - "task_execution_manifest) VALUES (:id, :taskExecutionId, :taskExecutionManifest)"; + private static final String INSERT_SQL = "INSERT INTO %PREFIX%EXECUTION_METADATA (ID, TASK_EXECUTION_ID, " + + "TASK_EXECUTION_MANIFEST) VALUES (:id, :taskExecutionId, :taskExecutionManifest)"; - private static final String FIND_MANIFEST_BY_TASK_EXECUTION_ID = "select m.task_execution_manifest as task_execution_manifest " + - "from task_execution_metadata m inner join " + - "TASK_EXECUTION e on m.task_execution_id = e.TASK_EXECUTION_ID " + - "where e.TASK_EXECUTION_ID = :taskExecutionId"; + private static final String FIND_MANIFEST_BY_TASK_EXECUTION_ID = "SELECT M.TASK_EXECUTION_MANIFEST AS TASK_EXECUTION_MANIFEST " + + "FROM %PREFIX%EXECUTION_METADATA M INNER JOIN " + + "%PREFIX%EXECUTION E ON M.TASK_EXECUTION_ID = E.TASK_EXECUTION_ID " + + "WHERE E.TASK_EXECUTION_ID = :taskExecutionId"; - private static final String DELETE_MANIFEST_BY_TASK_EXECUTION_IDS = - "DELETE FROM task_execution_metadata " + - "WHERE task_execution_id " + - "IN (:taskExecutionIds)"; + private static final String DELETE_MANIFEST_BY_TASK_EXECUTION_IDS = "DELETE FROM %PREFIX%EXECUTION_METADATA WHERE 
TASK_EXECUTION_ID IN (:taskExecutionIds)"; private final NamedParameterJdbcTemplate jdbcTemplate; @@ -78,9 +77,14 @@ public class JdbcDataflowTaskExecutionMetadataDao implements DataflowTaskExecuti private final DataSource dataSource; - public JdbcDataflowTaskExecutionMetadataDao(DataSource dataSource, - DataFieldMaxValueIncrementer incrementer) { + private final String tablePrefix; + public JdbcDataflowTaskExecutionMetadataDao( + DataSource dataSource, + DataFieldMaxValueIncrementer incrementer, + String prefix + ) { + this.tablePrefix = prefix; this.incrementer = incrementer; this.jdbcTemplate = new NamedParameterJdbcTemplate(dataSource); @@ -108,9 +112,8 @@ public void save(TaskExecution taskExecution, TaskManifest manifest) { .addValue("taskExecutionId", taskExecution.getExecutionId()) .addValue("taskExecutionManifest", manifestJson); - this.jdbcTemplate.update(INSERT_SQL, queryParameters); - } - catch (JsonProcessingException e) { + this.jdbcTemplate.update(SchemaUtilities.getQuery(INSERT_SQL, tablePrefix), queryParameters); + } catch (JsonProcessingException e) { throw new IllegalArgumentException("Unable to serialize manifest", e); } } @@ -118,14 +121,14 @@ public void save(TaskExecution taskExecution, TaskManifest manifest) { @Override public TaskManifest getLatestManifest(String taskName) { Map sortKeys = new HashMap<>(1); - sortKeys.put("e.TASK_EXECUTION_ID", Order.DESCENDING); + sortKeys.put("E.TASK_EXECUTION_ID", Order.DESCENDING); SqlPagingQueryProviderFactoryBean sqlPagingQueryProviderFactoryBean = new SqlPagingQueryProviderFactoryBean(); sqlPagingQueryProviderFactoryBean.setDataSource(this.dataSource); - sqlPagingQueryProviderFactoryBean.setSelectClause("task_execution_manifest"); - sqlPagingQueryProviderFactoryBean.setFromClause("task_execution_metadata m inner join TASK_EXECUTION e on m.task_execution_id = e.TASK_EXECUTION_ID"); - sqlPagingQueryProviderFactoryBean.setWhereClause("e.TASK_NAME = :taskName"); + sqlPagingQueryProviderFactoryBean.setSelectClause("TASK_EXECUTION_MANIFEST"); + sqlPagingQueryProviderFactoryBean.setFromClause(SchemaUtilities.getQuery("%PREFIX%EXECUTION_METADATA M INNER JOIN %PREFIX%EXECUTION E ON M.TASK_EXECUTION_ID = E.TASK_EXECUTION_ID", tablePrefix)); + sqlPagingQueryProviderFactoryBean.setWhereClause("E.TASK_NAME = :taskName"); sqlPagingQueryProviderFactoryBean.setSortKeys(sortKeys); try { @@ -139,17 +142,14 @@ public TaskManifest getLatestManifest(String taskName) { return this.jdbcTemplate.queryForObject(queryProvider.getPageQuery(PageRequest.of(0, 1)), queryParameters, (resultSet, i) -> { try { - return objectMapper.readValue(resultSet.getString("task_execution_manifest"), TaskManifest.class); - } - catch (IOException e) { + return objectMapper.readValue(resultSet.getString("TASK_EXECUTION_MANIFEST"), TaskManifest.class); + } catch (IOException e) { throw new IllegalArgumentException("Unable to deserialize manifest", e); } }); - } - catch (EmptyResultDataAccessException erdae) { + } catch (EmptyResultDataAccessException erdae) { return null; - } - catch (Exception e) { + } catch (Exception e) { throw new IllegalStateException("Unable to generate query", e); } } @@ -160,18 +160,16 @@ public TaskManifest findManifestById(Long id) { .addValue("taskExecutionId", id); try { - return this.jdbcTemplate.queryForObject(FIND_MANIFEST_BY_TASK_EXECUTION_ID, + return this.jdbcTemplate.queryForObject(SchemaUtilities.getQuery(FIND_MANIFEST_BY_TASK_EXECUTION_ID, tablePrefix), queryParameters, (resultSet, i) -> { try { - return 
objectMapper.readValue(resultSet.getString("task_execution_manifest"), TaskManifest.class); - } - catch (IOException e) { + return objectMapper.readValue(resultSet.getString("TASK_EXECUTION_MANIFEST"), TaskManifest.class); + } catch (IOException e) { throw new IllegalArgumentException("Unable to deserialize manifest", e); } }); - } - catch (EmptyResultDataAccessException erdae) { + } catch (EmptyResultDataAccessException erdae) { return null; } } @@ -180,6 +178,6 @@ public TaskManifest findManifestById(Long id) { public int deleteManifestsByTaskExecutionIds(Set taskExecutionIds) { final MapSqlParameterSource queryParameters = new MapSqlParameterSource() .addValue("taskExecutionIds", taskExecutionIds); - return this.jdbcTemplate.update(DELETE_MANIFEST_BY_TASK_EXECUTION_IDS, queryParameters); + return this.jdbcTemplate.update(SchemaUtilities.getQuery(DELETE_MANIFEST_BY_TASK_EXECUTION_IDS, tablePrefix), queryParameters); } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JobExecutionDaoContainer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JobExecutionDaoContainer.java new file mode 100644 index 0000000000..c9da1a1a86 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JobExecutionDaoContainer.java @@ -0,0 +1,65 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.repository; + +import javax.sql.DataSource; +import java.util.HashMap; +import java.util.Map; + +import org.springframework.batch.core.repository.dao.JobExecutionDao; +import org.springframework.cloud.dataflow.core.database.support.DatabaseType; +import org.springframework.cloud.dataflow.core.database.support.MultiSchemaIncrementerFactory; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.dataflow.server.batch.JdbcSearchableJobExecutionDao; +import org.springframework.cloud.dataflow.server.batch.SearchableJobExecutionDao; +import org.springframework.cloud.dataflow.server.controller.NoSuchSchemaTargetException; +import org.springframework.cloud.dataflow.server.repository.support.JdbcParameterUtils; +import org.springframework.jdbc.support.JdbcUtils; +import org.springframework.util.StringUtils; + +/** + * Provides a container of {@link SearchableJobExecutionDao} for each schema target. 
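Note: the per-schema-target containers introduced from here on (JobExecutionDaoContainer, JobRepositoryContainer, JobExplorerContainer, JobServiceContainer, TaskBatchDaoContainer, TaskExecutionDaoContainer) all share one lookup pattern: build one delegate per SchemaVersionTarget at startup, key it by target name, and fall back to the default target when none is given. A minimal sketch of that pattern, with illustrative names only (the generic delegate type and the default target name "boot2" are assumptions, not taken from this patch):

    import java.util.HashMap;
    import java.util.Map;

    // Sketch of the container pattern; not one of the actual SCDF classes.
    class PerSchemaTargetContainer<D> {
        private static final String DEFAULT_TARGET = "boot2"; // assumed default target name
        private final Map<String, D> container = new HashMap<>();

        void register(String schemaTarget, D delegate) {
            container.put(schemaTarget, delegate);
        }

        D get(String schemaTarget) {
            // Blank input falls back to the default target, mirroring
            // SchemaVersionTarget.defaultTarget().getName() in the containers below.
            String key = (schemaTarget == null || schemaTarget.isEmpty()) ? DEFAULT_TARGET : schemaTarget;
            D delegate = container.get(key);
            if (delegate == null) {
                // The containers below throw NoSuchSchemaTargetException here.
                throw new IllegalArgumentException("No such schema target: " + key);
            }
            return delegate;
        }
    }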
+ * @author Corneil du Plessis + */ +public class JobExecutionDaoContainer { + private final Map container = new HashMap<>(); + + public JobExecutionDaoContainer(DataSource dataSource, SchemaService schemaService) { + for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + JdbcSearchableJobExecutionDao jdbcSearchableJobExecutionDao = new JdbcSearchableJobExecutionDao(); + jdbcSearchableJobExecutionDao.setDataSource(dataSource); + jdbcSearchableJobExecutionDao.setTablePrefix(target.getBatchPrefix()); + try { + jdbcSearchableJobExecutionDao.afterPropertiesSet(); + container.put(target.getName(), jdbcSearchableJobExecutionDao); + } catch (Throwable x) { + throw new RuntimeException("Exception creating JdbcSearchableJobExecutionDao from:" + target.getName(), x); + } + } + } + + public SearchableJobExecutionDao get(String schemaTarget) { + if(!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + if(!container.containsKey(schemaTarget)) { + throw new NoSuchSchemaTargetException(schemaTarget); + } + return container.get(schemaTarget); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JobRepositoryContainer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JobRepositoryContainer.java new file mode 100644 index 0000000000..c3914de4b1 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/JobRepositoryContainer.java @@ -0,0 +1,59 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.cloud.dataflow.server.repository; + +import javax.sql.DataSource; +import java.util.HashMap; +import java.util.Map; + +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.dataflow.server.controller.NoSuchSchemaTargetException; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.util.StringUtils; + +public class JobRepositoryContainer { + private final Map container = new HashMap<>(); + + public JobRepositoryContainer(DataSource dataSource, PlatformTransactionManager transactionManager, SchemaService schemaService) { + for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + JobRepositoryFactoryBean factoryBean = new JobRepositoryFactoryBean(); + factoryBean.setDataSource(dataSource); + factoryBean.setTablePrefix(target.getBatchPrefix()); + factoryBean.setTransactionManager(transactionManager); + + try { + factoryBean.afterPropertiesSet(); + container.put(target.getName(), factoryBean.getObject()); + } catch (Throwable x) { + throw new RuntimeException("Exception creating JobRepository for:" + target.getName(), x); + } + } + } + + public JobRepository get(String schemaTarget) { + if(!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + if(!container.containsKey(schemaTarget)) { + throw new NoSuchSchemaTargetException(schemaTarget); + } + return container.get(schemaTarget); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/NoSuchTaskExecutionException.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/NoSuchTaskExecutionException.java index f7ed7b657a..52df3e674e 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/NoSuchTaskExecutionException.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/NoSuchTaskExecutionException.java @@ -36,8 +36,11 @@ public class NoSuchTaskExecutionException extends RuntimeException { * * @param id the id of the {@link TaskExecution} that could not be found */ - public NoSuchTaskExecutionException(long id) { - super("Could not find TaskExecution with id " + id); + public NoSuchTaskExecutionException(long id, String schemaTarget) { + super("Could not find TaskExecution with id " + id + " for schema target " + schemaTarget); + } + public NoSuchTaskExecutionException(String externalExecutionId, String platform) { + super("Could not find TaskExecution with id " + externalExecutionId + " for platform " + platform); } /** @@ -45,7 +48,7 @@ public NoSuchTaskExecutionException(long id) { * * @param ids the ids of the {@link TaskExecution} that could not be found */ - public NoSuchTaskExecutionException(Set ids) { - super("Could not find TaskExecutions with the following ids: " + StringUtils.collectionToDelimitedString(ids, ", ")); + public NoSuchTaskExecutionException(Set ids, String schemaTarget) { + super("Could not find TaskExecutions for schema target " + schemaTarget + " with the following ids: " + StringUtils.collectionToDelimitedString(ids, ", ")); } } diff --git 
a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskBatchDaoContainer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskBatchDaoContainer.java new file mode 100644 index 0000000000..14c38db946 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskBatchDaoContainer.java @@ -0,0 +1,47 @@ +/* + * Copyright 2023 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.repository; + +import javax.sql.DataSource; +import java.util.HashMap; +import java.util.Map; + +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.task.batch.listener.TaskBatchDao; +import org.springframework.cloud.task.batch.listener.support.JdbcTaskBatchDao; +import org.springframework.util.StringUtils; + +/** + * Provides a container of {@link TaskBatchDao} for each schema target + * @author Corneil du Plessis + */ +public class TaskBatchDaoContainer { + private final Map taskBatchDaoContainer = new HashMap<>(); + + public TaskBatchDaoContainer(DataSource dataSource, SchemaService schemaService) { + for(SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + taskBatchDaoContainer.put(target.getName(), new JdbcTaskBatchDao(dataSource, target.getTaskPrefix())); + } + } + public TaskBatchDao get(String schemaTarget) { + if(!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + return taskBatchDaoContainer.get(schemaTarget); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskDefinitionRepository.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskDefinitionRepository.java index c3b87a5da0..eb7464fae6 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskDefinitionRepository.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/TaskDefinitionRepository.java @@ -45,4 +45,5 @@ public interface TaskDefinitionRepository extends KeyValueRepository taskExecutionDaoContainer = new HashMap<>(); + + public TaskExecutionDaoContainer(DataSource dataSource, SchemaService schemaService) { + for(SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + TaskExecutionDaoFactoryBean factoryBean = new MultiSchemaTaskExecutionDaoFactoryBean(dataSource, target.getTaskPrefix()); + try { + this.taskExecutionDaoContainer.put(target.getName(), factoryBean.getObject()); + } catch (Throwable x) { + throw new RuntimeException("Exception creating TaskExecutionDao for " + target.getName(), x); + } + } + } + + public 
TaskExecutionDao get(String schemaTarget) { + if(!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + return taskExecutionDaoContainer.get(schemaTarget); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/MariaDBPagingQueryProvider.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/MariaDBPagingQueryProvider.java new file mode 100644 index 0000000000..5844c0ef56 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/MariaDBPagingQueryProvider.java @@ -0,0 +1,35 @@ +/* + * Copyright 2016 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.cloud.dataflow.server.repository.support; + +import org.springframework.data.domain.Pageable; + +/** + * MariaDB implementation of a {@link PagingQueryProvider} using database specific features. + * + * @author Glenn Renfro + * @author Corneil du Plessis + */ +public class MariaDBPagingQueryProvider extends AbstractSqlPagingQueryProvider { + @Override + public String getPageQuery(Pageable pageable) { + String topClause = "LIMIT " + pageable.getOffset() + ", " + + pageable.getPageSize(); + return SqlPagingQueryUtils.generateLimitJumpToQuery(this, topClause); + } + +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SchemaUtilities.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SchemaUtilities.java new file mode 100644 index 0000000000..e5516585bd --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SchemaUtilities.java @@ -0,0 +1,15 @@ +package org.springframework.cloud.dataflow.server.repository.support; + +import org.springframework.util.StringUtils; + +public class SchemaUtilities { + private SchemaUtilities() { + } + + public static String getQuery(String query, String prefix, String defaultPrefix) { + return StringUtils.replace(query, "%PREFIX%", prefix != null ?
prefix : defaultPrefix); + } + public static String getQuery(String query, String prefix) { + return StringUtils.replace(query, "%PREFIX%", prefix); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SqlPagingQueryProviderFactoryBean.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SqlPagingQueryProviderFactoryBean.java index 776e8e504a..545946f57e 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SqlPagingQueryProviderFactoryBean.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/repository/support/SqlPagingQueryProviderFactoryBean.java @@ -16,12 +16,14 @@ package org.springframework.cloud.dataflow.server.repository.support; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import javax.sql.DataSource; import org.springframework.beans.factory.FactoryBean; +import org.springframework.cloud.dataflow.core.database.support.DatabaseType; import org.springframework.jdbc.support.MetaDataAccessException; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -47,16 +49,19 @@ public class SqlPagingQueryProviderFactoryBean implements FactoryBean sortKeys; - private Map providers = new HashMap(); - - { - providers.put(DatabaseType.HSQL, new HsqlPagingQueryProvider()); - providers.put(DatabaseType.H2, new H2PagingQueryProvider()); - providers.put(DatabaseType.MYSQL, new MySqlPagingQueryProvider()); - providers.put(DatabaseType.POSTGRES, new PostgresPagingQueryProvider()); - providers.put(DatabaseType.ORACLE, new OraclePagingQueryProvider()); - providers.put(DatabaseType.SQLSERVER, new SqlServerPagingQueryProvider()); - providers.put(DatabaseType.DB2, new Db2PagingQueryProvider()); + private final static Map providers; + + static { + Map providerMap = new HashMap(); + providerMap.put(DatabaseType.HSQL, new HsqlPagingQueryProvider()); + providerMap.put(DatabaseType.H2, new H2PagingQueryProvider()); + providerMap.put(DatabaseType.MYSQL, new MySqlPagingQueryProvider()); + providerMap.put(DatabaseType.MARIADB, new MariaDBPagingQueryProvider()); + providerMap.put(DatabaseType.POSTGRES, new PostgresPagingQueryProvider()); + providerMap.put(DatabaseType.ORACLE, new OraclePagingQueryProvider()); + providerMap.put(DatabaseType.SQLSERVER, new SqlServerPagingQueryProvider()); + providerMap.put(DatabaseType.DB2, new Db2PagingQueryProvider()); + providers = Collections.unmodifiableMap(providerMap); } /** diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/JobExplorerContainer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/JobExplorerContainer.java new file mode 100644 index 0000000000..be2be2b58e --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/JobExplorerContainer.java @@ -0,0 +1,40 @@ +package org.springframework.cloud.dataflow.server.service; + +import javax.sql.DataSource; +import java.util.HashMap; +import java.util.Map; + +import org.springframework.batch.core.explore.JobExplorer; +import org.springframework.batch.core.explore.support.JobExplorerFactoryBean; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; 
+import org.springframework.cloud.dataflow.server.controller.NoSuchSchemaTargetException; +import org.springframework.util.StringUtils; + +public class JobExplorerContainer { + private final Map container = new HashMap<>(); + + public JobExplorerContainer(DataSource dataSource, SchemaService schemaService) { + for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + JobExplorerFactoryBean factoryBean = new JobExplorerFactoryBean(); + factoryBean.setDataSource(dataSource); + factoryBean.setTablePrefix(target.getBatchPrefix()); + try { + factoryBean.afterPropertiesSet(); + container.put(target.getName(), factoryBean.getObject()); + } catch (Throwable x) { + throw new RuntimeException("Exception creating JobExplorer for " + target.getName(), x); + } + } + } + + public JobExplorer get(String schemaTarget) { + if(!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + } + if(!container.containsKey(schemaTarget)) { + throw new NoSuchSchemaTargetException(schemaTarget); + } + return container.get(schemaTarget); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/JobServiceContainer.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/JobServiceContainer.java new file mode 100644 index 0000000000..91e4629f01 --- /dev/null +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/JobServiceContainer.java @@ -0,0 +1,58 @@ +package org.springframework.cloud.dataflow.server.service; + +import javax.sql.DataSource; +import java.util.HashMap; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.springframework.batch.core.launch.support.SimpleJobLauncher; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; +import org.springframework.cloud.dataflow.server.batch.JobService; +import org.springframework.cloud.dataflow.server.batch.SimpleJobServiceFactoryBean; +import org.springframework.cloud.dataflow.server.controller.NoSuchSchemaTargetException; +import org.springframework.cloud.dataflow.server.repository.JobRepositoryContainer; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.util.StringUtils; + +public class JobServiceContainer { + private final static Logger logger = LoggerFactory.getLogger(JobServiceContainer.class); + private final Map container = new HashMap<>(); + + public JobServiceContainer( + DataSource dataSource, + PlatformTransactionManager platformTransactionManager, + SchemaService schemaService, + JobRepositoryContainer jobRepositoryContainer, + JobExplorerContainer jobExplorerContainer + ) { + + for(SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + SimpleJobServiceFactoryBean factoryBean = new SimpleJobServiceFactoryBean(); + factoryBean.setDataSource(dataSource); + factoryBean.setTransactionManager(platformTransactionManager); + factoryBean.setJobLauncher(new SimpleJobLauncher()); + factoryBean.setJobExplorer(jobExplorerContainer.get(target.getName())); + factoryBean.setJobRepository(jobRepositoryContainer.get(target.getName())); + factoryBean.setTablePrefix(target.getBatchPrefix()); + try { + factoryBean.afterPropertiesSet(); + container.put(target.getName(), factoryBean.getObject()); + } catch (Throwable x) { + throw new RuntimeException("Exception 
creating JobService for " + target.getName(), x); + } + } + } + public JobService get(String schemaTarget) { + if(!StringUtils.hasText(schemaTarget)) { + schemaTarget = SchemaVersionTarget.defaultTarget().getName(); + logger.info("get:default={}", schemaTarget); + } + if(!container.containsKey(schemaTarget)) { + throw new NoSuchSchemaTargetException(schemaTarget); + } + return container.get(schemaTarget); + } +} diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskDeleteService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskDeleteService.java index 1a9f44fec6..5fa742a279 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskDeleteService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskDeleteService.java @@ -33,7 +33,7 @@ public interface TaskDeleteService { * * @param id the execution id */ - void cleanupExecution(long id); + void cleanupExecution(long id, String schemaTarget); /** * Cleanup the resources that resulted from running the task with the given execution @@ -42,14 +42,30 @@ * @param actionsAsSet the actions * @param ids the ids */ - void cleanupExecutions(Set actionsAsSet, Set ids); + void cleanupExecutions(Set actionsAsSet, Set ids, String schemaTarget); + + /** + * Clean up the resources that resulted from running the task with the given name and actions. + * + * @param actionsAsSet the actions + * @param taskName the task name + * @param completed the completion state of the task executions + */ + void cleanupExecutions(Set actionsAsSet, String taskName, boolean completed); /** * Delete one or more Task executions. * * @param ids Collection of task execution ids to delete. Must contain at least 1 id. */ - void deleteTaskExecutions(Set ids); + void deleteTaskExecutions(Set ids, String schemaTarget); + + /** + * Delete task executions by name and execution state. + * @param taskName the name of the task executions + * @param onlyCompleted indicator to delete only completed tasks + */ + void deleteTaskExecutions(String taskName, boolean onlyCompleted); /** * Destroy the task definition.
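Note: with these signatures, every id-based cleanup and delete path is scoped to a single schema target, while the name-based overloads resolve targets internally. A hedged caller sketch (the service reference and the target name "boot3" are assumptions):

    // Illustrative use of the reworked TaskDeleteService.
    void cleanupBoot3Executions(TaskDeleteService taskDeleteService) {
        Set<Long> ids = new HashSet<>(Arrays.asList(1L, 2L, 3L)); // executions in one schema target
        taskDeleteService.cleanupExecutions(
                EnumSet.of(TaskExecutionControllerDeleteAction.CLEANUP,
                        TaskExecutionControllerDeleteAction.REMOVE_DATA),
                ids, "boot3"); // "boot3" is an assumed schema target name
        // The name-based overload resolves schema targets itself:
        taskDeleteService.deleteTaskExecutions("my-task", true); // true: only completed executions
    }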
If it is a Composed Task then the task definitions diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionInfoService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionInfoService.java index bb598239a7..8367916a27 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionInfoService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionInfoService.java @@ -18,6 +18,7 @@ import java.util.List; import java.util.Map; +import java.util.Set; import org.springframework.cloud.dataflow.core.AllPlatformsTaskExecutionInformation; import org.springframework.cloud.dataflow.server.service.impl.TaskExecutionInformation; @@ -58,4 +59,6 @@ TaskExecutionInformation findTaskExecutionInformation(String taskName, * @since 2.3 */ List createTaskDeploymentRequests(String taskName, String dslText); + + Set composedTaskChildNames(String taskName); } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionService.java index 988bc97ec4..592924fd18 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskExecutionService.java @@ -19,7 +19,9 @@ import java.util.Map; import java.util.Set; +import org.springframework.cloud.dataflow.core.LaunchResponse; import org.springframework.cloud.dataflow.core.TaskManifest; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; /** * Provides Task related services. @@ -43,7 +45,7 @@ public interface TaskExecutionService { * @param commandLineArgs Optional runtime commandline argument * @return the taskExecutionId for the executed task. */ - long executeTask(String taskName, Map taskDeploymentProperties, List commandLineArgs); + LaunchResponse executeTask(String taskName, Map taskDeploymentProperties, List commandLineArgs); /** * Retrieve logs for the task application. @@ -52,14 +54,14 @@ public interface TaskExecutionService { * @param taskId the ID that uniquely identifies the task * @return the logs of the task application. */ - String getLog(String platformName, String taskId); + String getLog(String platformName, String taskId, String schemaTarget); /** * Request the platform to stop the task executions for the ids provided. * * @param ids a set of ids for the task executions to be stopped. */ - void stopTaskExecution(Set ids); + void stopTaskExecution(Set ids, String schemaTarget); /** * Request the platform to stop the task executions for the ids provided. @@ -67,14 +69,14 @@ public interface TaskExecutionService { * @param ids a set of ids for the task executions to be stopped. * @param platform The name of the platform where the tasks are executing. */ - void stopTaskExecution(Set ids, String platform); + void stopTaskExecution(Set ids, String schemaTarget, String platform); /** * Retrieve the TaskManifest for the execution id provided * @param id task execution id * @return {@code TaskManifest} or null if not found.
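Note: because executeTask now returns a LaunchResponse rather than a bare id, callers keep both the execution id and its schema target and pass the pair to every follow-up call. A sketch, assuming LaunchResponse exposes getExecutionId() and getSchemaTarget() accessors (inferred from its usage in this patch, not confirmed signatures):

    // Illustrative caller of the new launch contract.
    void launchAndStop(TaskExecutionService taskExecutionService) {
        LaunchResponse response = taskExecutionService.executeTask(
                "my-task", Collections.emptyMap(), Collections.emptyList());
        long executionId = response.getExecutionId();     // assumed accessor
        String schemaTarget = response.getSchemaTarget(); // assumed accessor
        // Every follow-up call carries the schema target along with the id:
        TaskManifest manifest = taskExecutionService.findTaskManifestById(executionId, schemaTarget);
        taskExecutionService.stopTaskExecution(Collections.singleton(executionId), schemaTarget);
    }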
*/ - TaskManifest findTaskManifestById(Long id); + TaskManifest findTaskManifestById(Long id, String schemaTarget); /** * Returns all the task execution IDs with the option to include only the completed task executions. diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskJobService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskJobService.java index f79d8b05c1..4e3ac6110b 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskJobService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/TaskJobService.java @@ -32,7 +32,9 @@ import org.springframework.cloud.dataflow.server.batch.JobExecutionWithStepCount; import org.springframework.cloud.dataflow.server.job.support.JobNotRestartableException; import org.springframework.cloud.task.repository.TaskExecution; +import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; +import org.springframework.web.bind.annotation.RequestParam; /** * Repository that retrieves Tasks and JobExecutions/Instances and the associations @@ -52,7 +54,7 @@ public interface TaskJobService { * @throws NoSuchJobExecutionException in the event that a job execution id specified is * not present when looking up stepExecutions for the result. */ - List listJobExecutions(Pageable pageable) throws NoSuchJobExecutionException; + Page listJobExecutions(Pageable pageable) throws NoSuchJobExecutionException; /** * Retrieves Pageable list of {@link JobExecutionWithStepCount} from the JobRepository @@ -63,8 +65,7 @@ public interface TaskJobService { * @return List containing {@link JobExecutionWithStepCount}s. * @throws NoSuchJobException if the job with the given name does not exist. */ - List listJobExecutionsForJobWithStepCount(Pageable pageable, String jobName) - throws NoSuchJobException; + Page listJobExecutionsForJobWithStepCount(Pageable pageable, String jobName) throws NoSuchJobException; /** * Retrieves a JobExecution from the JobRepository and matches it with a task id. @@ -74,7 +75,7 @@ List listJobExecutionsForJobWithStepCount(Pageable pageable, S * @throws NoSuchJobExecutionException if the specified job execution for the id does not * exist. */ - TaskJobExecution getJobExecution(long id) throws NoSuchJobExecutionException; + TaskJobExecution getJobExecution(long id, String schemaTarget) throws NoSuchJobExecutionException; /** * Retrieves Pageable list of {@link JobInstanceExecutions} from the JobRepository with a @@ -85,8 +86,7 @@ List listJobExecutionsForJobWithStepCount(Pageable pageable, S * @return List containing {@link JobInstanceExecutions}. * @throws NoSuchJobException if the job for the jobName specified does not exist. */ - List listTaskJobInstancesForJobName(Pageable pageable, String jobName) - throws NoSuchJobException; + Page listTaskJobInstancesForJobName(Pageable pageable, String jobName) throws NoSuchJobException; /** * Retrieves a {@link JobInstance} from the JobRepository and matches it with the @@ -97,33 +97,7 @@ List listTaskJobInstancesForJobName(Pageable pageable, St * @throws NoSuchJobInstanceException if job instance id does not exist. * @throws NoSuchJobException if the job for the job instance does not exist. 
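Note: returning Page instead of List also accounts for the count methods removed just below, since a Page presumably carries its own totals. A short sketch of consuming the new signatures (the service reference is assumed):

    // Totals now come from the Page itself rather than separate count methods.
    void pageThroughExecutions(TaskJobService taskJobService) throws Exception {
        Page<TaskJobExecution> page = taskJobService.listJobExecutions(PageRequest.of(0, 20));
        long total = page.getTotalElements(); // stands in for the removed countJobExecutions()
        for (TaskJobExecution execution : page.getContent()) {
            // each entry pairs a JobExecution with its task execution id
        }
    }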
*/ - JobInstanceExecutions getJobInstance(long id) throws NoSuchJobInstanceException, NoSuchJobException; - - /** - * Retrieves the total number of job instances for a job name. - * - * @param jobName the name of the job instance. - * @return the number of job instances associated with the jobName. - * @throws NoSuchJobException if the job for jobName specified does not exist. - */ - int countJobInstances(String jobName) throws NoSuchJobException; - - /** - * Retrieves the total number of the job executions. - * - * @return the total number of job executions. - */ - int countJobExecutions(); - - /** - * Retrieves the total number {@link JobExecution} that match a specific job name. - * - * @param jobName the job name to findByTaskNameContains. - * @param status the status of the job execution - * @return the number of {@link JobExecution}s that match the job name. - * @throws NoSuchJobException if the job for the jobName does not exist. - */ - int countJobExecutionsForJob(String jobName, BatchStatus status) throws NoSuchJobException; + JobInstanceExecutions getJobInstance(long id, String schemaTarget) throws NoSuchJobInstanceException, NoSuchJobException; /** * Restarts a {@link JobExecution} IF the respective {@link JobExecution} is actually @@ -133,7 +107,7 @@ List listTaskJobInstancesForJobName(Pageable pageable, St * @throws NoSuchJobExecutionException if the JobExecution for the provided id does not * exist. */ - void restartJobExecution(long jobExecutionId) throws NoSuchJobExecutionException; + void restartJobExecution(long jobExecutionId, String schemaTarget) throws NoSuchJobExecutionException; /** * Requests a {@link JobExecution} to stop. @@ -150,7 +124,7 @@ List listTaskJobInstancesForJobName(Pageable pageable, St * not running. * @see org.springframework.cloud.dataflow.server.batch.JobService#stop(Long) */ - void stopJobExecution(long jobExecutionId) throws NoSuchJobExecutionException, JobExecutionNotRunningException; + void stopJobExecution(long jobExecutionId, String schemaTarget) throws NoSuchJobExecutionException, JobExecutionNotRunningException; /** * Retrieves Pageable list of {@link JobExecutionWithStepCount}s from the JobRepository @@ -162,7 +136,7 @@ List listTaskJobInstancesForJobName(Pageable pageable, St * @throws NoSuchJobExecutionException thrown if the job execution specified does not * exist. */ - List listJobExecutionsWithStepCount(Pageable pageable) throws NoSuchJobExecutionException; + Page listJobExecutionsWithStepCount(Pageable pageable) throws NoSuchJobExecutionException; /** * Retrieves Pageable list of {@link JobExecution} from the JobRepository with a specific @@ -174,8 +148,7 @@ List listTaskJobInstancesForJobName(Pageable pageable, St * @return List containing {@link TaskJobExecution}s. * @throws NoSuchJobException if the job with the given name does not exist. */ - List listJobExecutionsForJob(Pageable pageable, String jobName, BatchStatus status) - throws NoSuchJobException; + Page listJobExecutionsForJob(Pageable pageable, String jobName, BatchStatus status) throws NoSuchJobException, NoSuchJobExecutionException; /** * Retrieves Pageable list of {@link JobExecutionWithStepCount} from the JobRepository @@ -187,8 +160,7 @@ List listJobExecutionsForJob(Pageable pageable, String jobName * @return List containing {@link JobExecutionWithStepCount}s. * @throws NoSuchJobException if the job with the given name does not exist. 
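Note: restart and stop are now addressed by the pair (jobExecutionId, schemaTarget), since the same numeric id can exist in both the Boot 2 and Boot 3 schemas. A short sketch with assumed values:

    // Illustrative: both operations are qualified by schema target now.
    void restartThenStop(TaskJobService taskJobService) throws Exception {
        long jobExecutionId = 42L;     // assumed id
        String schemaTarget = "boot3"; // assumed schema target name
        taskJobService.restartJobExecution(jobExecutionId, schemaTarget);
        // For a running execution, stop is qualified the same way:
        taskJobService.stopJobExecution(jobExecutionId, schemaTarget);
    }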
*/ - List listJobExecutionsForJobWithStepCount(Pageable pageable, Date fromDate, Date toDate) - throws NoSuchJobException; + Page listJobExecutionsForJobWithStepCount(Pageable pageable, Date fromDate, Date toDate) throws NoSuchJobException; /** * Retrieves Pageable list of {@link JobExecutionWithStepCount} from the JobRepository @@ -199,8 +171,7 @@ List listJobExecutionsForJobWithStepCount(Pageable pageable, D * @return List containing {@link JobExecutionWithStepCount}s. * @throws NoSuchJobException if the job with the given name does not exist. */ - List listJobExecutionsForJobWithStepCountFilteredByJobInstanceId(Pageable pageable, int jobInstanceId) - throws NoSuchJobException; + Page listJobExecutionsForJobWithStepCountFilteredByJobInstanceId(Pageable pageable, int jobInstanceId, String schemaTarget) throws NoSuchJobException; /** * Retrieves Pageable list of {@link JobExecutionWithStepCount} from the JobRepository @@ -211,6 +182,5 @@ List listJobExecutionsForJobWithStepCountFilteredByJobInstance * @return List containing {@link JobExecutionWithStepCount}s. * @throws NoSuchJobException if the job with the given name does not exist. */ - List listJobExecutionsForJobWithStepCountFilteredByTaskExecutionId(Pageable pageable, int taskExecutionId) - throws NoSuchJobException; + Page listJobExecutionsForJobWithStepCountFilteredByTaskExecutionId(Pageable pageable, int taskExecutionId, String schemaTarget) throws NoSuchJobException; } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskDeleteService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskDeleteService.java index 5b3ef9418b..8a63fe868f 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskDeleteService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskDeleteService.java @@ -16,9 +16,9 @@ package org.springframework.cloud.dataflow.server.service.impl; +import javax.sql.DataSource; import java.util.Collection; import java.util.HashSet; -import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -30,36 +30,38 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; -import javax.sql.DataSource; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer; import org.springframework.cloud.dataflow.audit.service.AuditRecordService; import org.springframework.cloud.dataflow.core.AuditActionType; import org.springframework.cloud.dataflow.core.AuditOperationType; import org.springframework.cloud.dataflow.core.Launcher; import org.springframework.cloud.dataflow.core.TaskDefinition; import org.springframework.cloud.dataflow.core.TaskDeployment; +import org.springframework.cloud.dataflow.core.database.support.DatabaseType; import org.springframework.cloud.dataflow.core.dsl.TaskNode; import org.springframework.cloud.dataflow.core.dsl.TaskParser; import org.springframework.cloud.dataflow.rest.util.ArgumentSanitizer; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; +import org.springframework.cloud.dataflow.schema.service.SchemaService; import 
org.springframework.cloud.dataflow.server.controller.support.TaskExecutionControllerDeleteAction; import org.springframework.cloud.dataflow.server.job.LauncherRepository; -import org.springframework.cloud.dataflow.server.repository.CannotDeleteNonParentTaskExecutionException; import org.springframework.cloud.dataflow.server.repository.DataflowJobExecutionDao; +import org.springframework.cloud.dataflow.server.repository.DataflowJobExecutionDaoContainer; import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDao; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDaoContainer; import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDaoContainer; import org.springframework.cloud.dataflow.server.repository.NoSuchTaskDefinitionException; import org.springframework.cloud.dataflow.server.repository.NoSuchTaskExecutionException; import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; import org.springframework.cloud.dataflow.server.repository.TaskDeploymentRepository; -import org.springframework.cloud.dataflow.server.repository.support.DatabaseType; import org.springframework.cloud.dataflow.server.service.SchedulerService; import org.springframework.cloud.dataflow.server.service.TaskDeleteService; import org.springframework.cloud.deployer.spi.task.TaskLauncher; -import org.springframework.cloud.task.repository.TaskExecution; -import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.jdbc.support.MetaDataAccessException; import org.springframework.orm.ObjectOptimisticLockingFailureException; import org.springframework.transaction.annotation.Transactional; @@ -95,7 +97,7 @@ public class DefaultTaskDeleteService implements TaskDeleteService { /** * Used to read TaskExecutions. 
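Note: the switch to AggregateTaskExplorer matters because AggregateTaskExecution results carry their schema target, so the bulk operations below first group ids by target before touching any per-target DAO. The grouping idiom, extracted for clarity (java.util and java.util.stream imports and the executions list are assumed):

    // Group executions by schema target, then act per target with sorted ids.
    void deleteGroupedByTarget(List<AggregateTaskExecution> executions) {
        Map<String, List<AggregateTaskExecution>> byTarget = executions.stream()
                .collect(Collectors.groupingBy(AggregateTaskExecution::getSchemaTarget));
        byTarget.forEach((schemaTarget, group) -> {
            SortedSet<Long> executionIds = group.stream()
                    .map(AggregateTaskExecution::getExecutionId)
                    .collect(Collectors.toCollection(TreeSet::new));
            // delete or clean up executionIds against the DAOs resolved for schemaTarget
        });
    }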
*/ - private final TaskExplorer taskExplorer; + private final AggregateTaskExplorer taskExplorer; private final LauncherRepository launcherRepository; @@ -105,38 +107,44 @@ public class DefaultTaskDeleteService implements TaskDeleteService { protected final AuditRecordService auditRecordService; - protected final DataflowTaskExecutionDao dataflowTaskExecutionDao; + protected final DataflowTaskExecutionDaoContainer dataflowTaskExecutionDaoContainer; - protected final DataflowJobExecutionDao dataflowJobExecutionDao; + protected final DataflowJobExecutionDaoContainer dataflowJobExecutionDaoContainer; - protected final DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao; + protected final DataflowTaskExecutionMetadataDaoContainer dataflowTaskExecutionMetadataDaoContainer; - private SchedulerService schedulerService; + private final SchedulerService schedulerService; private final ArgumentSanitizer argumentSanitizer = new ArgumentSanitizer(); - private int taskDeleteChunkSize; + private final SchemaService schemaService; - private DataSource dataSource; + private final int taskDeleteChunkSize; - public DefaultTaskDeleteService(TaskExplorer taskExplorer, LauncherRepository launcherRepository, + private final DataSource dataSource; + + public DefaultTaskDeleteService( + AggregateTaskExplorer taskExplorer, + LauncherRepository launcherRepository, TaskDefinitionRepository taskDefinitionRepository, TaskDeploymentRepository taskDeploymentRepository, AuditRecordService auditRecordService, - DataflowTaskExecutionDao dataflowTaskExecutionDao, - DataflowJobExecutionDao dataflowJobExecutionDao, - DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao, + DataflowTaskExecutionDaoContainer dataflowTaskExecutionDaoContainer, + DataflowJobExecutionDaoContainer dataflowJobExecutionDaoContainer, + DataflowTaskExecutionMetadataDaoContainer dataflowTaskExecutionMetadataDaoContainer, SchedulerService schedulerService, + SchemaService schemaService, TaskConfigurationProperties taskConfigurationProperties, - DataSource dataSource) { + DataSource dataSource + ) { Assert.notNull(taskExplorer, "TaskExplorer must not be null"); Assert.notNull(launcherRepository, "LauncherRepository must not be null"); Assert.notNull(taskDefinitionRepository, "TaskDefinitionRepository must not be null"); Assert.notNull(taskDeploymentRepository, "TaskDeploymentRepository must not be null"); Assert.notNull(auditRecordService, "AuditRecordService must not be null"); - Assert.notNull(dataflowTaskExecutionDao, "DataflowTaskExecutionDao must not be null"); - Assert.notNull(dataflowJobExecutionDao, "DataflowJobExecutionDao must not be null"); - Assert.notNull(dataflowTaskExecutionMetadataDao, "DataflowTaskExecutionMetadataDao must not be null"); + Assert.notNull(dataflowTaskExecutionDaoContainer, "DataflowTaskExecutionDaoContainer must not be null"); + Assert.notNull(dataflowJobExecutionDaoContainer, "DataflowJobExecutionDaoContainer must not be null"); + Assert.notNull(dataflowTaskExecutionMetadataDaoContainer, "DataflowTaskExecutionMetadataDaoContainer must not be null"); Assert.notNull(taskConfigurationProperties, "TaskConfigurationProperties must not be null"); Assert.notNull(dataSource, "DataSource must not be null"); @@ -145,103 +153,186 @@ public DefaultTaskDeleteService(TaskExplorer taskExplorer, LauncherRepository la this.taskDefinitionRepository = taskDefinitionRepository; this.taskDeploymentRepository = taskDeploymentRepository; this.auditRecordService = auditRecordService; - this.dataflowTaskExecutionDao = 
dataflowTaskExecutionDao; - this.dataflowJobExecutionDao = dataflowJobExecutionDao; - this.dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDao; + this.dataflowTaskExecutionDaoContainer = dataflowTaskExecutionDaoContainer; + this.dataflowJobExecutionDaoContainer = dataflowJobExecutionDaoContainer; + this.dataflowTaskExecutionMetadataDaoContainer = dataflowTaskExecutionMetadataDaoContainer; this.schedulerService = schedulerService; + this.schemaService = schemaService; this.taskDeleteChunkSize = taskConfigurationProperties.getExecutionDeleteChunkSize(); this.dataSource = dataSource; } @Override - public void cleanupExecution(long id) { - TaskExecution taskExecution = taskExplorer.getTaskExecution(id); + @Transactional + public void cleanupExecution(long id, String schemaTarget) { + performCleanupExecution(id, schemaTarget); + } + + private void performCleanupExecution(long id, String schemaTarget) { + AggregateTaskExecution taskExecution = taskExplorer.getTaskExecution(id, schemaTarget); Assert.notNull(taskExecution, "There was no task execution with id " + id); String launchId = taskExecution.getExternalExecutionId(); if (!StringUtils.hasText(launchId)) { - logger.warn("Did not find External execution ID for taskName = [{}], taskId = [{}]. Nothing to clean up.", - taskExecution.getTaskName(), id); + logger.warn("Did not find External execution ID for taskName = [{}], taskId = [{}]. Nothing to clean up.", taskExecution.getTaskName(), id); return; } TaskDeployment taskDeployment = this.taskDeploymentRepository.findByTaskDeploymentId(launchId); if (taskDeployment == null) { - logger.warn("Did not find TaskDeployment for taskName = [{}], taskId = [{}]. Nothing to clean up.", - taskExecution.getTaskName(), id); + logger.warn("Did not find TaskDeployment for taskName = [{}], taskId = [{}]. Nothing to clean up.", taskExecution.getTaskName(), id); return; } Launcher launcher = launcherRepository.findByName(taskDeployment.getPlatformName()); if (launcher != null) { TaskLauncher taskLauncher = launcher.getTaskLauncher(); taskLauncher.cleanup(launchId); + } else { + logger.info("Could clean up execution for task id " + id + ". Did not find a task platform named " + taskDeployment.getPlatformName()); } - else { - logger.info( - "Could clean up execution for task id " + id + ". 
Did not find a task platform named " + - taskDeployment.getPlatformName()); + } + + @Override + @Transactional + public void cleanupExecutions(Set actionsAsSet, String taskName, boolean completed) { + List tasks = this.taskExplorer.findTaskExecutionsByName(taskName, completed); + final Set parentExecutions = new HashSet<>(); + final Set childExecutions = new HashSet<>(); + boolean removeData = actionsAsSet.contains(TaskExecutionControllerDeleteAction.REMOVE_DATA); + boolean cleanUp = actionsAsSet.contains(TaskExecutionControllerDeleteAction.CLEANUP); + for (AggregateTaskExecution taskExecution : tasks) { + if (taskExecution.getParentExecutionId() == null) { + parentExecutions.add(taskExecution); + } else { + childExecutions.add(taskExecution); + } + } + if (cleanUp) { + for (AggregateTaskExecution taskExecution : tasks) { + this.performCleanupExecution(taskExecution.getExecutionId(), taskExecution.getSchemaTarget()); + } + } + + if (removeData) { + if (!childExecutions.isEmpty()) { + deleteTaskExecutions(childExecutions); + } + if (!parentExecutions.isEmpty()) { + Map> parents = parentExecutions.stream() + .collect(Collectors.groupingBy(AggregateTaskExecution::getSchemaTarget)); + for (String schemaTarget : parents.keySet()) { + SortedSet parentIds = parents.get(schemaTarget) + .stream() + .map(AggregateTaskExecution::getExecutionId) + .collect(Collectors.toCollection(TreeSet::new)); + Map> children = this.taskExplorer.findChildTaskExecutions(parentIds, schemaTarget) + .stream() + .collect(Collectors.groupingBy(AggregateTaskExecution::getSchemaTarget)); + for (String group : children.keySet()) { + SortedSet childIds = children.get(group) + .stream() + .map(AggregateTaskExecution::getExecutionId) + .collect(Collectors.toCollection(TreeSet::new)); + this.performDeleteTaskExecutions(childIds, group); + + } + this.performDeleteTaskExecutions(parentIds, schemaTarget); + } + } + } + } + + private void deleteTaskExecutions(Collection taskExecutions) { + Map> executions = taskExecutions.stream() + .collect(Collectors.groupingBy(AggregateTaskExecution::getSchemaTarget)); + for (String schemaTarget : executions.keySet()) { + SortedSet executionIds = executions.get(schemaTarget) + .stream() + .map(AggregateTaskExecution::getExecutionId) + .collect(Collectors.toCollection(TreeSet::new)); + this.performDeleteTaskExecutions(executionIds, schemaTarget); } } @Override - public void cleanupExecutions(Set actionsAsSet, Set ids) { - final SortedSet nonExistingTaskExecutions = new TreeSet<>(); - final SortedSet nonParentTaskExecutions = new TreeSet<>(); - final SortedSet deletableTaskExecutions = new TreeSet<>(); + @Transactional + public void cleanupExecutions(Set actionsAsSet, Set ids, String schemaTarget) { + performCleanupExecutions(actionsAsSet, ids, schemaTarget); + } + private void performCleanupExecutions(Set actionsAsSet, Set ids, String schemaTarget) { + final SortedSet nonExistingTaskExecutions = new TreeSet<>(); + final SortedSet parentExecutions = new TreeSet<>(); + final SortedSet childExecutions = new TreeSet<>(); + boolean removeData = actionsAsSet.contains(TaskExecutionControllerDeleteAction.REMOVE_DATA); + boolean cleanUp = actionsAsSet.contains(TaskExecutionControllerDeleteAction.CLEANUP); for (Long id : ids) { - final TaskExecution taskExecution = this.taskExplorer.getTaskExecution(id); + final AggregateTaskExecution taskExecution = this.taskExplorer.getTaskExecution(id, schemaTarget); if (taskExecution == null) { nonExistingTaskExecutions.add(id); - } - else { - final Long 
parentExecutionId = taskExecution.getParentExecutionId(); - - if (parentExecutionId != null) { - nonParentTaskExecutions.add(parentExecutionId); - } - else { - deletableTaskExecutions.add(taskExecution.getExecutionId()); - } + } else if (taskExecution.getParentExecutionId() == null) { + parentExecutions.add(taskExecution.getExecutionId()); + } else { + childExecutions.add(taskExecution.getExecutionId()); } } - if (!nonExistingTaskExecutions.isEmpty()) { if (nonExistingTaskExecutions.size() == 1) { - throw new NoSuchTaskExecutionException(nonExistingTaskExecutions.first()); - } - else { - throw new NoSuchTaskExecutionException(nonExistingTaskExecutions); + throw new NoSuchTaskExecutionException(nonExistingTaskExecutions.first(), schemaTarget); + } else { + throw new NoSuchTaskExecutionException(nonExistingTaskExecutions, schemaTarget); } } - if (actionsAsSet.contains(TaskExecutionControllerDeleteAction.CLEANUP)) { + if (cleanUp) { for (Long id : ids) { - this.cleanupExecution(id); + this.performCleanupExecution(id, schemaTarget); } } - if (actionsAsSet.contains(TaskExecutionControllerDeleteAction.REMOVE_DATA)) { - if (!deletableTaskExecutions.isEmpty()) { - this.deleteTaskExecutions(deletableTaskExecutions); - } - // delete orphaned child execution ids - else if (deletableTaskExecutions.isEmpty() && !nonParentTaskExecutions.isEmpty()) { - this.deleteTaskExecutions(nonParentTaskExecutions); + + if (removeData) { + if (!childExecutions.isEmpty()) { + this.performDeleteTaskExecutions(childExecutions, schemaTarget); } - else if (!nonParentTaskExecutions.isEmpty()) { - throw new CannotDeleteNonParentTaskExecutionException(nonParentTaskExecutions); + if (!parentExecutions.isEmpty()) { + List children = this.taskExplorer.findChildTaskExecutions(parentExecutions, schemaTarget); + if (!children.isEmpty()) { + this.deleteTaskExecutions(children); + } + this.performDeleteTaskExecutions(parentExecutions, schemaTarget); } } - } @Override @Transactional - public void deleteTaskExecutions(Set taskExecutionIds) { + public void deleteTaskExecutions(Set taskExecutionIds, String schemaTarget) { + performDeleteTaskExecutions(taskExecutionIds, schemaTarget); + } + + @Override + public void deleteTaskExecutions(String taskName, boolean onlyCompleted) { + Map> tasks = this.taskExplorer.findTaskExecutionsByName(taskName, onlyCompleted) + .stream().collect(Collectors.groupingBy(AggregateTaskExecution::getSchemaTarget)); + for (String schemaTarget : tasks.keySet()) { + Set executionIds = tasks.get(schemaTarget) + .stream() + .map(AggregateTaskExecution::getExecutionId) + .collect(Collectors.toSet()); + performDeleteTaskExecutions(executionIds, schemaTarget); + } + } + + private void performDeleteTaskExecutions(Set taskExecutionIds, String schemaTarget) { + logger.info("performDeleteTaskExecutions:{}:{}", schemaTarget, taskExecutionIds); Assert.notEmpty(taskExecutionIds, "You must provide at least 1 task execution id."); - final Set taskExecutionIdsWithChildren = new HashSet<>(taskExecutionIds); + final DataflowTaskExecutionDao dataflowTaskExecutionDao = dataflowTaskExecutionDaoContainer.get(schemaTarget); + final DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDaoContainer.get(schemaTarget); + final Set taskExecutionIdsWithChildren = new HashSet<>(taskExecutionIds); final Set childTaskExecutionIds = dataflowTaskExecutionDao.findChildTaskExecutionIds(taskExecutionIds); - logger.info("Found {} child task execution ids: {}.", childTaskExecutionIds.size(), 
StringUtils.collectionToCommaDelimitedString(childTaskExecutionIds)); + logger.info("Found {} child task execution ids: {}.", + childTaskExecutionIds.size(), + StringUtils.collectionToCommaDelimitedString(childTaskExecutionIds)); taskExecutionIdsWithChildren.addAll(childTaskExecutionIds); final Map auditData = new LinkedHashMap<>(); @@ -254,7 +345,7 @@ public void deleteTaskExecutions(Set taskExecutionIds) { final Set jobExecutionIds = new HashSet<>(); for (Long taskExecutionId : taskExecutionIdsWithChildren) { - jobExecutionIds.addAll(taskExplorer.getJobExecutionIdsByTaskExecutionId(taskExecutionId)); + jobExecutionIds.addAll(taskExplorer.getJobExecutionIdsByTaskExecutionId(taskExecutionId, schemaTarget)); } logger.info("There are {} associated job executions.", jobExecutionIds.size()); @@ -266,7 +357,7 @@ public void deleteTaskExecutions(Set taskExecutionIds) { int chunkSize = getTaskExecutionDeleteChunkSize(this.dataSource); if (!jobExecutionIds.isEmpty()) { - deleteRelatedJobAndStepExecutions(jobExecutionIds, auditData, chunkSize); + deleteRelatedJobAndStepExecutions(jobExecutionIds, auditData, chunkSize, schemaTarget); } // Delete Task Related Data @@ -274,149 +365,141 @@ public void deleteTaskExecutions(Set taskExecutionIds) { auditData.put("Deleted # of Task Executions", taskExecutionIdsWithChildren.size()); auditData.put("Deleted Task Execution IDs", StringUtils.collectionToDelimitedString(taskExecutionIdsWithChildren, ", ")); - final AtomicInteger numberOfDeletedTaskExecutionParamRows = new AtomicInteger(0); - final AtomicInteger numberOfDeletedTaskTaskBatchRelationshipRows = new AtomicInteger(0); - final AtomicInteger numberOfDeletedTaskManifestRows = new AtomicInteger(0); - final AtomicInteger numberOfDeletedTaskExecutionRows = new AtomicInteger(0); + final AtomicInteger numberOfDeletedTaskExecutionParamRows = new AtomicInteger(0); + final AtomicInteger numberOfDeletedTaskTaskBatchRelationshipRows = new AtomicInteger(0); + final AtomicInteger numberOfDeletedTaskManifestRows = new AtomicInteger(0); + final AtomicInteger numberOfDeletedTaskExecutionRows = new AtomicInteger(0); if (chunkSize <= 0) { - numberOfDeletedTaskExecutionParamRows.addAndGet(this.dataflowTaskExecutionDao.deleteTaskExecutionParamsByTaskExecutionIds(taskExecutionIdsWithChildren)); - numberOfDeletedTaskTaskBatchRelationshipRows.addAndGet(this.dataflowTaskExecutionDao.deleteTaskTaskBatchRelationshipsByTaskExecutionIds(taskExecutionIdsWithChildren)); - numberOfDeletedTaskManifestRows.addAndGet(this.dataflowTaskExecutionMetadataDao.deleteManifestsByTaskExecutionIds(taskExecutionIdsWithChildren)); - numberOfDeletedTaskExecutionRows.addAndGet(this.dataflowTaskExecutionDao.deleteTaskExecutionsByTaskExecutionIds(taskExecutionIdsWithChildren)); - } - else { - split(taskExecutionIdsWithChildren, chunkSize) - .stream() - .forEach(taskExecutionIdSubsetList -> { - Set taskExecutionIdSubset = new HashSet<>(taskExecutionIdSubsetList); - numberOfDeletedTaskExecutionParamRows.addAndGet(this.dataflowTaskExecutionDao.deleteTaskExecutionParamsByTaskExecutionIds(taskExecutionIdSubset)); - numberOfDeletedTaskTaskBatchRelationshipRows.addAndGet(this.dataflowTaskExecutionDao.deleteTaskTaskBatchRelationshipsByTaskExecutionIds(taskExecutionIdSubset)); - numberOfDeletedTaskManifestRows.addAndGet(this.dataflowTaskExecutionMetadataDao.deleteManifestsByTaskExecutionIds(taskExecutionIdSubset)); - numberOfDeletedTaskExecutionRows.addAndGet(this.dataflowTaskExecutionDao.deleteTaskExecutionsByTaskExecutionIds(taskExecutionIdSubset)); - }); + 
numberOfDeletedTaskExecutionParamRows.addAndGet(dataflowTaskExecutionDao.deleteTaskExecutionParamsByTaskExecutionIds(taskExecutionIdsWithChildren)); + numberOfDeletedTaskTaskBatchRelationshipRows.addAndGet(dataflowTaskExecutionDao.deleteTaskTaskBatchRelationshipsByTaskExecutionIds( + taskExecutionIdsWithChildren)); + numberOfDeletedTaskManifestRows.addAndGet(dataflowTaskExecutionMetadataDao.deleteManifestsByTaskExecutionIds(taskExecutionIdsWithChildren)); + numberOfDeletedTaskExecutionRows.addAndGet(dataflowTaskExecutionDao.deleteTaskExecutionsByTaskExecutionIds(taskExecutionIdsWithChildren)); + } else { + split(taskExecutionIdsWithChildren, chunkSize).forEach(taskExecutionIdSubsetList -> { + Set taskExecutionIdSubset = new HashSet<>(taskExecutionIdSubsetList); + numberOfDeletedTaskExecutionParamRows.addAndGet(dataflowTaskExecutionDao.deleteTaskExecutionParamsByTaskExecutionIds(taskExecutionIdSubset)); + numberOfDeletedTaskTaskBatchRelationshipRows.addAndGet(dataflowTaskExecutionDao.deleteTaskTaskBatchRelationshipsByTaskExecutionIds( + taskExecutionIdSubset)); + numberOfDeletedTaskManifestRows.addAndGet(dataflowTaskExecutionMetadataDao.deleteManifestsByTaskExecutionIds(taskExecutionIdSubset)); + numberOfDeletedTaskExecutionRows.addAndGet(dataflowTaskExecutionDao.deleteTaskExecutionsByTaskExecutionIds(taskExecutionIdSubset)); + }); } logger.info("Deleted the following Task Execution related data for {} Task Executions:\n" + - "Task Execution Param Rows: {}\n" + - "Task Batch Relationship Rows: {}\n" + - "Task Manifest Rows: {}\n" + - "Task Execution Rows: {}.", + "Task Execution Param Rows: {}\n" + + "Task Batch Relationship Rows: {}\n" + + "Task Manifest Rows: {}\n" + + "Task Execution Rows: {}.", taskExecutionIdsWithChildren.size(), numberOfDeletedTaskExecutionParamRows, numberOfDeletedTaskTaskBatchRelationshipRows, numberOfDeletedTaskManifestRows, numberOfDeletedTaskExecutionRows - ); + ); // Populate Audit Record - auditRecordService.populateAndSaveAuditRecordUsingMapData( - AuditOperationType.TASK, AuditActionType.DELETE, - taskExecutionIdsWithChildren.size() + " Task Execution Delete(s)", auditData, null); + auditRecordService.populateAndSaveAuditRecordUsingMapData(AuditOperationType.TASK, + AuditActionType.DELETE, + taskExecutionIdsWithChildren.size() + " Task Execution Delete(s)", + auditData, + null); } - - private void deleteRelatedJobAndStepExecutions(Set jobExecutionIds, - Map auditData, int chunkSize) { - final Set stepExecutionIds = findStepExecutionIds(jobExecutionIds, chunkSize); + private void deleteRelatedJobAndStepExecutions(Set jobExecutionIds, Map auditData, int chunkSize, String schemaTarget) { + + final Set stepExecutionIds = findStepExecutionIds(jobExecutionIds, chunkSize, schemaTarget); final AtomicInteger numberOfDeletedBatchStepExecutionContextRows = new AtomicInteger(0); if (!stepExecutionIds.isEmpty()) { - deleteBatchStepExecutionContextByStepExecutionIds(stepExecutionIds, chunkSize, numberOfDeletedBatchStepExecutionContextRows); + deleteBatchStepExecutionContextByStepExecutionIds(stepExecutionIds, chunkSize, numberOfDeletedBatchStepExecutionContextRows, schemaTarget); } + deleteStepAndJobExecutionsByJobExecutionId(jobExecutionIds, chunkSize, auditData, numberOfDeletedBatchStepExecutionContextRows, schemaTarget); - deleteStepAndJobExecutionsByJobExecutionId(jobExecutionIds, chunkSize, auditData, numberOfDeletedBatchStepExecutionContextRows); - } - - private Set findStepExecutionIds(Set jobExecutionIds, int chunkSize) { - final Set stepExecutionIds = 
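The chunkSize <= 0 branch above issues one bulk delete per table, while the positive branch pushes split(...) subsets through the same DAO calls and accumulates row counts in AtomicIntegers so the totals can be logged and audited. A hedged, self-contained sketch of that pattern follows, with a Function standing in for the DAO delete; deleteInChunks and deleteFn are illustrative names, not part of the service.

import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.stream.Collectors;

public class ChunkedDeleteSketch {

    // Same contract as the split(...) helper shown further down in this patch:
    // partition the input into groups of at most 'max' elements.
    static <T> Collection<List<T>> split(Collection<T> input, int max) {
        AtomicInteger count = new AtomicInteger(0);
        return input.stream().collect(Collectors.groupingBy(s -> count.getAndIncrement() / max)).values();
    }

    // Applies deleteFn once when there is no limit, otherwise once per chunk,
    // and returns the accumulated number of deleted rows.
    static int deleteInChunks(Set<Long> ids, int chunkSize, Function<Set<Long>, Integer> deleteFn) {
        AtomicInteger deleted = new AtomicInteger(0);
        if (chunkSize <= 0) {
            deleted.addAndGet(deleteFn.apply(ids));
        } else {
            split(ids, chunkSize).forEach(chunk -> deleted.addAndGet(deleteFn.apply(new HashSet<>(chunk))));
        }
        return deleted.get();
    }

    public static void main(String[] args) {
        Set<Long> ids = new TreeSet<>(Arrays.asList(1L, 2L, 3L, 4L, 5L));
        // Stand-in for a DAO call that would issue DELETE ... WHERE ID IN (...).
        int rows = deleteInChunks(ids, 2, chunk -> chunk.size());
        System.out.println(rows); // 5, deleted across chunks of at most 2 ids
    }
}

An AtomicInteger is used rather than a plain int only because the per-chunk lambda needs an effectively final accumulator, which matches the service code above.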
ConcurrentHashMap.newKeySet(); + private Set findStepExecutionIds(Set jobExecutionIds, int chunkSize, String schemaTarget) { + final Set stepExecutionIds = ConcurrentHashMap.newKeySet(); + DataflowJobExecutionDao dataflowJobExecutionDao = dataflowJobExecutionDaoContainer.get(schemaTarget); if (chunkSize <= 0) { stepExecutionIds.addAll(dataflowJobExecutionDao.findStepExecutionIds(jobExecutionIds)); + } else { + split(jobExecutionIds, chunkSize).forEach(jobExecutionIdSubsetList -> { + Set jobExecutionIdSubset = new HashSet<>(jobExecutionIdSubsetList); + stepExecutionIds.addAll(dataflowJobExecutionDao.findStepExecutionIds(jobExecutionIdSubset)); + }); } - else { - split(jobExecutionIds, chunkSize) - .stream() - .forEach(jobExecutionIdSubsetList -> { - Set jobExecutionIdSubset = new HashSet<>(jobExecutionIdSubsetList); - stepExecutionIds.addAll(dataflowJobExecutionDao.findStepExecutionIds(jobExecutionIdSubset)); - }); - } - + return stepExecutionIds; } - - private void deleteBatchStepExecutionContextByStepExecutionIds(Set stepExecutionIds, int chunkSize, AtomicInteger numberOfDeletedBatchStepExecutionContextRows) { + + private void deleteBatchStepExecutionContextByStepExecutionIds( + Set stepExecutionIds, + int chunkSize, + AtomicInteger numberOfDeletedBatchStepExecutionContextRows, + String schemaTarget + ) { + final DataflowJobExecutionDao dataflowJobExecutionDao = dataflowJobExecutionDaoContainer.get(schemaTarget); if (chunkSize <= 0) { - numberOfDeletedBatchStepExecutionContextRows - .addAndGet(dataflowJobExecutionDao.deleteBatchStepExecutionContextByStepExecutionIds(stepExecutionIds)); + numberOfDeletedBatchStepExecutionContextRows.addAndGet(dataflowJobExecutionDao.deleteBatchStepExecutionContextByStepExecutionIds(stepExecutionIds)); - } - else { - split(stepExecutionIds, chunkSize) - .stream() - .forEach(stepExecutionIdSubsetList -> { - Set stepExecutionIdSubset = new HashSet<>(stepExecutionIdSubsetList); - numberOfDeletedBatchStepExecutionContextRows.addAndGet(dataflowJobExecutionDao.deleteBatchStepExecutionContextByStepExecutionIds(stepExecutionIdSubset)); - }); + } else { + split(stepExecutionIds, chunkSize).forEach(stepExecutionIdSubsetList -> { + Set stepExecutionIdSubset = new HashSet<>(stepExecutionIdSubsetList); + numberOfDeletedBatchStepExecutionContextRows.addAndGet(dataflowJobExecutionDao.deleteBatchStepExecutionContextByStepExecutionIds( + stepExecutionIdSubset)); + }); } } - - private void deleteStepAndJobExecutionsByJobExecutionId(Set jobExecutionIds, int chunkSize, Map auditData, AtomicInteger numberOfDeletedBatchStepExecutionContextRows) { - + + private void deleteStepAndJobExecutionsByJobExecutionId( + Set jobExecutionIds, + int chunkSize, + Map auditData, + AtomicInteger numberOfDeletedBatchStepExecutionContextRows, + String schemaTarget + ) { + DataflowJobExecutionDao dataflowJobExecutionDao = dataflowJobExecutionDaoContainer.get(schemaTarget); final AtomicInteger numberOfDeletedBatchStepExecutionRows = new AtomicInteger(0); - final AtomicInteger numberOfDeletedBatchJobExecutionContextRows = new AtomicInteger( - 0); - final AtomicInteger numberOfDeletedBatchJobExecutionParamRows = new AtomicInteger( - 0); + final AtomicInteger numberOfDeletedBatchJobExecutionContextRows = new AtomicInteger(0); + final AtomicInteger numberOfDeletedBatchJobExecutionParamRows = new AtomicInteger(0); final AtomicInteger numberOfDeletedBatchJobExecutionRows = new AtomicInteger(0); - + if (chunkSize <= 0) { - 
numberOfDeletedBatchStepExecutionRows.addAndGet(this.dataflowJobExecutionDao.deleteBatchStepExecutionsByJobExecutionIds(jobExecutionIds)); - numberOfDeletedBatchJobExecutionContextRows.addAndGet(this.dataflowJobExecutionDao.deleteBatchJobExecutionContextByJobExecutionIds(jobExecutionIds)); - numberOfDeletedBatchJobExecutionParamRows.addAndGet(this.dataflowJobExecutionDao.deleteBatchJobExecutionParamsByJobExecutionIds(jobExecutionIds)); - numberOfDeletedBatchJobExecutionRows.addAndGet(this.dataflowJobExecutionDao.deleteBatchJobExecutionByJobExecutionIds(jobExecutionIds)); - } - else { - split(jobExecutionIds, chunkSize) - .stream() - .forEach(jobExecutionIdSubsetList -> {Set jobExecutionIdSubset = new HashSet<>(jobExecutionIdSubsetList); - numberOfDeletedBatchStepExecutionRows.addAndGet(this.dataflowJobExecutionDao.deleteBatchStepExecutionsByJobExecutionIds(jobExecutionIdSubset)); - numberOfDeletedBatchJobExecutionContextRows.addAndGet(this.dataflowJobExecutionDao.deleteBatchJobExecutionContextByJobExecutionIds(jobExecutionIdSubset)); - numberOfDeletedBatchJobExecutionParamRows.addAndGet(this.dataflowJobExecutionDao.deleteBatchJobExecutionParamsByJobExecutionIds(jobExecutionIdSubset)); - numberOfDeletedBatchJobExecutionRows.addAndGet(this.dataflowJobExecutionDao.deleteBatchJobExecutionByJobExecutionIds(jobExecutionIdSubset)); - }); + numberOfDeletedBatchStepExecutionRows.addAndGet(dataflowJobExecutionDao.deleteBatchStepExecutionsByJobExecutionIds(jobExecutionIds)); + numberOfDeletedBatchJobExecutionContextRows.addAndGet(dataflowJobExecutionDao.deleteBatchJobExecutionContextByJobExecutionIds(jobExecutionIds)); + numberOfDeletedBatchJobExecutionParamRows.addAndGet(dataflowJobExecutionDao.deleteBatchJobExecutionParamsByJobExecutionIds(jobExecutionIds)); + numberOfDeletedBatchJobExecutionRows.addAndGet(dataflowJobExecutionDao.deleteBatchJobExecutionByJobExecutionIds(jobExecutionIds)); + } else { + split(jobExecutionIds, chunkSize).forEach(jobExecutionIdSubsetList -> { + Set jobExecutionIdSubset = new HashSet<>(jobExecutionIdSubsetList); + numberOfDeletedBatchStepExecutionRows.addAndGet(dataflowJobExecutionDao.deleteBatchStepExecutionsByJobExecutionIds(jobExecutionIdSubset)); + numberOfDeletedBatchJobExecutionContextRows.addAndGet(dataflowJobExecutionDao.deleteBatchJobExecutionContextByJobExecutionIds( + jobExecutionIdSubset)); + numberOfDeletedBatchJobExecutionParamRows.addAndGet(dataflowJobExecutionDao.deleteBatchJobExecutionParamsByJobExecutionIds(jobExecutionIdSubset)); + numberOfDeletedBatchJobExecutionRows.addAndGet(dataflowJobExecutionDao.deleteBatchJobExecutionByJobExecutionIds(jobExecutionIdSubset)); + }); } - + final int numberOfDeletedUnusedBatchJobInstanceRows = dataflowJobExecutionDao.deleteUnusedBatchJobInstances(); - logger.info( - "Deleted the following Batch Job Execution related data for {} Job Executions.\n" - + "Batch Step Execution Context Rows: {}\n" - + "Batch Step Executions Rows: {}\n" - + "Batch Job Execution Context Rows: {}\n" - + "Batch Job Execution Param Rows: {}\n" - + "Batch Job Execution Rows: {}\n" - + "Batch Job Instance Rows: {}.", - jobExecutionIds.size(), numberOfDeletedBatchStepExecutionContextRows, + logger.info("Deleted the following Batch Job Execution related data for {} Job Executions.\n" + "Batch Step Execution Context Rows: {}\n" + "Batch Step Executions Rows: {}\n" + "Batch Job Execution Context Rows: {}\n" + "Batch Job Execution Param Rows: {}\n" + "Batch Job Execution Rows: {}\n" + "Batch Job Instance Rows: {}.", + jobExecutionIds.size(), + 
numberOfDeletedBatchStepExecutionContextRows, numberOfDeletedBatchStepExecutionRows, numberOfDeletedBatchJobExecutionContextRows, numberOfDeletedBatchJobExecutionParamRows, numberOfDeletedBatchJobExecutionRows, numberOfDeletedUnusedBatchJobInstanceRows); - auditData.put("Batch Step Execution Context", - numberOfDeletedBatchStepExecutionContextRows); + auditData.put("Batch Step Execution Context", numberOfDeletedBatchStepExecutionContextRows); auditData.put("Batch Step Executions", numberOfDeletedBatchStepExecutionRows); - auditData.put("Batch Job Execution Context Rows", - numberOfDeletedBatchJobExecutionContextRows); - auditData.put("Batch Job Execution Params", - numberOfDeletedBatchJobExecutionParamRows); + auditData.put("Batch Job Execution Context Rows", numberOfDeletedBatchJobExecutionContextRows); + auditData.put("Batch Job Execution Params", numberOfDeletedBatchJobExecutionParamRows); auditData.put("Batch Job Executions", numberOfDeletedBatchJobExecutionRows); - auditData.put("Batch Job Instance Rows", - numberOfDeletedUnusedBatchJobInstanceRows); + auditData.put("Batch Job Instance Rows", numberOfDeletedUnusedBatchJobInstanceRows); } /** @@ -424,6 +507,7 @@ private void deleteStepAndJobExecutionsByJobExecutionId(Set jobExecutionId * greater than zero this overrides the chunk size for the specific database type. * If the database type has no fixed number of maximum elements allowed in the {@code IN} clause * then zero is returned. + * * @param dataSource the datasource used by data flow. * @return the chunk size to be used for deleting task executions. */ @@ -439,8 +523,7 @@ private int getTaskExecutionDeleteChunkSize(DataSource dataSource) { if (name.startsWith("ORACLE")) { result = ORACLE_SERVER_CHUNK_SIZE; } - } - catch (MetaDataAccessException mdae) { + } catch (MetaDataAccessException mdae) { logger.warn("Unable to retrieve metadata for database when deleting task executions", mdae); } } @@ -449,32 +532,34 @@ private int getTaskExecutionDeleteChunkSize(DataSource dataSource) { static Collection> split(Collection input, int max) { final AtomicInteger count = new AtomicInteger(0); - return input.stream() - .collect(Collectors.groupingBy(s -> count.getAndIncrement() / max)) - .values(); + return input.stream().collect(Collectors.groupingBy(s -> count.getAndIncrement() / max)).values(); } @Override public void deleteTaskDefinition(String name) { - TaskDefinition taskDefinition = this.taskDefinitionRepository.findById(name) - .orElseThrow(() -> new NoSuchTaskDefinitionException(name)); + TaskDefinition taskDefinition = this.taskDefinitionRepository.findById(name).orElseThrow(() -> new NoSuchTaskDefinitionException(name)); deleteTaskDefinition(taskDefinition); - auditRecordService.populateAndSaveAuditRecord( - AuditOperationType.TASK, AuditActionType.DELETE, - taskDefinition.getTaskName(), this.argumentSanitizer.sanitizeTaskDsl(taskDefinition), null); + auditRecordService.populateAndSaveAuditRecord(AuditOperationType.TASK, + AuditActionType.DELETE, + taskDefinition.getTaskName(), + this.argumentSanitizer.sanitizeTaskDsl(taskDefinition), + null); } @Override public void deleteTaskDefinition(String name, boolean cleanup) { if (cleanup) { - Set taskExecutionIds = this.dataflowTaskExecutionDao.getTaskExecutionIdsByTaskName(name); - final Set actionsAsSet = new HashSet<>(); - actionsAsSet.add(TaskExecutionControllerDeleteAction.CLEANUP); - actionsAsSet.add(TaskExecutionControllerDeleteAction.REMOVE_DATA); - if (!taskExecutionIds.isEmpty()) { - cleanupExecutions(actionsAsSet, 
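The chunk size itself comes from getTaskExecutionDeleteChunkSize(dataSource), partially visible above: it inspects the JDBC metadata and returns a fixed limit for databases that cap the number of elements in an IN (...) clause, or zero for no chunking. The following is a sketch of that probe, assuming a Spring JDBC classpath (the generic JdbcUtils.extractDatabaseMetaData overload) and placeholder limits; the real constants (e.g. ORACLE_SERVER_CHUNK_SIZE) live in the service.

import javax.sql.DataSource;

import org.springframework.jdbc.support.JdbcUtils;
import org.springframework.jdbc.support.MetaDataAccessException;

class ChunkSizeProbe {

    private static final int SQL_SERVER_CHUNK_SIZE = 2000; // placeholder values,
    private static final int ORACLE_CHUNK_SIZE = 1000;     // not the real limits

    int chunkSizeFor(DataSource dataSource) {
        try {
            String name = JdbcUtils.extractDatabaseMetaData(dataSource,
                    metaData -> metaData.getDatabaseProductName()).toUpperCase();
            if (name.contains("SQL SERVER")) {
                return SQL_SERVER_CHUNK_SIZE;
            }
            if (name.startsWith("ORACLE")) {
                return ORACLE_CHUNK_SIZE;
            }
        } catch (MetaDataAccessException e) {
            // The service logs a warning here; either way the fallback below
            // treats an unreadable database as having no IN-clause limit.
        }
        return 0; // zero means "delete everything in one statement"
    }
}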
taskExecutionIds); + for (SchemaVersionTarget target : schemaService.getTargets().getSchemas()) { + DataflowTaskExecutionDao dataflowTaskExecutionDao = dataflowTaskExecutionDaoContainer.get(target.getName()); + Set taskExecutionIds = dataflowTaskExecutionDao.getTaskExecutionIdsByTaskName(name); + final Set actionsAsSet = new HashSet<>(); + actionsAsSet.add(TaskExecutionControllerDeleteAction.CLEANUP); + actionsAsSet.add(TaskExecutionControllerDeleteAction.REMOVE_DATA); + if (!taskExecutionIds.isEmpty()) { + performCleanupExecutions(actionsAsSet, taskExecutionIds, target.getName()); + } } } this.deleteTaskDefinition(name); @@ -487,9 +572,11 @@ public void deleteAll() { for (TaskDefinition taskDefinition : allTaskDefinition) { deleteTaskDefinition(taskDefinition); - auditRecordService.populateAndSaveAuditRecord( - AuditOperationType.TASK, AuditActionType.DELETE, - taskDefinition.getTaskName(), this.argumentSanitizer.sanitizeTaskDsl(taskDefinition), null); + auditRecordService.populateAndSaveAuditRecord(AuditOperationType.TASK, + AuditActionType.DELETE, + taskDefinition.getTaskName(), + this.argumentSanitizer.sanitizeTaskDsl(taskDefinition), + null); } } @@ -510,8 +597,7 @@ private void deleteTaskDefinition(TaskDefinition taskDefinition) { } try { destroyChildTask(childTaskPrefix + childName); - } - catch (ObjectOptimisticLockingFailureException e) { + } catch (ObjectOptimisticLockingFailureException e) { logger.warn("Attempted delete on a child task that is currently being deleted"); } }); @@ -519,15 +605,13 @@ private void deleteTaskDefinition(TaskDefinition taskDefinition) { // destroy normal task or composed parent task try { destroyPrimaryTask(taskDefinition.getTaskName()); - } - catch (ObjectOptimisticLockingFailureException e) { + } catch (ObjectOptimisticLockingFailureException e) { logger.warn("Attempted delete on task {} that is currently being deleted", taskDefinition.getTaskName()); } } private void destroyPrimaryTask(String name) { - TaskDefinition taskDefinition = taskDefinitionRepository.findById(name) - .orElseThrow(() -> new NoSuchTaskDefinitionException(name)); + TaskDefinition taskDefinition = taskDefinitionRepository.findById(name).orElseThrow(() -> new NoSuchTaskDefinitionException(name)); destroyTask(taskDefinition); } @@ -538,35 +622,28 @@ private void destroyChildTask(String name) { private void destroyTask(TaskDefinition taskDefinition) { taskDefinitionRepository.deleteById(taskDefinition.getName()); - TaskDeployment taskDeployment = - this.taskDeploymentRepository.findTopByTaskDefinitionNameOrderByCreatedOnAsc(taskDefinition.getTaskName()); + TaskDeployment taskDeployment = this.taskDeploymentRepository.findTopByTaskDefinitionNameOrderByCreatedOnAsc(taskDefinition.getTaskName()); if (taskDeployment != null) { Launcher launcher = launcherRepository.findByName(taskDeployment.getPlatformName()); if (launcher != null) { TaskLauncher taskLauncher = launcher.getTaskLauncher(); taskLauncher.destroy(taskDefinition.getName()); } - } - else { + } else { if (!findAndDeleteTaskResourcesAcrossPlatforms(taskDefinition)) { - logger.info("TaskLauncher.destroy not invoked for task " + - taskDefinition.getTaskName() + ". Did not find a previously launched task to destroy."); + logger.info("TaskLauncher.destroy not invoked for task " + taskDefinition.getTaskName() + ". 
Did not find a previously launched task to destroy."); } } } private boolean findAndDeleteTaskResourcesAcrossPlatforms(TaskDefinition taskDefinition) { boolean result = false; - Iterable launchers = launcherRepository.findAll(); - Iterator launcherIterator = launchers.iterator(); - while (launcherIterator.hasNext()) { - Launcher launcher = launcherIterator.next(); + for (Launcher launcher : launcherRepository.findAll()) { try { launcher.getTaskLauncher().destroy(taskDefinition.getName()); logger.info("Deleted task app resources for {} in platform {}", taskDefinition.getName(), launcher.getName()); result = true; - } - catch (Exception ex) { + } catch (Exception ex) { logger.info("Attempted delete of app resources for {} but none found on platform {}.", taskDefinition.getName(), launcher.getName()); } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionInfoService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionInfoService.java index 809c0cacc8..c5f7629a5c 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionInfoService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionInfoService.java @@ -19,8 +19,13 @@ import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties; import org.springframework.cloud.dataflow.core.AllPlatformsTaskExecutionInformation; @@ -33,13 +38,13 @@ import org.springframework.cloud.dataflow.core.dsl.TaskNode; import org.springframework.cloud.dataflow.core.dsl.TaskParser; import org.springframework.cloud.dataflow.registry.service.AppRegistryService; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer; import org.springframework.cloud.dataflow.server.job.LauncherRepository; import org.springframework.cloud.dataflow.server.repository.NoSuchTaskDefinitionException; import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; import org.springframework.cloud.dataflow.server.service.TaskExecutionInfoService; import org.springframework.cloud.deployer.spi.core.AppDefinition; import org.springframework.cloud.deployer.spi.core.AppDeploymentRequest; -import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.core.io.Resource; import org.springframework.util.Assert; @@ -61,6 +66,7 @@ * @author Daniel Serleg */ public class DefaultTaskExecutionInfoService implements TaskExecutionInfoService { + private final static Logger logger = LoggerFactory.getLogger(DefaultTaskExecutionInfoService.class); private final DataSourceProperties dataSourceProperties; @@ -72,7 +78,7 @@ public class DefaultTaskExecutionInfoService implements TaskExecutionInfoService /** * Used to read TaskExecutions. */ - private final TaskExplorer taskExplorer; + private final AggregateTaskExplorer taskExplorer; private final TaskDefinitionRepository taskDefinitionRepository; @@ -87,43 +93,49 @@ public class DefaultTaskExecutionInfoService implements TaskExecutionInfoService /** * Initializes the {@link DefaultTaskExecutionInfoService}. 
* - * @param dataSourceProperties the data source properties. - * @param appRegistryService URI registry this service will use to look up app URIs. - * @param taskExplorer the explorer this service will use to lookup task executions - * @param taskDefinitionRepository the {@link TaskDefinitionRepository} this service will - * use for task CRUD operations. + * @param dataSourceProperties the data source properties. + * @param appRegistryService URI registry this service will use to look up app URIs. + * @param taskExplorer the explorer this service will use to look up task executions + * @param taskDefinitionRepository the {@link TaskDefinitionRepository} this service will + * use for task CRUD operations. * @param taskConfigurationProperties the properties used to define the behavior of tasks - * @param launcherRepository the launcher repository - * @param taskPlatforms the task platforms + * @param launcherRepository the launcher repository + * @param taskPlatforms the task platforms */ @Deprecated public DefaultTaskExecutionInfoService(DataSourceProperties dataSourceProperties, AppRegistryService appRegistryService, - TaskExplorer taskExplorer, + AggregateTaskExplorer taskExplorer, TaskDefinitionRepository taskDefinitionRepository, TaskConfigurationProperties taskConfigurationProperties, LauncherRepository launcherRepository, List taskPlatforms) { - this(dataSourceProperties, appRegistryService, taskExplorer, taskDefinitionRepository, - taskConfigurationProperties, launcherRepository, taskPlatforms, null); + this(dataSourceProperties, + appRegistryService, + taskExplorer, + taskDefinitionRepository, + taskConfigurationProperties, + launcherRepository, + taskPlatforms, + null); } /** * Initializes the {@link DefaultTaskExecutionInfoService}. * - * @param dataSourceProperties the data source properties. - * @param appRegistryService URI registry this service will use to look up app URIs. - * @param taskExplorer the explorer this service will use to lookup task executions - * @param taskDefinitionRepository the {@link TaskDefinitionRepository} this service will - * use for task CRUD operations. - * @param taskConfigurationProperties the properties used to define the behavior of tasks - * @param launcherRepository the launcher repository - * @param taskPlatforms the task platforms + * @param dataSourceProperties the data source properties. + * @param appRegistryService URI registry this service will use to look up app URIs. + * @param taskExplorer the explorer this service will use to look up task executions + * @param taskDefinitionRepository the {@link TaskDefinitionRepository} this service will + * use for task CRUD operations. 
+ * @param taskConfigurationProperties the properties used to define the behavior of tasks + * @param launcherRepository the launcher repository + * @param taskPlatforms the task platforms * @param composedTaskRunnerConfigurationProperties the properties used to define the behavior of CTR */ public DefaultTaskExecutionInfoService(DataSourceProperties dataSourceProperties, AppRegistryService appRegistryService, - TaskExplorer taskExplorer, + AggregateTaskExplorer taskExplorer, TaskDefinitionRepository taskDefinitionRepository, TaskConfigurationProperties taskConfigurationProperties, LauncherRepository launcherRepository, @@ -149,7 +161,7 @@ public DefaultTaskExecutionInfoService(DataSourceProperties dataSourceProperties @Override public TaskExecutionInformation findTaskExecutionInformation(String taskName, - Map taskDeploymentProperties, boolean addDatabaseCredentials, Map previousTaskDeploymentProperties) { + Map taskDeploymentProperties, boolean addDatabaseCredentials, Map previousTaskDeploymentProperties) { Assert.hasText(taskName, "The provided taskName must not be null or empty."); Assert.notNull(taskDeploymentProperties, "The provided runtimeProperties must not be null."); @@ -180,13 +192,11 @@ public TaskExecutionInformation findTaskExecutionInformation(String taskName, ApplicationType.task, new URI(TaskServiceUtils.getComposedTaskLauncherUri(this.taskConfigurationProperties, this.composedTaskRunnerConfigurationProperties))); - } - catch (URISyntaxException e) { + } catch (URISyntaxException e) { throw new IllegalStateException("Invalid Compose Task Runner Resource", e); } - } - else { + } else { taskDefinitionToUse = TaskServiceUtils.updateTaskProperties(originalTaskDefinition, dataSourceProperties, addDatabaseCredentials); @@ -195,8 +205,7 @@ public TaskExecutionInformation findTaskExecutionInformation(String taskName, TaskAppNode taskAppNode = taskNode.getTaskApp(); if (taskAppNode.getLabel() != null) { label = taskAppNode.getLabel().stringValue(); - } - else { + } else { label = taskAppNode.getName(); } } @@ -209,8 +218,7 @@ public TaskExecutionInformation findTaskExecutionInformation(String taskName, if (version == null) { appRegistration = appRegistryService.find(taskDefinitionToUse.getRegisteredAppName(), ApplicationType.task); - } - else { + } else { appRegistration = appRegistryService.find(taskDefinitionToUse.getRegisteredAppName(), ApplicationType.task, version); } @@ -226,6 +234,52 @@ public TaskExecutionInformation findTaskExecutionInformation(String taskName, return taskExecutionInformation; } + @Override + public Set composedTaskChildNames(String taskName) { + TaskDefinition taskDefinition = taskDefinitionRepository.findByTaskName(taskName); + TaskParser taskParser = new TaskParser(taskDefinition.getTaskName(), taskDefinition.getDslText(), true, true); + Set result = new HashSet<>(); + TaskNode taskNode = taskParser.parse(); + if (taskNode.isComposed()) { + for (TaskApp subTask : taskNode.getTaskApps()) { + logger.debug("subTask:{}:{}:{}", subTask.getName(), subTask.getTaskName(), subTask); + TaskDefinition subTaskDefinition = taskDefinitionRepository.findByTaskName(subTask.getName()); + if (subTaskDefinition != null) { + result.add(subTaskDefinition.getRegisteredAppName() + "," + subTask.getLabel()); + TaskParser subTaskParser = new TaskParser(subTaskDefinition.getTaskName(), subTaskDefinition.getDslText(), true, true); + TaskNode subTaskNode = subTaskParser.parse(); + if (subTaskNode != null && subTaskNode.getTaskApp() != null) { + for (TaskApp subSubTask : 
subTaskNode.getTaskApps()) { + logger.debug("subSubTask:{}:{}:{}", subSubTask.getName(), subSubTask.getTaskName(), subSubTask); + TaskDefinition subSubTaskDefinition = taskDefinitionRepository.findByTaskName(subSubTask.getName()); + if (subSubTaskDefinition != null) { + if(!subTask.getLabel().contains("$")) { + result.add(subSubTaskDefinition.getRegisteredAppName() + "," + subSubTask.getLabel()); + } else { + result.add(subSubTaskDefinition.getRegisteredAppName()); + } + } + } + } + } else { + if((subTask.getLabel() == null || subTask.getLabel().equals(subTask.getName())) && !subTask.getName().contains("$")) { + result.add(subTask.getName()); + } else { + if(!subTask.getName().contains("$") && !subTask.getLabel().contains("$")) { + result.add(subTask.getName() + "," + subTask.getLabel()); + } else if(!subTask.getName().contains("$")) { + result.add(subTask.getName()); + } else if(!subTask.getTaskName().contains("$")) { + result.add(subTask.getTaskName()); + } + } + } + } + } + return result; + } + + @Override public List createTaskDeploymentRequests(String taskName, String dslText) { List appDeploymentRequests = new ArrayList<>(); TaskParser taskParser = new TaskParser(taskName, dslText, true, true); @@ -254,6 +308,7 @@ public List createTaskDeploymentRequests(String taskName, } return appDeploymentRequests; } + @Override public AllPlatformsTaskExecutionInformation findAllPlatformTaskExecutionInformation() { return new AllPlatformsTaskExecutionInformation(this.taskPlatforms); diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionRepositoryService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionRepositoryService.java index 42a431e76d..aadd817d39 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionRepositoryService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionRepositoryService.java @@ -16,6 +16,10 @@ package org.springframework.cloud.dataflow.server.service.impl; +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; +import org.springframework.cloud.dataflow.aggregate.task.TaskRepositoryContainer; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.dataflow.server.service.TaskExecutionCreationService; import org.springframework.cloud.task.repository.TaskExecution; import org.springframework.cloud.task.repository.TaskRepository; @@ -30,17 +34,29 @@ @Transactional public class DefaultTaskExecutionRepositoryService implements TaskExecutionCreationService { - private TaskRepository taskRepository; + private final TaskRepositoryContainer taskRepositoryContainer; + private final AggregateExecutionSupport aggregateExecutionSupport; - public DefaultTaskExecutionRepositoryService(TaskRepository taskRepository) { - Assert.notNull(taskRepository, "taskRepository must not be null"); - this.taskRepository = taskRepository; + private final TaskDefinitionReader taskDefinitionReader; + + public DefaultTaskExecutionRepositoryService( + TaskRepositoryContainer taskRepositoryContainer, + AggregateExecutionSupport aggregateExecutionSupport, + TaskDefinitionReader taskDefinitionReader + ) { + 
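composedTaskChildNames(...) above interleaves DSL parsing with task definition lookups; the parsing half can be looked at in isolation. Below is a simplified, repository-free sketch using the same spring-cloud-dataflow-core DSL types (TaskParser, TaskNode, TaskApp). The '$' filter mirrors the contains("$") guards above, which skip parser-generated labels, so the exact output depends on the parser's labeling rules.

import java.util.HashSet;
import java.util.Set;

import org.springframework.cloud.dataflow.core.dsl.TaskApp;
import org.springframework.cloud.dataflow.core.dsl.TaskNode;
import org.springframework.cloud.dataflow.core.dsl.TaskParser;

class ComposedChildNamesSketch {

    static Set<String> childNames(String taskName, String dsl) {
        Set<String> result = new HashSet<>();
        TaskNode node = new TaskParser(taskName, dsl, true, true).parse();
        if (node.isComposed()) {
            for (TaskApp app : node.getTaskApps()) {
                String label = app.getLabel();
                if (label == null || label.equals(app.getName()) || label.contains("$")) {
                    result.add(app.getName()); // unlabeled, or a synthetic parser label
                } else {
                    result.add(app.getName() + "," + label); // "app,label" form, as above
                }
            }
        }
        return result;
    }

    public static void main(String[] args) {
        // Two apps in sequence, the second under an explicit label.
        System.out.println(childNames("ctr", "t1 && lbl: t2"));
    }
}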
Assert.notNull(taskRepositoryContainer, "taskRepositoryContainer must not be null"); + Assert.notNull(aggregateExecutionSupport, "aggregateExecutionSupport must not be null"); + Assert.notNull(taskDefinitionReader, "taskDefinitionReader must not be null"); + this.taskRepositoryContainer = taskRepositoryContainer; + this.aggregateExecutionSupport = aggregateExecutionSupport; + this.taskDefinitionReader = taskDefinitionReader; } @Override @Transactional(propagation = Propagation.REQUIRES_NEW) - // TODO pass the SchemaVersionTarget public TaskExecution createTaskExecution(String taskName) { + SchemaVersionTarget schemaVersionTarget = this.aggregateExecutionSupport.findSchemaVersionTarget(taskName, taskDefinitionReader); + TaskRepository taskRepository = this.taskRepositoryContainer.get(schemaVersionTarget.getName()); return taskRepository.createTaskExecution(taskName); } } diff --git a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionService.java b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionService.java index ac37b65270..7d8cd81fb1 100644 --- a/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionService.java +++ b/spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/service/impl/DefaultTaskExecutionService.java @@ -18,6 +18,7 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -35,9 +36,15 @@ import org.slf4j.LoggerFactory; import org.springframework.cloud.common.security.core.support.OAuth2TokenUtilsService; +import org.springframework.cloud.dataflow.aggregate.task.AggregateExecutionSupport; +import org.springframework.cloud.dataflow.aggregate.task.AggregateTaskExplorer; +import org.springframework.cloud.dataflow.aggregate.task.DataflowTaskExecutionQueryDao; +import org.springframework.cloud.dataflow.aggregate.task.TaskDefinitionReader; +import org.springframework.cloud.dataflow.aggregate.task.TaskRepositoryContainer; import org.springframework.cloud.dataflow.audit.service.AuditRecordService; import org.springframework.cloud.dataflow.core.AuditActionType; import org.springframework.cloud.dataflow.core.AuditOperationType; +import org.springframework.cloud.dataflow.core.LaunchResponse; import org.springframework.cloud.dataflow.core.Launcher; import org.springframework.cloud.dataflow.core.TaskDefinition; import org.springframework.cloud.dataflow.core.TaskDeployment; @@ -48,11 +55,16 @@ import org.springframework.cloud.dataflow.core.dsl.visitor.ComposedTaskRunnerVisitor; import org.springframework.cloud.dataflow.rest.util.ArgumentSanitizer; import org.springframework.cloud.dataflow.rest.util.DeploymentPropertiesUtils; +import org.springframework.cloud.dataflow.schema.AggregateTaskExecution; +import org.springframework.cloud.dataflow.schema.SchemaVersionTarget; import org.springframework.cloud.dataflow.server.job.LauncherRepository; import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDao; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionDaoContainer; import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDao; +import org.springframework.cloud.dataflow.server.repository.DataflowTaskExecutionMetadataDaoContainer; import 
org.springframework.cloud.dataflow.server.repository.NoSuchTaskDefinitionException; import org.springframework.cloud.dataflow.server.repository.NoSuchTaskExecutionException; +import org.springframework.cloud.dataflow.server.repository.TaskDefinitionRepository; import org.springframework.cloud.dataflow.server.repository.TaskDeploymentRepository; import org.springframework.cloud.dataflow.server.repository.TaskExecutionMissingExternalIdException; import org.springframework.cloud.dataflow.server.service.TaskExecutionCreationService; @@ -67,7 +79,6 @@ import org.springframework.cloud.task.listener.TaskException; import org.springframework.cloud.task.listener.TaskExecutionException; import org.springframework.cloud.task.repository.TaskExecution; -import org.springframework.cloud.task.repository.TaskExplorer; import org.springframework.cloud.task.repository.TaskRepository; import org.springframework.core.io.Resource; import org.springframework.data.domain.Page; @@ -91,6 +102,7 @@ * @author Michael Wirth * @author David Turanski * @author Daniel Serleg + * @author Corneil du Plessis */ @Transactional public class DefaultTaskExecutionService implements TaskExecutionService { @@ -117,7 +129,7 @@ public class DefaultTaskExecutionService implements TaskExecutionService { /** * Used to create TaskExecutions. */ - private final TaskRepository taskRepository; + private final TaskRepositoryContainer taskRepositoryContainer; private final TaskExecutionInfoService taskExecutionInfoService; @@ -127,13 +139,17 @@ public class DefaultTaskExecutionService implements TaskExecutionService { private final TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator; - private final TaskExplorer taskExplorer; + private final AggregateTaskExplorer taskExplorer; - private final DataflowTaskExecutionDao dataflowTaskExecutionDao; + private final DataflowTaskExecutionDaoContainer dataflowTaskExecutionDaoContainer; - private final DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao; + private final DataflowTaskExecutionMetadataDaoContainer dataflowTaskExecutionMetadataDaoContainer; - private OAuth2TokenUtilsService oauth2TokenUtilsService; + private final OAuth2TokenUtilsService oauth2TokenUtilsService; + + private final TaskDefinitionRepository taskDefinitionRepository; + + private final TaskDefinitionReader taskDefinitionReader; private final Map> tasksBeingUpgraded = new ConcurrentHashMap<>(); @@ -143,11 +159,16 @@ public class DefaultTaskExecutionService implements TaskExecutionService { private boolean autoCreateTaskDefinitions; - private TaskConfigurationProperties taskConfigurationProperties; + private final TaskConfigurationProperties taskConfigurationProperties; + + private final ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties; + + private final AggregateExecutionSupport aggregateExecutionSupport; - private ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties; + private final DataflowTaskExecutionQueryDao dataflowTaskExecutionQueryDao; private static final Pattern TASK_NAME_PATTERN = Pattern.compile("[a-zA-Z]([-a-zA-Z0-9]*[a-zA-Z0-9])?"); + private static final String TASK_NAME_VALIDATION_MSG = "Task name must consist of alphanumeric characters " + "or '-', start with an alphabetic character, and end with an alphanumeric character (e.g. 'my-name', " + "or 'abc-123')"; @@ -155,114 +176,153 @@ public class DefaultTaskExecutionService implements TaskExecutionService { /** * Initializes the {@link DefaultTaskExecutionService}. 
* - * @param launcherRepository the repository of task launcher used to launch task apps. - * @param auditRecordService the audit record service - * @param taskRepository the repository to use for accessing and updating task executions - * @param taskExecutionInfoService the task execution info service - * @param taskDeploymentRepository the repository to track task deployment - * @param taskExecutionInfoService the service used to setup a task execution - * @param taskExecutionRepositoryService the service used to create the task execution - * @param taskAppDeploymentRequestCreator the task app deployment request creator - * @param taskExplorer the task explorer - * @param dataflowTaskExecutionDao the dataflow task execution dao - * @param dataflowTaskExecutionMetadataDao repository used to manipulate task manifests - * @param oauth2TokenUtilsService the oauth2 token server - * @param taskSaveService the task save service - * @param taskConfigurationProperties task configuration properties. + * @param launcherRepository the repository of task launcher used to launch task apps. + * @param auditRecordService the audit record service + * @param taskRepositoryContainer the container of repositories to use for accessing and updating task executions + * @param taskDeploymentRepository the repository to track task deployment + * @param taskDefinitionRepository the repository to query the task definition + * @param taskDefinitionReader use task definition repository to retrieve definition + * @param taskExecutionInfoService the service used to setup a task execution + * @param taskExecutionRepositoryService the service used to create the task execution + * @param taskAppDeploymentRequestCreator the task app deployment request creator + * @param taskExplorer the task explorer + * @param dataflowTaskExecutionDaoContainer the dataflow task execution dao + * @param dataflowTaskExecutionMetadataDaoContainer repository used to manipulate task manifests + * @param dataflowTaskExecutionQueryDao repository to query aggregate TaskExecution data + * @param aggregateExecutionSupport support for selecting SchemaVersionTarget + * @param oauth2TokenUtilsService the oauth2 token server + * @param taskSaveService the task save service + * @param taskConfigurationProperties task configuration properties. 
*/ @Deprecated - public DefaultTaskExecutionService(LauncherRepository launcherRepository, - AuditRecordService auditRecordService, - TaskRepository taskRepository, - TaskExecutionInfoService taskExecutionInfoService, - TaskDeploymentRepository taskDeploymentRepository, - TaskExecutionCreationService taskExecutionRepositoryService, - TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator, - TaskExplorer taskExplorer, - DataflowTaskExecutionDao dataflowTaskExecutionDao, - DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao, - OAuth2TokenUtilsService oauth2TokenUtilsService, - TaskSaveService taskSaveService, - TaskConfigurationProperties taskConfigurationProperties) { - this(launcherRepository, auditRecordService, taskRepository, taskExecutionInfoService, taskDeploymentRepository, - taskExecutionRepositoryService, taskAppDeploymentRequestCreator, taskExplorer, dataflowTaskExecutionDao, - dataflowTaskExecutionMetadataDao, oauth2TokenUtilsService, taskSaveService, taskConfigurationProperties, + public DefaultTaskExecutionService( + LauncherRepository launcherRepository, + AuditRecordService auditRecordService, + TaskRepositoryContainer taskRepositoryContainer, + TaskExecutionInfoService taskExecutionInfoService, + TaskDeploymentRepository taskDeploymentRepository, + TaskDefinitionRepository taskDefinitionRepository, + TaskDefinitionReader taskDefinitionReader, + TaskExecutionCreationService taskExecutionRepositoryService, + TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator, + AggregateTaskExplorer taskExplorer, + DataflowTaskExecutionDaoContainer dataflowTaskExecutionDaoContainer, + DataflowTaskExecutionMetadataDaoContainer dataflowTaskExecutionMetadataDaoContainer, + DataflowTaskExecutionQueryDao dataflowTaskExecutionQueryDao, + OAuth2TokenUtilsService oauth2TokenUtilsService, + TaskSaveService taskSaveService, + TaskConfigurationProperties taskConfigurationProperties, + AggregateExecutionSupport aggregateExecutionSupport + ) { + this(launcherRepository, + auditRecordService, + taskRepositoryContainer, + taskExecutionInfoService, + taskDeploymentRepository, + taskDefinitionRepository, + taskDefinitionReader, + taskExecutionRepositoryService, + taskAppDeploymentRequestCreator, + taskExplorer, + dataflowTaskExecutionDaoContainer, + dataflowTaskExecutionMetadataDaoContainer, + dataflowTaskExecutionQueryDao, + oauth2TokenUtilsService, + taskSaveService, + taskConfigurationProperties, + aggregateExecutionSupport, null); } /** * Initializes the {@link DefaultTaskExecutionService}. * - * @param launcherRepository the repository of task launcher used to launch task apps. 
- * @param auditRecordService the audit record service - * @param taskRepository the repository to use for accessing and updating task executions - * @param taskExecutionInfoService the task execution info service - * @param taskDeploymentRepository the repository to track task deployment - * @param taskExecutionInfoService the service used to setup a task execution - * @param taskExecutionRepositoryService the service used to create the task execution - * @param taskAppDeploymentRequestCreator the task app deployment request creator - * @param taskExplorer the task explorer - * @param dataflowTaskExecutionDao the dataflow task execution dao - * @param dataflowTaskExecutionMetadataDao repository used to manipulate task manifests - * @param oauth2TokenUtilsService the oauth2 token server - * @param taskSaveService the task save service + * @param launcherRepository the repository of task launcher used to launch task apps. + * @param auditRecordService the audit record service + * @param taskRepositoryContainer the container of repositories to use for accessing and updating task executions + * @param taskExecutionInfoService the task execution info service + * @param taskDeploymentRepository the repository to track task deployment + * @param taskDefinitionRepository the repository to query the task definition + * @param taskDefinitionReader uses task definition repository to retrieve definition + * @param taskExecutionRepositoryService the service used to create the task execution + * @param taskAppDeploymentRequestCreator the task app deployment request creator + * @param taskExplorer the task explorer + * @param dataflowTaskExecutionDaoContainer the dataflow task execution dao + * @param dataflowTaskExecutionMetadataDaoContainer repository used to manipulate task manifests + * @param dataflowTaskExecutionQueryDao repository to query aggregate task execution data. + * @param aggregateExecutionSupport support for selecting SchemaVersionTarget. 
+ * @param oauth2TokenUtilsService the oauth2 token server + * @param taskSaveService the task save service * @param composedTaskRunnerConfigurationProperties properties used to configure the composed task runner - * @param taskConfigurationProperties task configuration properties + * @param taskConfigurationProperties task configuration properties */ - public DefaultTaskExecutionService(LauncherRepository launcherRepository, - AuditRecordService auditRecordService, - TaskRepository taskRepository, - TaskExecutionInfoService taskExecutionInfoService, - TaskDeploymentRepository taskDeploymentRepository, - TaskExecutionCreationService taskExecutionRepositoryService, - TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator, - TaskExplorer taskExplorer, - DataflowTaskExecutionDao dataflowTaskExecutionDao, - DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao, - OAuth2TokenUtilsService oauth2TokenUtilsService, - TaskSaveService taskSaveService, - TaskConfigurationProperties taskConfigurationProperties, - ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties) { + public DefaultTaskExecutionService( + LauncherRepository launcherRepository, + AuditRecordService auditRecordService, + TaskRepositoryContainer taskRepositoryContainer, + TaskExecutionInfoService taskExecutionInfoService, + TaskDeploymentRepository taskDeploymentRepository, + TaskDefinitionRepository taskDefinitionRepository, + TaskDefinitionReader taskDefinitionReader, + TaskExecutionCreationService taskExecutionRepositoryService, + TaskAppDeploymentRequestCreator taskAppDeploymentRequestCreator, + AggregateTaskExplorer taskExplorer, + DataflowTaskExecutionDaoContainer dataflowTaskExecutionDaoContainer, + DataflowTaskExecutionMetadataDaoContainer dataflowTaskExecutionMetadataDaoContainer, + DataflowTaskExecutionQueryDao dataflowTaskExecutionQueryDao, + OAuth2TokenUtilsService oauth2TokenUtilsService, + TaskSaveService taskSaveService, + TaskConfigurationProperties taskConfigurationProperties, + AggregateExecutionSupport aggregateExecutionSupport, + ComposedTaskRunnerConfigurationProperties composedTaskRunnerConfigurationProperties + ) { Assert.notNull(launcherRepository, "launcherRepository must not be null"); Assert.notNull(auditRecordService, "auditRecordService must not be null"); Assert.notNull(taskExecutionInfoService, "taskExecutionInfoService must not be null"); - Assert.notNull(taskRepository, "taskRepository must not be null"); + Assert.notNull(taskRepositoryContainer, "taskRepositoryContainer must not be null"); Assert.notNull(taskExecutionInfoService, "taskExecutionInfoService must not be null"); Assert.notNull(taskDeploymentRepository, "taskDeploymentRepository must not be null"); Assert.notNull(taskExecutionRepositoryService, "taskExecutionRepositoryService must not be null"); Assert.notNull(taskAppDeploymentRequestCreator, "taskAppDeploymentRequestCreator must not be null"); Assert.notNull(taskExplorer, "taskExplorer must not be null"); - Assert.notNull(dataflowTaskExecutionDao, "dataflowTaskExecutionDao must not be null"); - Assert.notNull(dataflowTaskExecutionMetadataDao, "dataflowTaskExecutionMetadataDao must not be null"); + Assert.notNull(dataflowTaskExecutionDaoContainer, "dataflowTaskExecutionDaoContainer must not be null"); + Assert.notNull(dataflowTaskExecutionMetadataDaoContainer, "dataflowTaskExecutionMetadataDaoContainer must not be null"); Assert.notNull(taskSaveService, "taskSaveService must not be null"); Assert.notNull(taskConfigurationProperties, 
"taskConfigurationProperties must not be null"); - + Assert.notNull(aggregateExecutionSupport, "compositeExecutionSupport must not be null"); + Assert.notNull(taskDefinitionRepository, "taskDefinitionRepository must not be null"); + Assert.notNull(taskDefinitionReader, "taskDefinitionReader must not be null"); this.oauth2TokenUtilsService = oauth2TokenUtilsService; this.launcherRepository = launcherRepository; this.auditRecordService = auditRecordService; - this.taskRepository = taskRepository; + this.taskRepositoryContainer = taskRepositoryContainer; this.taskExecutionInfoService = taskExecutionInfoService; this.taskDeploymentRepository = taskDeploymentRepository; + this.taskDefinitionRepository = taskDefinitionRepository; + this.taskDefinitionReader = taskDefinitionReader; this.taskExecutionRepositoryService = taskExecutionRepositoryService; this.taskAppDeploymentRequestCreator = taskAppDeploymentRequestCreator; this.taskExplorer = taskExplorer; - this.dataflowTaskExecutionDao = dataflowTaskExecutionDao; - this.dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDao; + this.dataflowTaskExecutionDaoContainer = dataflowTaskExecutionDaoContainer; + this.dataflowTaskExecutionMetadataDaoContainer = dataflowTaskExecutionMetadataDaoContainer; this.taskSaveService = taskSaveService; this.taskConfigurationProperties = taskConfigurationProperties; + this.aggregateExecutionSupport = aggregateExecutionSupport; this.composedTaskRunnerConfigurationProperties = composedTaskRunnerConfigurationProperties; + this.dataflowTaskExecutionQueryDao = dataflowTaskExecutionQueryDao; } /** * Launch a task. - * @param taskName Name of the task definition or registered task application. - * If a task definition does not exist, one will be created if `autoCreateTask-Definitions` is true. Must not be null or empty. + * + * @param taskName Name of the task definition or registered task application. + * If a task definition does not exist, one will be created if `autoCreateTask-Definitions` is true. Must not be null or empty. * @param taskDeploymentProperties Optional deployment properties. Must not be null. - * @param commandLineArgs Optional runtime commandline argument + * @param commandLineArgs Optional runtime commandline argument * @return the task execution ID. */ @Override - public long executeTask(String taskName, Map taskDeploymentProperties, List commandLineArgs) { + public LaunchResponse executeTask(String taskName, Map taskDeploymentProperties, List commandLineArgs) { // Get platform name and fallback to 'default' String platformName = getPlatform(taskDeploymentProperties); String platformType = StreamSupport.stream(launcherRepository.findAll().spliterator(), true) @@ -274,26 +334,24 @@ public long executeTask(String taskName, Map taskDeploymentPrope throw new TaskException(String.format("Task name %s is invalid. 
%s", taskName, TASK_NAME_VALIDATION_MSG)); } // Naive local state to prevent parallel launches to break things up - if(this.tasksBeingUpgraded.containsKey(taskName)) { + if (this.tasksBeingUpgraded.containsKey(taskName)) { List platforms = this.tasksBeingUpgraded.get(taskName); - if(platforms.contains(platformName)) { + if (platforms.contains(platformName)) { throw new IllegalStateException(String.format( "Unable to launch %s on platform %s because it is being upgraded", taskName, platformName)); } } Launcher launcher = this.launcherRepository.findByName(platformName); - if(launcher == null) { + if (launcher == null) { throw new IllegalStateException(String.format("No launcher was available for platform %s", platformName)); } validateTaskName(taskName, launcher); // Remove since the key for task platform name will not pass validation for app, // deployer, or scheduler prefix. // Then validate - if (taskDeploymentProperties.containsKey(TASK_PLATFORM_NAME)) { - taskDeploymentProperties.remove(TASK_PLATFORM_NAME); - } - DeploymentPropertiesUtils.validateDeploymentProperties(taskDeploymentProperties); - + Map deploymentProperties = new HashMap<>(taskDeploymentProperties); + deploymentProperties.remove(TASK_PLATFORM_NAME); + DeploymentPropertiesUtils.validateDeploymentProperties(deploymentProperties); TaskDeployment existingTaskDeployment = taskDeploymentRepository .findTopByTaskDefinitionNameOrderByCreatedOnAsc(taskName); if (existingTaskDeployment != null) { @@ -304,36 +362,69 @@ public long executeTask(String taskName, Map taskDeploymentPrope taskName, existingTaskDeployment.getPlatformName(), platformName)); } } + List commandLineArguments = new ArrayList<>(commandLineArgs); + TaskDefinition taskDefinition = taskDefinitionRepository.findByTaskName(taskName); + String taskAppName = taskDefinition != null ? taskDefinition.getRegisteredAppName() : taskName; + SchemaVersionTarget schemaVersionTarget = aggregateExecutionSupport.findSchemaVersionTarget(taskAppName, taskDefinitionReader); + Assert.notNull(schemaVersionTarget, "schemaVersionTarget not found for " + taskAppName); + + DataflowTaskExecutionMetadataDao dataflowTaskExecutionMetadataDao = dataflowTaskExecutionMetadataDaoContainer.get(schemaVersionTarget.getName()); // Get the previous manifest - TaskManifest previousManifest = this.dataflowTaskExecutionMetadataDao.getLatestManifest(taskName); + TaskManifest previousManifest = dataflowTaskExecutionMetadataDao.getLatestManifest(taskName); Map previousTaskDeploymentProperties = previousManifest != null && previousManifest.getTaskDeploymentRequest() != null && previousManifest.getTaskDeploymentRequest().getDeploymentProperties() != null - ? previousManifest.getTaskDeploymentRequest().getDeploymentProperties() - : Collections.emptyMap(); + ? previousManifest.getTaskDeploymentRequest().getDeploymentProperties() + : Collections.emptyMap(); TaskExecutionInformation taskExecutionInformation = findOrCreateTaskExecutionInformation(taskName, - taskDeploymentProperties, launcher.getType(), previousTaskDeploymentProperties); + deploymentProperties, launcher.getType(), previousTaskDeploymentProperties); + + // pre prosess command-line args + // moving things like app.