diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml index efc139b19..201cee7e2 100644 --- a/.github/workflows/backend.yml +++ b/.github/workflows/backend.yml @@ -49,8 +49,14 @@ jobs: - uses: actions/checkout@v3 with: submodules: true + - name: Set up JDK ${{ matrix.java }} + uses: actions/setup-java@v3 + with: + java-version: 8 + distribution: 'temurin' + cache: 'maven' - name: Check code style - run: ./mvnw --batch-mode --quiet --no-snapshot-updates clean checkstyle:check + run: ./mvnw --batch-mode --quiet --no-snapshot-updates clean spotless:check dead-link: if: github.repository == 'apache/incubator-seatunnel-web' @@ -125,7 +131,7 @@ jobs: cache: 'maven' - name: Install run: >- - ./mvnw -B -q install -DskipTests + ./mvnw -B -q install -DskipTests -P release -D"maven.test.skip"=true -D"maven.javadoc.skip"=true -D"checkstyle.skip"=true diff --git a/.gitignore b/.gitignore index 1197e9bdb..9bf8b904b 100644 --- a/.gitignore +++ b/.gitignore @@ -48,3 +48,7 @@ test.conf spark-warehouse *.flattened-pom.xml /seatunnel-ui/package-lock.json +/seatunnel-ui/node +/seatunnel-ui/node/* +/seatunnel-ui/node_modules +/seatunnel-ui/node_modules/* diff --git a/.licenserc.yaml b/.licenserc.yaml index ec23eba6e..052f0bc1a 100644 --- a/.licenserc.yaml +++ b/.licenserc.yaml @@ -41,5 +41,6 @@ header: - '**/.gitkeep' - '**/com/typesafe/config/**' - 'seatunnel-main-repository/**' + - 'seatunnel-web-dist/release-docs/**' comment: on-failure diff --git a/README.md b/README.md index 304448308..6353d46df 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Apache SeaTunnel (Incubating) +# Apache SeaTunnel seatunnel logo @@ -21,90 +21,128 @@ Click it if your want to know more about our design. 👉🏻[Design](https://gi ## How to start -First, we need clone this project from Github. 
+### 1 Preparing the Apache DolphinScheduler environment -```shell -git clone https://github.com/apache/incubator-seatunnel-web.git -``` +#### 1.1 Install Apache DolphinScheduler + +If you already have Apache DolphinScheduler environment, you can skip this step and go to [Create Tenant and User for SeaTunnel Web](#1.2 Create Tenant and User for SeaTunnel Web) + +Because running SeaTunnel Web must rely on the DolphinScheduler, if you do not have a DS environment, you need to first install and deploy a DolphinScheduler (hereinafter referred to as DS). Taking DS version 3.1.5 as an example. + +Reference `https://dolphinscheduler.apache.org/zh-cn/docs/3.1.5/guide/installation/standalone` to install a standalone DS. + +#### 1.2 Create Tenant and User for SeaTunnel Web + +If you already have a DS environment and decide to use existing users and tenants for SeaTunnel Web, you can skip this step and go to [Create Project for SeaTunnel Web](#1.3 Create Project for SeaTunnel Web). + +Because SeaTunnel Web needs to call the interface of DS to create workflows and tasks, it is necessary to submit the projects, users, and tenants created in DS for SeaTunnel to use. + +1. Create Tenant + +"Security" -> "Tenant Manage" -> "Create Tenant" + +![image](docs/images/ds_create_tenant.png) + +2. For simplicity, use the default user admin of DS directly here + +#### 1.3 Create Project for SeaTunnel Web + +![image](docs/images/ds_create_project.png) + +#### 1.4 Create Token for SeaTunnel Web + +![image](docs/images/ds_create_token.png) + +### 2 Run SeaTunnel Web in IDEA + +If you want to deploy and run SeaTunnel Web, Please turn to [3 Run SeaTunnel Web In Server](#3 Run SeaTunnel Web In Server) + +#### 2.1 Init database + +1. Edit `whaletunnel-server/whaletunnel-app/src/main/resources/script/seatunnel_server_env.sh` file, Complete the installed database address, port, username, and password. 
Here is an example: + + ``` + export HOSTNAME="localhost" + export PORT="3306" + export USERNAME="root" + export PASSWORD="123456" + ``` +2. Run init shell `sh seatunnel-server/seatunnel-app/src/main/resources/script/init_sql.sh` If there are no errors during operation, it indicates successful initialization. + +#### 2.2 Config application and Run SeaTunnel Web Backend Server + +1. Edit `seatunnel-server/seatunnel-app/src/main/resources/application.yml` Fill in the database connection information and DS interface related information in the file. + +![image](docs/images/application_config.png) + +2. Run `seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/SeatunnelApplication.java` If there are no errors reported, the seatunnel web backend service is successfully started. + +#### 2.3 Run SeaTunnel Web Front End -Then, setup up configuration of db and more. -```shell -vim seatunnel-server/seatunnel-app/src/main/resources/application.yml ``` +cd seatunnel-ui +npm install +npm run dev -Notice: -At present, we only support the following scheduler systems: dolphinscheduler, more scheduler systems will be supported in the future; -And for easier use, we plan to build our own scheduling system in Seatunnel. - -Here is a sample parameter configuration for Seatunnel integration dolphinscheduler: -```yaml -ds: - script: - # The path where the script is stored - dir: /dj - project: - # The default project name of dolphinscheduler - default: test_dj - tenant: - # Which tenant been used to submit script - default: default - api: - # The dolphinscheduler user token - token: 12345678 - # The dolphinscheduler api prefix address - prefix: http://127.0.0.1:12345/dolphinscheduler ``` -Now comes the crucial part, this is about your account security, please modify the Jwt secret key and algorithm. 
+If there are no issues with the operation, the following information will be displayed: -```yaml -jwt: - expireTime: 86400 - secretKey: https://github.com/apache/incubator-seatunnel - algorithm: HS256 ``` + ➜ Local: http://127.0.0.1:5173/ + ➜ Network: use --host to expose + ➜ press h to show help - -Next, execute sql to create table .(Your must create database first by yourself) -```shell -# Replace `username` & `dbName` with the real username and database name. -# We will provided script in future. -mysql -u username -p [dbName] < bin/seatunnl.sql ``` -Now, you've done all the preparatory work, launch our app. +Accessing in a browser http://127.0.0.1:5173/login Okay, the default username and password are admin/admin. -### Launch it in IntelliJ IDEA +### 3 Run SeaTunnel Web In Server -Starting the back end of St in idea is really simple, just run the main method of `SeatunnelApplication.java` in the `seatunnel-app` module. -And the log will tell u anything you need to know. +#### 3.1 Build Install Package From Code -### Start it in the command line +``` +cd incubator-seatunnel-web +sh build.sh code +``` -```shell -# start backend +Then you can find the installer package in dir `incubator-seatunnel-web/seatunnel-web-dist/target/apache-seatunnel-web-${project.version}.tar.gz`. -# for build code -sh build.sh code +#### 3.2 Install -# for build image -sh build.sh image - -# and then start docker container -docker run apache/seatunnel-web +Copy the `apache-seatunnel-web-${project.version}.tar.gz` to your server node and unzip it. +```shell +tar -zxvf apache-seatunnel-web-${project.version}.tar.gz ``` -### start frontend -You can use a Web server such as Apache HTTP Server or Nginx to start front-end applications. Deploy the built front-end code to the root directory of the Web server, start the Web server, and enter the URL of the Web server in a browser to access the application. +#### 3.3 Init database + +1. 
Edit `apache-seatunnel-web-${project.version}/script/seatunnel_server_env.sh` file, Complete the installed database address, port, username, and password. Here is an example: + + ``` + export HOSTNAME="localhost" + export PORT="3306" + export USERNAME="root" + export PASSWORD="123456" + ``` +2. Run init shell `sh apache-seatunnel-web-${project.version}/script/init_sql.sh` If there are no errors during operation, it indicates successful initialization. + +#### 3.4 Config application and Run SeaTunnel Web Backend Server + +Edit `apache-seatunnel-web-${project.version}/config/application.yml` Fill in the database connection information and DS interface related information in the file. + +![image](docs/images/application_config.png) + +#### 3.5 Start SeaTunnel Web -If you want start in dev mode: ```shell -cd seatunnel-ui -npm install -npm run dev +cd apache-seatunnel-web-${project.version} +sh bin/seatunnel-backend-daemon.sh start ``` +Accessing in a browser http://127.0.0.1:8801/ui/ Okay, the default username and password are admin/admin. + ### How to use it After all the pre-work is done, we can open the following URL: 127.0.0.1:7890(please replace it according to your configuration) to use it. 
diff --git a/build.sh b/build.sh index d48afbf41..da4f8b1df 100644 --- a/build.sh +++ b/build.sh @@ -29,7 +29,7 @@ DOCKER_VERSION=1.0.0-snapshot code() { /bin/sh $WORKDIR/mvnw clean package -DskipTests # mv release zip - mv $WORKDIR/seatunnel-server/seatunnel-app/target/seatunnel-web.zip $WORKDIR/ + mv $WORKDIR/seatunnel-web-dist/target/apache-seatunnel-web-1.0.0-SNAPSHOT.zip $WORKDIR/ } # build image diff --git a/docs/images/application_config.png b/docs/images/application_config.png new file mode 100644 index 000000000..eac17bae6 Binary files /dev/null and b/docs/images/application_config.png differ diff --git a/docs/images/ds_create_project.png b/docs/images/ds_create_project.png new file mode 100644 index 000000000..71bb25168 Binary files /dev/null and b/docs/images/ds_create_project.png differ diff --git a/docs/images/ds_create_tenant.png b/docs/images/ds_create_tenant.png new file mode 100644 index 000000000..45dfe3363 Binary files /dev/null and b/docs/images/ds_create_tenant.png differ diff --git a/docs/images/ds_create_token.png b/docs/images/ds_create_token.png new file mode 100644 index 000000000..539c0bdf1 Binary files /dev/null and b/docs/images/ds_create_token.png differ diff --git a/docs/images/ds_create_user.png b/docs/images/ds_create_user.png new file mode 100644 index 000000000..5117f398b Binary files /dev/null and b/docs/images/ds_create_user.png differ diff --git a/pom.xml b/pom.xml index e5195ce02..ef6b0822e 100644 --- a/pom.xml +++ b/pom.xml @@ -13,8 +13,7 @@ See the License for the specific language governing permissions and limitations under the License. --> - 4.0.0 @@ -26,56 +25,21 @@ org.apache.seatunnel seatunnel-web + ${revision} pom - 1.0.0-SNAPSHOT SeaTunnel - - Production ready big data processing product based on Apache Spark and Apache Flink. 
- - - https://github.com/apache/incubator-seatunnel - - - - The Apache License, Version 2.0 - https://www.apache.org/licenses/LICENSE-2.0.txt - - - - - scm:git:https://github.com/apache/incubator-seatunnel.git - scm:git:https://github.com/apache/incubator-seatunnel.git - https://github.com/apache/incubator-seatunnel - HEAD - - - - GitHub - https://github.com/apache/incubator-seatunnel/issues - - - - - SeaTunnel Developer List - dev@seatunnel.apache.org - dev-subscribe@seatunnel.apache.org - dev-unsubscribe@seatunnel.apache.org - - - SeaTunnel Commits List - commits@seatunnel.apache.org - commits-subscribe@seatunnel.apache.org - commits-unsubscribe@seatunnel.apache.org - - + Production ready big data processing product based on Apache Spark and Apache Flink. - seatunnel-server + seatunnel-server + seatunnel-datasource + seatunnel-web-dist + 1.0.0-SNAPSHOT ${java.version} ${java.version} 1.8 @@ -87,7 +51,6 @@ 2.22.2 2.22.2 2.9.1 - 3.1.2 3.10.1 3.3.0 3.2.0 @@ -95,15 +58,11 @@ 1.20 1.9.5 3.1.0 - true - 1.18.0 + 3.3.0 false - - 2.1.3 - 1.2 - 1.7.25 - 2.12.7.1 + 2.12.7.1 + 2.12.7 8.0.16 2.1.214 5.9.0 @@ -111,82 +70,805 @@ 3.4 19.0 3.10.0 - 2.17.1 4.2.0 + 2.4.7-WS-SNAPSHOT + 2.1.0.9 + 3.1.4 + 1.11.271 + 1.0.1 + 1.18.24 + true + 3.1.1 + 1.3.0 + + 2.6.8 + 5.3.20 + 3.5.3.1 + 1.2.9 + 2.6.1 + 1.5.10 + 6.2.2.Final + 1.3.2 + 1.14.3 + 0.10.7 + 9.1.6 + 2.11.0 + 3.0.0 + 1.0.1 + 2.3.1 + 3.1.4 + 1.11.271 + 2.29.0 + 1.2.11 + 2.17.1 + 1.2.3 + 1.2 + 1.2.17 + 2.17.1 + 1.7.25 + + + + org.slf4j + slf4j-api + ${slf4j.version} + + + + + ch.qos.logback + logback-classic + ${logback.version} + + + ch.qos.logback + logback-core + ${logback.version} + + + + + + org.slf4j + jcl-over-slf4j + ${slf4j.version} + + + + + + + + org.slf4j + log4j-over-slf4j + ${slf4j.version} + + + + org.slf4j + slf4j-log4j12 + ${slf4j.version} + provided + + + + org.apache.logging.log4j + log4j-to-slf4j + ${log4j2.version} + + + + org.slf4j + slf4j-jdk14 + ${slf4j.version} + provided + + + + org.slf4j + slf4j-jcl + 
${slf4j.version} + provided + + + + org.slf4j + slf4j-nop + ${slf4j.version} + provided + + + + org.slf4j + slf4j-simple + ${slf4j.version} + provided + + + + org.slf4j + slf4j-reload4j + ${slf4j.version} + provided + + + + commons-logging + commons-logging + ${commons-logging.version} + provided + + + log4j + log4j + ${log4j.version} + provided + + + + org.apache.logging.log4j + log4j-slf4j-impl + ${log4j2.version} + provided + + + org.apache.logging.log4j + log4j-api + ${log4j2.version} + + + org.apache.logging.log4j + log4j-core + ${log4j2.version} + provided + + + + org.apache.logging.log4j + log4j-1.2-api + ${log4j2.version} + provided + + + + + + + org.apache.seatunnel + seatunnel-common + ${seatunnel-framework.version} + + + + org.apache.seatunnel + seatunnel-jackson + ${seatunnel-framework.version} + optional + + + com.fasterxml.jackson.dataformat + jackson-dataformat-properties + + + + + + org.apache.seatunnel + seatunnel-api + ${seatunnel-framework.version} + + + + org.apache.seatunnel + seatunnel-engine-client + ${seatunnel-framework.version} + + + + com.google.auto.service + auto-service + ${auto-service.version} + provided + + + + org.apache.seatunnel + seatunnel-plugin-discovery + ${seatunnel-framework.version} + + + + org.apache.seatunnel + seatunnel-transforms-v2 + ${seatunnel-framework.version} + + + + org.projectlombok + lombok + ${lombok.version} + provided + + + + + org.springframework.boot + spring-boot-starter-web + ${spring-boot.version} + + + org.springframework.boot + spring-boot-starter-jetty + ${spring-boot.version} + + + org.springframework.boot + spring-boot-starter-aop + ${spring-boot.version} + + + org.springframework.boot + spring-boot-starter-jdbc + ${spring-boot.version} + + + org.springframework.boot + spring-boot-starter-test + ${spring-boot.version} + test + + + com.google.code.gson + gson + 2.8.6 + + + com.alibaba + druid-spring-boot-starter + ${druid-spring-boot-starter.version} + + + + + com.baomidou + 
mybatis-plus-boot-starter + ${mybatis-plus-boot-starter.version} + + + org.springframework.boot + spring-boot-starter-jdbc + + + org.springframework.boot + spring-boot-autoconfigure + + + + + org.hibernate.validator + hibernate-validator + ${hibernate.validator.version} + + + + + io.springfox + springfox-swagger2 + ${springfox-swagger.version} + + + io.springfox + springfox-swagger-ui + ${springfox-swagger.version} + + + io.swagger + swagger-annotations + ${swagger-annotations.version} + + + + + io.jsonwebtoken + jjwt-api + ${jwt.version} + + + io.jsonwebtoken + jjwt-impl + ${jwt.version} + runtime + + + io.jsonwebtoken + jjwt-jackson + ${jwt.version} + runtime + + + + + org.jsoup + jsoup + ${jsoup.version} + + + + + org.apache.seatunnel + connector-common + ${seatunnel-framework.version} + test + + + + org.apache.seatunnel + connector-console + ${seatunnel-framework.version} + test + + + + org.apache.seatunnel + connector-fake + ${seatunnel-framework.version} + test + + + + org.apache.seatunnel + connector-jdbc + ${seatunnel-framework.version} + test + + + + org.apache.commons + commons-lang3 + ${commons-lang3.version} + + + commons-io + commons-io + ${commons-io.version} + + + org.apache.commons + commons-collections4 + ${commons-collections4.version} + + + org.junit + junit-bom + ${junit.version} + pom + import + + + + com.fasterxml.jackson.core + jackson-databind + ${jackson-databind.version} + + + + com.google.guava + guava + ${guava.version} + + + + org.checkerframework + checker-qual + ${checker.qual.version} + + + + org.awaitility + awaitility + ${awaitility.version} + test + + + + org.apache.seatunnel + seatunnel-hadoop3-3.1.4-uber + ${hadoop-uber.version} + + + org.apache.avro + avro + + + + + org.apache.hadoop + hadoop-aws + ${hadoop-aws.version} + provided + + + jdk.tools + jdk.tools + + + + + com.amazonaws + aws-java-sdk-bundle + ${aws-java-sdk-bundle.version} + provided + + + com.cronutils + cron-utils + ${cron-utils.version} + + + org.javassist + 
javassist + + + + + org.apache.seatunnel + seatunnel-datasource-client + ${project.version} + + + com.google.auto.service + auto-service-annotations + ${auto-service-annotation.version} + compile + + + + org.apache.seatunnel + datasource-s3 + ${project.version} + provided + + + + com.google.code.findbugs + jsr305 + ${jsr305.version} + + + + + org.apache.seatunnel + connector-common + ${seatunnel-framework.version} + test + + + + org.apache.seatunnel + seatunnel-transforms-v2 + ${seatunnel-framework.version} + + + + org.apache.seatunnel + connector-console + ${seatunnel-framework.version} + test + + + + org.apache.seatunnel + connector-fake + ${seatunnel-framework.version} + test + + + + org.apache.seatunnel + connector-kafka + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-http-base + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-http-feishu + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-http-wechat + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-http-myhours + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-http-lemlist + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-http-klaviyo + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-http-onesignal + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-http-notion + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-jdbc + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-socket + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-clickhouse + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-pulsar + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-hive + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-file-hadoop + 
${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-file-local + ${seatunnel-framework.version} + test + org.apache.seatunnel - seatunnel-common - ${seatunnel-common.version} + connector-file-oss + ${seatunnel-framework.version} + test - - org.projectlombok - lombok - ${lombok.version} - provided + org.apache.seatunnel + connector-file-oss-jindo + ${seatunnel-framework.version} + test - - org.apache.commons - commons-lang3 - ${commons-lang3.version} + org.apache.seatunnel + connector-file-ftp + ${seatunnel-framework.version} + test - org.apache.commons - commons-collections4 - ${commons-collections4.version} + org.apache.seatunnel + connector-file-sftp + ${seatunnel-framework.version} + test - org.junit - junit-bom - ${junit.version} - pom - import + org.apache.seatunnel + connector-hudi + ${seatunnel-framework.version} + test - - com.fasterxml.jackson.core - jackson-databind - ${jackson.version} + org.apache.seatunnel + connector-dingtalk + ${seatunnel-framework.version} + test - - com.google.guava - guava - ${guava.version} + org.apache.seatunnel + connector-kudu + ${seatunnel-framework.version} + test - - org.apache.logging.log4j - log4j-core - ${log4j-core.version} + org.apache.seatunnel + connector-email + ${seatunnel-framework.version} + test - org.slf4j - slf4j-api - ${slf4j.version} + org.apache.seatunnel + connector-elasticsearch + ${seatunnel-framework.version} + test - - org.slf4j - slf4j-log4j12 - ${slf4j.version} + org.apache.seatunnel + connector-iotdb + ${seatunnel-framework.version} + test - - org.checkerframework - checker-qual - ${checker.qual.version} + org.apache.seatunnel + connector-neo4j + ${seatunnel-framework.version} + test - - org.awaitility - awaitility - ${awaitility.version} + org.apache.seatunnel + connector-redis + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-google-sheets + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-datahub + 
${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-sentry + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-mongodb + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-iceberg + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-influxdb + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-cassandra + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-file-s3 + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-amazondynamodb + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-starrocks + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-tablestore + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-slack + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-http-gitlab + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-http-jira + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-rabbitmq + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-openmldb + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-doris + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-maxcompute + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-cdc-mysql + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-cdc-sqlserver + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-cdc-oracle + ${seatunnel-framework.version} test @@ -194,6 +876,23 @@ + + org.apache.commons + commons-lang3 + ${commons-lang3.version} + + + org.apache.seatunnel + connector-cdc-sqlserver + ${seatunnel-framework.version} + test + + + org.apache.seatunnel + connector-cdc-mysql + ${seatunnel-framework.version} + test + 
org.projectlombok lombok @@ -201,7 +900,40 @@ org.slf4j slf4j-api - ${slf4j.version} + + + + + ch.qos.logback + logback-classic + + + ch.qos.logback + logback-core + + + + org.apache.logging.log4j + log4j-api + + + + org.slf4j + log4j-over-slf4j + + + + + + org.slf4j + jcl-over-slf4j + + + + org.apache.logging.log4j + log4j-to-slf4j org.junit.jupiter @@ -242,8 +974,7 @@ ${skipUT} - ${project.build.directory}/jacoco.exec - + ${project.build.directory}/jacoco.exec **/*IT.java @@ -251,7 +982,6 @@ - org.apache.maven.plugins @@ -259,51 +989,53 @@ ${maven-assembly-plugin.version} - + org.apache.maven.plugins - maven-checkstyle-plugin - ${maven-checkstyle-plugin.version} + maven-shade-plugin + ${maven-shade-plugin.version} - - ${maven.multiModuleProjectDirectory}/tools/checkstyle/checkStyle.xml - - UTF-8 - true - true - ${checkstyle.fails.on.error} - - ${project.build.sourceDirectory} - ${project.build.testSourceDirectory} - - - **/*.properties, - **/*.sh, - **/*.bat, - **/*.yml, - **/*.yaml, - **/*.xml - - - **/.asf.yaml, - **/.github/** - - - + false + true + + true + + + org.slf4j:* + ch.qos.logback:* + log4j:* + org.apache.logging.log4j:* + commons-logging:* + + + + + *:* + + META-INF/*.SF + META-INF/*.DSA + META-INF/*.RSA + + + + - validate - process-sources - check + shade + package + + + + + + - - org.apache.maven.plugins maven-source-plugin @@ -339,7 +1071,6 @@ - org.codehaus.mojo build-helper-maven-plugin @@ -351,7 +1082,7 @@ license-maven-plugin ${maven-license-maven-plugin} - ${project.basedir}/seatunnel-dist/target/ + ${project.basedir}/seatunnel-web-dist/target/ THIRD-PARTY.txt false false @@ -362,6 +1093,112 @@ test,provided + + + org.codehaus.mojo + flatten-maven-plugin + ${flatten-maven-plugin.version} + + true + resolveCiFriendliesOnly + + + + flatten + + flatten + + process-resources + + + flatten.clean + + clean + + clean + + + + + org.apache.maven.plugins + maven-dependency-plugin + ${maven-dependency-plugin.version} + + + + com.diffplug.spotless + 
spotless-maven-plugin + ${spotless.version} + + + + 1.7 + + + + + + org.apache.seatunnel.shade,org.apache.seatunnel,org.apache,org,,javax,java,\# + + + Remove wildcard imports + import\s+(static)*\s*[^\*\s]+\*;(\r\n|\r|\n) + $1 + + + Block powermock + import\s+org\.powermock\.[^\*\s]*(|\*);(\r\n|\r|\n) + $1 + + + Block jUnit4 imports + import\s+org\.junit\.[^jupiter][^\*\s]*(|\*);(\r\n|\r|\n) + $1 + + + + + UTF-8 + 4 + true + false + true + true + false + false + custom_1 + false + false + + + Leading blank line + project + project + + + + + docs/**/*.md + + + **/.github/**/*.md + + + + + true + + + + + + check + + compile + + + + @@ -376,6 +1213,7 @@ org.apache.maven.plugins maven-release-plugin + 3.0.1 true @{project.version} @@ -394,10 +1232,6 @@ org.apache.maven.plugins maven-surefire-plugin - - org.apache.maven.plugins - maven-failsafe-plugin - org.apache.maven.plugins @@ -408,7 +1242,48 @@ org.codehaus.mojo license-maven-plugin + + + com.diffplug.spotless + spotless-maven-plugin + + https://github.com/apache/incubator-seatunnel + + + + The Apache License, Version 2.0 + https://www.apache.org/licenses/LICENSE-2.0.txt + + + + + + SeaTunnel Developer List + dev-subscribe@seatunnel.apache.org + dev-unsubscribe@seatunnel.apache.org + dev@seatunnel.apache.org + + + SeaTunnel Commits List + commits-subscribe@seatunnel.apache.org + commits-unsubscribe@seatunnel.apache.org + commits@seatunnel.apache.org + + + + + scm:git:https://github.com/apache/incubator-seatunnel.git + scm:git:https://github.com/apache/incubator-seatunnel.git + https://github.com/apache/incubator-seatunnel + HEAD + + + + GitHub + https://github.com/apache/incubator-seatunnel/issues + + diff --git a/seatunnel-datasource/pom.xml b/seatunnel-datasource/pom.xml new file mode 100644 index 000000000..a6fcbc7a5 --- /dev/null +++ b/seatunnel-datasource/pom.xml @@ -0,0 +1,40 @@ + + + + 4.0.0 + + org.apache.seatunnel + seatunnel-web + ${revision} + + + seatunnel-datasource + pom + + + 
seatunnel-datasource-client + seatunnel-datasource-plugins + + + + + com.google.auto.service + auto-service + + + + diff --git a/seatunnel-datasource/seatunnel-datasource-client/pom.xml b/seatunnel-datasource/seatunnel-datasource-client/pom.xml new file mode 100644 index 000000000..e697c6777 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-client/pom.xml @@ -0,0 +1,71 @@ + + + + 4.0.0 + + org.apache.seatunnel + seatunnel-datasource + ${revision} + + + seatunnel-datasource-client + + + + org.apache.seatunnel + seatunnel-api + + + org.apache.commons + commons-lang3 + + + com.google.guava + guava + + + org.apache.seatunnel + datasource-plugins-api + ${project.version} + + + + org.apache.seatunnel + datasource-all + ${project.version} + provided + + + + + + + org.apache.maven.plugins + maven-dependency-plugin + + ${e2e.dependency.skip} + true + + + + + org.apache.maven.plugins + maven-shade-plugin + + + + diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/AbstractDataSourceClient.java b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/AbstractDataSourceClient.java new file mode 100644 index 000000000..b9e3b4f33 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/AbstractDataSourceClient.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource; + +import org.apache.seatunnel.api.configuration.util.OptionRule; +import org.apache.seatunnel.datasource.exception.DataSourceSDKException; +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourceFactory; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; +import org.apache.seatunnel.datasource.plugin.api.model.TableField; +import org.apache.seatunnel.datasource.service.DataSourceService; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.ServiceLoader; +import java.util.concurrent.atomic.AtomicInteger; + +import static com.google.common.base.Preconditions.checkNotNull; + +public abstract class AbstractDataSourceClient implements DataSourceService { + + private Map supportedDataSourceInfo = new HashMap<>(); + + private Map supportedDataSourceIndex = new HashMap<>(); + + protected List supportedDataSources = new ArrayList<>(); + + private List dataSourceChannels = new ArrayList<>(); + + protected AbstractDataSourceClient() { + AtomicInteger dataSourceIndex = new AtomicInteger(); + ServiceLoader.load(DataSourceFactory.class) + .forEach( + seaTunnelDataSourceFactory -> { + seaTunnelDataSourceFactory + .supportedDataSources() + .forEach( + dataSourceInfo -> { + supportedDataSourceInfo.put( + dataSourceInfo.getName().toUpperCase(), + dataSourceInfo); + supportedDataSourceIndex.put( + dataSourceInfo.getName().toUpperCase(), + 
dataSourceIndex.get()); + supportedDataSources.add(dataSourceInfo); + }); + dataSourceChannels.add(seaTunnelDataSourceFactory.createChannel()); + dataSourceIndex.getAndIncrement(); + }); + if (supportedDataSourceInfo.isEmpty()) { + throw new DataSourceSDKException("No supported data source found"); + } + } + + @Override + public Boolean checkDataSourceConnectivity( + String pluginName, Map dataSourceParams) { + return getDataSourceChannel(pluginName) + .checkDataSourceConnectivity(pluginName, dataSourceParams); + } + + @Override + public List listAllDataSources() { + return supportedDataSources; + } + + protected DataSourceChannel getDataSourceChannel(String pluginName) { + checkNotNull(pluginName, "pluginName cannot be null"); + Integer index = supportedDataSourceIndex.get(pluginName.toUpperCase()); + if (index == null) { + throw new DataSourceSDKException( + "The %s plugin is not supported or plugin not exist.", pluginName); + } + return dataSourceChannels.get(index); + } + + @Override + public OptionRule queryDataSourceFieldByName(String pluginName) { + return getDataSourceChannel(pluginName).getDataSourceOptions(pluginName); + } + + @Override + public OptionRule queryMetadataFieldByName(String pluginName) { + return getDataSourceChannel(pluginName) + .getDatasourceMetadataFieldsByDataSourceName(pluginName); + } + + @Override + public List getTables( + String pluginName, String databaseName, Map requestParams) { + return getDataSourceChannel(pluginName).getTables(pluginName, requestParams, databaseName); + } + + @Override + public List getDatabases(String pluginName, Map requestParams) { + return getDataSourceChannel(pluginName).getDatabases(pluginName, requestParams); + } + + @Override + public List getTableFields( + String pluginName, + Map requestParams, + String databaseName, + String tableName) { + return getDataSourceChannel(pluginName) + .getTableFields(pluginName, requestParams, databaseName, tableName); + } + + @Override + public Map> getTableFields( + 
String pluginName, + Map requestParams, + String databaseName, + List tableNames) { + return getDataSourceChannel(pluginName) + .getTableFields(pluginName, requestParams, databaseName, tableNames); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/DataSourceClient.java b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/DataSourceClient.java new file mode 100644 index 000000000..4bf46baa1 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/DataSourceClient.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource; + +import org.apache.seatunnel.datasource.annotation.ThreadSafe; + +@ThreadSafe +public class DataSourceClient extends AbstractDataSourceClient { + + public DataSourceClient() { + super(); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/annotation/ThreadSafe.java b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/annotation/ThreadSafe.java new file mode 100644 index 000000000..55794b81c --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/annotation/ThreadSafe.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.annotation; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Documented +@Target({ElementType.TYPE}) +@Retention(RetentionPolicy.CLASS) +public @interface ThreadSafe {} diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/exception/DataSourceSDKException.java b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/exception/DataSourceSDKException.java new file mode 100644 index 000000000..3f90eabc1 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/exception/DataSourceSDKException.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.exception; + +public class DataSourceSDKException extends RuntimeException { + + public DataSourceSDKException(String message) { + super(message); + } + + public DataSourceSDKException(String message, Throwable cause) { + super(message, cause); + } + + public DataSourceSDKException(Throwable cause) { + super(cause); + } + + public DataSourceSDKException(String message, Object... args) { + super(String.format(message, args)); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/request/PageQueryRequest.java b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/request/PageQueryRequest.java new file mode 100644 index 000000000..8b09bc355 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/request/PageQueryRequest.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.request; + +import lombok.Data; +import lombok.experimental.SuperBuilder; + +import java.util.Map; + +@Data +@SuperBuilder +public class PageQueryRequest { + + private Integer pageNum; + + private Integer pageSize; + + private String orderBy; + + private String order; + + private String search; + + Map fuzzySearchParamMap; + + Map exactSearchParamMap; +} diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/request/VirtualTableCreateRequest.java b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/request/VirtualTableCreateRequest.java new file mode 100644 index 000000000..3d7c42208 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/request/VirtualTableCreateRequest.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.request; + +import lombok.Builder; +import lombok.Data; + +import java.util.List; + +@Data +@Builder +public class VirtualTableCreateRequest { + + private Long id; + + private Long dataSourceId; + + private String description; + + private String databaseName; + + private String tableName; + + private List fields; +} diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/request/VirtualTableFieldRequest.java b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/request/VirtualTableFieldRequest.java new file mode 100644 index 000000000..f2acb83c2 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/request/VirtualTableFieldRequest.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.request; + +import lombok.Builder; +import lombok.Data; + +import java.util.Map; + +@Data +@Builder +public class VirtualTableFieldRequest { + + private String type; + + private String name; + + private String comment; + + private Boolean primaryKey; + + private String defaultValue; + + private Boolean nullable; + + private Map properties; +} diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/request/VirtualTableUpdateRequest.java b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/request/VirtualTableUpdateRequest.java new file mode 100644 index 000000000..0939687c6 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/request/VirtualTableUpdateRequest.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.request; + +import lombok.Builder; +import lombok.Data; + +import java.util.List; + +@Data +@Builder +public class VirtualTableUpdateRequest { + + private String description; + + private String databaseName; + + private String tableName; + + private List fields; +} diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/response/BaseResponse.java b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/response/BaseResponse.java new file mode 100644 index 000000000..831a2a64a --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/response/BaseResponse.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.response; + +import lombok.Data; +import lombok.experimental.SuperBuilder; + +import java.util.Date; + +@Data +@SuperBuilder +public class BaseResponse { + + /** create time */ + private Date createTime; + + /** update time */ + private Date updateTime; + + private String createUserId; + + private String updateUserId; +} diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/response/DataSourceDetailResponse.java b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/response/DataSourceDetailResponse.java new file mode 100644 index 000000000..f489ad455 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/response/DataSourceDetailResponse.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.response; + +import lombok.Data; +import lombok.experimental.SuperBuilder; + +import java.util.Map; + +@Data +@SuperBuilder +public class DataSourceDetailResponse extends BaseResponse { + + private Long id; + + private String dataSourceName; + + private String pluginName; + + private String comment; + + private Map params; +} diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/response/PageDataResponse.java b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/response/PageDataResponse.java new file mode 100644 index 000000000..24e801cce --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/response/PageDataResponse.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.response; + +import lombok.Builder; +import lombok.Data; + +import java.util.List; + +@Data +@Builder +public class PageDataResponse { + private Integer pageNum; + private Integer pageSize; + private Integer total; + private List datas; +} diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/response/PageResultResponse.java b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/response/PageResultResponse.java new file mode 100644 index 000000000..10bab20c8 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/response/PageResultResponse.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.response; + +import lombok.Data; + +import java.util.List; + +@Data +public class PageResultResponse { + + private Integer pageNum; + + private Integer pageSize; + + private Integer total; + + private Integer pages; + + private List dataList; +} diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/response/VirtualTableDetailResponse.java b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/response/VirtualTableDetailResponse.java new file mode 100644 index 000000000..63ab5d575 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/response/VirtualTableDetailResponse.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.response; + +import lombok.Data; +import lombok.experimental.SuperBuilder; + +import java.util.List; + +@Data +@SuperBuilder +public class VirtualTableDetailResponse extends BaseResponse { + + private Long id; + + private Long dataSourceId; + + private String description; + + private String type; + + private String databaseName; + + private String tableName; + + private List fields; +} diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/response/VirtualTableFieldResponse.java b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/response/VirtualTableFieldResponse.java new file mode 100644 index 000000000..5e8583ef4 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/response/VirtualTableFieldResponse.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.response; + +import lombok.Data; +import lombok.experimental.SuperBuilder; + +import java.util.Map; + +@Data +@SuperBuilder +public class VirtualTableFieldResponse { + + private String type; + + private String name; + + private String comment; + + private Boolean primaryKey; + + private String defaultValue; + + private Boolean nullable; + + private Map properties; +} diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/response/VirtualTableResponse.java b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/response/VirtualTableResponse.java new file mode 100644 index 000000000..8e061ad49 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/response/VirtualTableResponse.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.response; + +import lombok.Data; +import lombok.experimental.SuperBuilder; + +@Data +@SuperBuilder +public class VirtualTableResponse extends BaseResponse { + + private Long id; + + private Long dataSourceId; + + private String description; + + private String databaseName; + + private String tableName; +} diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/service/DataSourceService.java b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/service/DataSourceService.java new file mode 100644 index 000000000..e777d5e9c --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/service/DataSourceService.java @@ -0,0 +1,116 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.service; + +import org.apache.seatunnel.api.configuration.util.OptionRule; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; +import org.apache.seatunnel.datasource.plugin.api.model.TableField; + +import java.util.List; +import java.util.Map; + +public interface DataSourceService { + + /** + * get all data source plugins + * + * @return data source plugins info + */ + List listAllDataSources(); + + /** + * get data source plugin fields + * + * @param pluginName data source name + * @return data source plugin fields + */ + OptionRule queryDataSourceFieldByName(String pluginName); + + /** + * get data source metadata fields + * + * @param pluginName data source name + * @return data source metadata fields + */ + OptionRule queryMetadataFieldByName(String pluginName); + + /** + * check data source params is valid and connectable + * + * @param parameters data source params eg mysql plugin key: url // jdbc url key: username key: + * password other key... 
+ * @return true if valid, false if invalid + */ + /** + * we can use this method to check data source connectivity + * + * @param pluginName source params + * @return check result + */ + Boolean checkDataSourceConnectivity(String pluginName, Map datasourceParams); + + /** + * get data source table names by database name + * + * @param pluginName plugin name + * @param databaseName database name + * @param requestParams connection params + * @return table names + */ + List getTables( + String pluginName, String databaseName, Map requestParams); + + /** + * get data source database names + * + * @param pluginName plugin name + * @param requestParams connection params + * @return database names + */ + List getDatabases(String pluginName, Map requestParams); + + /** + * get data source table fields + * + * @param pluginName plugin name + * @param requestParams connection params + * @param databaseName database name + * @param tableName table name + * @return table fields + */ + List getTableFields( + String pluginName, + Map requestParams, + String databaseName, + String tableName); + + /** + * get data source table fields + * + * @param pluginName plugin name + * @param requestParams connection params + * @param databaseName database name + * @param tableNames table names + * @return table fields + */ + Map> getTableFields( + String pluginName, + Map requestParams, + String databaseName, + List tableNames); +} diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/test/java/com/whaleops/datasource/s3/S3DatasourceChannelTest.java b/seatunnel-datasource/seatunnel-datasource-client/src/test/java/com/whaleops/datasource/s3/S3DatasourceChannelTest.java new file mode 100644 index 000000000..ff13771a7 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-client/src/test/java/com/whaleops/datasource/s3/S3DatasourceChannelTest.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
 See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.whaleops.datasource.s3; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +import com.google.common.collect.ImmutableMap; + +import java.util.Map; + +@Disabled +class S3DatasourceChannelTest { + // private static S3DatasourceChannel S3_DATASOURCE_CHANNEL = new S3DatasourceChannel(); + + @Test + void checkDataSourceConnectivity() { + Assertions.assertDoesNotThrow( + () -> { + // S3_DATASOURCE_CHANNEL.checkDataSourceConnectivity("S3", + // createRequestParams()); + }); + } + + private Map createRequestParams() { + Map requestParams = + new ImmutableMap.Builder() + .put("bucket", "s3a://your-bucket") + .put("fs.s3a.endpoint", "s3.cn-north-1.amazonaws.com.cn") + .put( + "fs.s3a.aws.credentials.provider", + "org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider") + .put("access_key", "<your-access-key>") + .put("secret_key", "<your-secret-key>") + .put("hadoop_s3_properties", "") + .build(); + return requestParams; + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/test/java/org/apache/seatunnel/datasource/DataSourceClientTest.java b/seatunnel-datasource/seatunnel-datasource-client/src/test/java/org/apache/seatunnel/datasource/DataSourceClientTest.java 
new file mode 100644 index 000000000..09d0f5fb4 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-client/src/test/java/org/apache/seatunnel/datasource/DataSourceClientTest.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource; + +import org.apache.commons.lang3.StringUtils; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +class DataSourceClientTest { + private static final DataSourceClient DATA_SOURCE_CLIENT = new DataSourceClient(); + + @Test + public void listAllDataSources() { + Assertions.assertTrue( + DATA_SOURCE_CLIENT.listAllDataSources().stream() + .anyMatch( + dataSourcePluginInfo -> + StringUtils.equalsIgnoreCase( + dataSourcePluginInfo.getName(), "jdbc-mysql"))); + Assertions.assertTrue( + DATA_SOURCE_CLIENT.listAllDataSources().stream() + .anyMatch( + dataSourcePluginInfo -> + StringUtils.equalsIgnoreCase( + dataSourcePluginInfo.getName(), "kafka"))); + Assertions.assertTrue( + DATA_SOURCE_CLIENT.listAllDataSources().stream() + .anyMatch( + dataSourcePluginInfo -> + StringUtils.equalsIgnoreCase( + dataSourcePluginInfo.getName(), "elasticsearch"))); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-all/pom.xml b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-all/pom.xml new file mode 100644 index 000000000..64d904df9 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-all/pom.xml @@ -0,0 +1,112 @@ + + + + 4.0.0 + + org.apache.seatunnel + seatunnel-datasource-plugins + ${revision} + + + datasource-all + + + + org.apache.seatunnel + datasource-jdbc-clickhouse + ${project.version} + + + org.apache.seatunnel + datasource-jdbc-hive + ${project.version} + + + org.apache.seatunnel + datasource-jdbc-mysql + ${project.version} + + + org.apache.seatunnel + datasource-jdbc-oracle + ${project.version} + + + org.apache.seatunnel + datasource-jdbc-postgresql + ${project.version} + + + org.apache.seatunnel + datasource-jdbc-tidb + ${project.version} + + + org.apache.seatunnel + datasource-jdbc-redshift + ${project.version} + + + org.apache.seatunnel + datasource-jdbc-sqlserver + ${project.version} + + + org.apache.seatunnel + 
datasource-jdbc-starrocks + ${project.version} + + + + org.apache.seatunnel + datasource-kafka + ${project.version} + + + + org.apache.seatunnel + datasource-elasticsearch + ${project.version} + + + org.apache.seatunnel + datasource-s3-redshift + ${project.version} + + + org.apache.seatunnel + datasource-starrocks + ${project.version} + + + org.apache.seatunnel + datasource-mysql-cdc + ${project.version} + + + org.apache.seatunnel + datasource-s3 + ${project.version} + + + org.apache.seatunnel + datasource-sqlserver-cdc + ${project.version} + + + + diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/pom.xml b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/pom.xml new file mode 100644 index 000000000..017a221ad --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/pom.xml @@ -0,0 +1,64 @@ + + + + 4.0.0 + + org.apache.seatunnel + seatunnel-datasource-plugins + ${revision} + + + datasource-elasticsearch + + + 7.5.1 + + + + + org.apache.seatunnel + datasource-plugins-api + ${project.version} + provided + + + org.apache.commons + commons-lang3 + + + + com.google.auto.service + auto-service + + + org.apache.seatunnel + seatunnel-api + provided + + + org.elasticsearch.client + elasticsearch-rest-client + ${elasticsearch-rest-client.version} + + + io.airlift + security + 206 + + + + diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/main/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/main/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceChannel.java new file mode 100644 index 000000000..989e45fd9 --- /dev/null +++ 
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/main/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceChannel.java @@ -0,0 +1,136 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.apache.seatunnel.datasource.plugin.elasticsearch;

import org.apache.seatunnel.shade.com.typesafe.config.ConfigFactory;

import org.apache.seatunnel.api.configuration.util.OptionRule;
import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
import org.apache.seatunnel.datasource.plugin.api.model.TableField;
import org.apache.seatunnel.datasource.plugin.elasticsearch.client.EsRestClient;

import org.apache.commons.lang3.StringUtils;

import lombok.NonNull;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * {@link DataSourceChannel} implementation for Elasticsearch. Elasticsearch has no
 * database concept, so a single virtual database named {@code "default"} is exposed
 * and indices are reported as tables.
 *
 * <p>NOTE(review): generic type parameters below were stripped in the extracted source
 * and have been restored from the {@code DataSourceChannel} contract — confirm against
 * the interface definition.
 */
public class ElasticSearchDataSourceChannel implements DataSourceChannel {

    /** Name of the single virtual database exposed for Elasticsearch. */
    private static final String DATABASE = "default";

    @Override
    public boolean canAbleGetSchema() {
        return true;
    }

    @Override
    public OptionRule getDataSourceOptions(@NonNull String pluginName) {
        return ElasticSearchOptionRule.optionRule();
    }

    @Override
    public OptionRule getDatasourceMetadataFieldsByDataSourceName(@NonNull String pluginName) {
        return ElasticSearchOptionRule.metadataRule();
    }

    /** Lists index names; the Elasticsearch "tables" are its indices. */
    @Override
    public List<String> getTables(
            @NonNull String pluginName, Map<String, String> requestParams, String database) {
        databaseCheck(database);
        try (EsRestClient client =
                EsRestClient.createInstance(ConfigFactory.parseMap(requestParams))) {
            return client.listIndex();
        }
    }

    @Override
    public List<String> getDatabases(
            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
        // Elasticsearch has no databases; report the interface's default list.
        return DEFAULT_DATABASES;
    }

    /** Connectivity probe: succeeds iff the cluster info endpoint ("/") answers. */
    @Override
    public boolean checkDataSourceConnectivity(
            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
        try (EsRestClient client =
                EsRestClient.createInstance(ConfigFactory.parseMap(requestParams))) {
            client.getClusterInfo();
            return true;
        } catch (Throwable e) {
            throw new DataSourcePluginException(
                    "check ElasticSearch connectivity failed, " + e.getMessage(), e);
        }
    }

    /** Maps each field of the index's mapping to a {@link TableField}. */
    @Override
    public List<TableField> getTableFields(
            @NonNull String pluginName,
            @NonNull Map<String, String> requestParams,
            @NonNull String database,
            @NonNull String table) {
        databaseCheck(database);
        try (EsRestClient client =
                EsRestClient.createInstance(ConfigFactory.parseMap(requestParams))) {
            Map<String, String> fieldTypeMapping = client.getFieldTypeMapping(table);
            List<TableField> fields = new ArrayList<>();
            fieldTypeMapping.forEach(
                    (fieldName, fieldType) ->
                            fields.add(convertToTableField(fieldName, fieldType)));
            return fields;
        } catch (Exception ex) {
            throw new DataSourcePluginException("Get table fields failed", ex);
        }
    }

    @Override
    public Map<String, List<TableField>> getTableFields(
            @NonNull String pluginName,
            @NonNull Map<String, String> requestParams,
            @NonNull String database,
            @NonNull List<String> tables) {
        databaseCheck(database);
        Map<String, List<TableField>> tableFields = new HashMap<>();
        tables.forEach(
                table ->
                        tableFields.put(
                                table, getTableFields(pluginName, requestParams, database, table)));
        return tableFields;
    }

    /** Rejects any database name other than the single virtual {@link #DATABASE}. */
    private static void databaseCheck(@NonNull String database) {
        if (!StringUtils.equalsIgnoreCase(database, DATABASE)) {
            throw new IllegalArgumentException("database not found: " + database);
        }
    }

    /**
     * Builds a TableField from an ES field name/type pair. All fields are reported
     * nullable; only the synthetic "_id" field is treated as the primary key.
     */
    private TableField convertToTableField(String fieldName, String fieldType) {
        TableField tableField = new TableField();
        tableField.setName(fieldName);
        tableField.setType(fieldType);
        tableField.setComment(null);
        tableField.setNullable(true);
        tableField.setPrimaryKey(fieldName.equals("_id"));
        tableField.setDefaultValue(null);
        return tableField;
    }
}
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/main/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceConfig.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.elasticsearch; + +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; +import org.apache.seatunnel.datasource.plugin.api.DatasourcePluginTypeEnum; + +public class ElasticSearchDataSourceConfig { + + public static final String PLUGIN_NAME = "ElasticSearch"; + + public static final String PLUGIN_VERSION = "1.0.0"; + + public static final DataSourcePluginInfo ELASTICSEARCH_DATASOURCE_PLUGIN_INFO = + DataSourcePluginInfo.builder() + .name(PLUGIN_NAME) + .icon(PLUGIN_NAME) + .version(PLUGIN_VERSION) + .type(DatasourcePluginTypeEnum.NO_STRUCTURED.getCode()) + .build(); +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/main/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceFactory.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/main/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceFactory.java new file mode 100644 index 000000000..f9d01cae3 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/main/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceFactory.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
package org.apache.seatunnel.datasource.plugin.elasticsearch;

import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
import org.apache.seatunnel.datasource.plugin.api.DataSourceFactory;
import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo;

import com.google.auto.service.AutoService;
import com.google.common.collect.Sets;

import java.util.Set;

/**
 * SPI entry point for the Elasticsearch datasource plugin; discovered via
 * {@code ServiceLoader} through the {@link AutoService} annotation.
 */
@AutoService(DataSourceFactory.class)
public class ElasticSearchDataSourceFactory implements DataSourceFactory {

    // Reference the single source of truth instead of duplicating the literal,
    // so the factory identifier can never drift from the plugin descriptor.
    public static final String PLUGIN_NAME = ElasticSearchDataSourceConfig.PLUGIN_NAME;

    @Override
    public String factoryIdentifier() {
        return PLUGIN_NAME;
    }

    @Override
    public Set<DataSourcePluginInfo> supportedDataSources() {
        return Sets.newHashSet(ElasticSearchDataSourceConfig.ELASTICSEARCH_DATASOURCE_PLUGIN_INFO);
    }

    @Override
    public DataSourceChannel createChannel() {
        return new ElasticSearchDataSourceChannel();
    }
}
package org.apache.seatunnel.datasource.plugin.elasticsearch;

import org.apache.seatunnel.api.configuration.Option;
import org.apache.seatunnel.api.configuration.Options;
import org.apache.seatunnel.api.configuration.util.OptionRule;

import java.util.List;

/**
 * Declares the configuration options accepted by the Elasticsearch datasource and the
 * {@link OptionRule}s used to validate them.
 *
 * <p>NOTE(review): the {@code Option<...>} generic parameters were stripped in the
 * extracted source and have been restored from each option's declared value type.
 */
public class ElasticSearchOptionRule {

    /** Cluster HTTP addresses, each "host:port". Required. */
    public static final Option<List<String>> HOSTS =
            Options.key("hosts")
                    .listType()
                    .noDefaultValue()
                    .withDescription(
                            "Elasticsearch cluster http address, the format is host:port, allowing multiple hosts to be specified. Such as [\"host1:9200\", \"host2:9200\"]");

    /** Index name used as the per-table metadata field; supports '*' wildcards. */
    public static final Option<String> INDEX =
            Options.key("index")
                    .stringType()
                    .noDefaultValue()
                    .withDescription("Elasticsearch index name, support * fuzzy matching");

    public static final Option<String> USERNAME =
            Options.key("username")
                    .stringType()
                    .noDefaultValue()
                    .withDescription("x-pack username");

    public static final Option<String> PASSWORD =
            Options.key("password")
                    .stringType()
                    .noDefaultValue()
                    .withDescription("x-pack password");

    public static final Option<Boolean> TLS_VERIFY_CERTIFICATE =
            Options.key("tls_verify_certificate")
                    .booleanType()
                    .defaultValue(true)
                    .withDescription("Enable certificates validation for HTTPS endpoints");

    public static final Option<Boolean> TLS_VERIFY_HOSTNAME =
            Options.key("tls_verify_hostname")
                    .booleanType()
                    .defaultValue(true)
                    .withDescription("Enable hostname validation for HTTPS endpoints");

    public static final Option<String> TLS_KEY_STORE_PATH =
            Options.key("tls_keystore_path")
                    .stringType()
                    .noDefaultValue()
                    .withDescription(
                            "The path to the PEM or JKS key store. This file must be readable by the operating system user running SeaTunnel.");

    public static final Option<String> TLS_KEY_STORE_PASSWORD =
            Options.key("tls_keystore_password")
                    .stringType()
                    .noDefaultValue()
                    .withDescription("The key password for the key store specified");

    public static final Option<String> TLS_TRUST_STORE_PATH =
            Options.key("tls_truststore_path")
                    .stringType()
                    .noDefaultValue()
                    .withDescription(
                            "The path to PEM or JKS trust store. This file must be readable by the operating system user running SeaTunnel.");

    public static final Option<String> TLS_TRUST_STORE_PASSWORD =
            Options.key("tls_truststore_password")
                    .stringType()
                    .noDefaultValue()
                    .withDescription("The key password for the trust store specified");

    /** Rule for validating a datasource definition: hosts required, auth/TLS optional. */
    public static OptionRule optionRule() {
        return OptionRule.builder()
                .required(HOSTS)
                .optional(
                        USERNAME,
                        PASSWORD,
                        TLS_VERIFY_CERTIFICATE,
                        TLS_VERIFY_HOSTNAME,
                        TLS_KEY_STORE_PATH,
                        TLS_KEY_STORE_PASSWORD,
                        TLS_TRUST_STORE_PATH,
                        TLS_TRUST_STORE_PASSWORD)
                .build();
    }

    /** Rule for per-table ("virtual table") metadata: only the index name is needed. */
    public static OptionRule metadataRule() {
        return OptionRule.builder().required(INDEX).build();
    }
}
package org.apache.seatunnel.datasource.plugin.elasticsearch.client;

import lombok.Builder;
import lombok.Getter;
import lombok.ToString;

/**
 * Immutable value holder for cluster metadata parsed from the Elasticsearch root
 * ("/") endpoint by {@link EsRestClient#getClusterInfo()}.
 */
@Getter
@Builder
@ToString
public class ElasticsearchClusterInfo {
    // Distribution identifier from the "version.distribution" field; null when the
    // server omits it (stock Elasticsearch does — forks such as OpenSearch set it).
    private String distribution;
    // Server version string from the "version.number" field, e.g. "7.5.1".
    private String clusterVersion;
}
package org.apache.seatunnel.datasource.plugin.elasticsearch.client;

import org.apache.seatunnel.shade.com.fasterxml.jackson.databind.JsonNode;
import org.apache.seatunnel.shade.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.seatunnel.shade.com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.seatunnel.shade.com.typesafe.config.Config;

import org.apache.seatunnel.common.utils.JsonUtils;
import org.apache.seatunnel.datasource.plugin.elasticsearch.ElasticSearchOptionRule;

import org.apache.http.HttpHost;
import org.apache.http.HttpStatus;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.conn.ssl.NoopHostnameVerifier;
import org.apache.http.conn.ssl.TrustAllStrategy;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.ssl.SSLContexts;
import org.apache.http.util.EntityUtils;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;

import lombok.extern.slf4j.Slf4j;

import javax.net.ssl.SSLContext;

import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;

/**
 * Thin wrapper around the Elasticsearch low-level REST client used for metadata
 * queries (cluster info, index listing, field mappings). Instances own the
 * underlying {@link RestClient} and must be closed (implements {@link AutoCloseable}).
 *
 * <p>NOTE(review): generic type parameters in this class were stripped in the
 * extracted source and have been restored from usage.
 */
@Slf4j
public class EsRestClient implements AutoCloseable {

    // 10 s to obtain a connection from the pool.
    private static final int CONNECTION_REQUEST_TIMEOUT = 10 * 1000;

    // 5 min socket read timeout — mapping queries on large clusters can be slow.
    private static final int SOCKET_TIMEOUT = 5 * 60 * 1000;

    private final RestClient restClient;

    /** Shared, thread-safe mapper; reused instead of allocating per request. */
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    private EsRestClient(RestClient restClient) {
        this.restClient = restClient;
    }

    /**
     * Builds a client from a datasource {@link Config}, reading hosts, optional
     * basic-auth credentials, and TLS settings declared in
     * {@link ElasticSearchOptionRule}.
     *
     * @throws RuntimeException wrapping any configuration or parse failure
     */
    public static EsRestClient createInstance(Config pluginConfig) {
        try {
            // "hosts" arrives as a JSON array string, e.g. ["host1:9200","host2:9200"].
            @SuppressWarnings("unchecked")
            List<String> hosts =
                    OBJECT_MAPPER.readValue(
                            pluginConfig.getString(ElasticSearchOptionRule.HOSTS.key()),
                            List.class);
            Optional<String> username = Optional.empty();
            Optional<String> password = Optional.empty();
            if (pluginConfig.hasPath(ElasticSearchOptionRule.USERNAME.key())) {
                // Password is only consulted when a username is configured.
                username =
                        Optional.of(pluginConfig.getString(ElasticSearchOptionRule.USERNAME.key()));
                if (pluginConfig.hasPath(ElasticSearchOptionRule.PASSWORD.key())) {
                    password =
                            Optional.of(
                                    pluginConfig.getString(ElasticSearchOptionRule.PASSWORD.key()));
                }
            }
            Optional<String> keystorePath = Optional.empty();
            Optional<String> keystorePassword = Optional.empty();
            Optional<String> truststorePath = Optional.empty();
            Optional<String> truststorePassword = Optional.empty();
            boolean tlsVerifyCertificate =
                    ElasticSearchOptionRule.TLS_VERIFY_CERTIFICATE.defaultValue();
            if (pluginConfig.hasPath(ElasticSearchOptionRule.TLS_VERIFY_CERTIFICATE.key())) {
                tlsVerifyCertificate =
                        pluginConfig.getBoolean(
                                ElasticSearchOptionRule.TLS_VERIFY_CERTIFICATE.key());
            }
            // Key/trust stores are only relevant when certificate verification is on;
            // with verification off a trust-all context is used instead.
            if (tlsVerifyCertificate) {
                if (pluginConfig.hasPath(ElasticSearchOptionRule.TLS_KEY_STORE_PATH.key())) {
                    keystorePath =
                            Optional.of(
                                    pluginConfig.getString(
                                            ElasticSearchOptionRule.TLS_KEY_STORE_PATH.key()));
                }
                if (pluginConfig.hasPath(ElasticSearchOptionRule.TLS_KEY_STORE_PASSWORD.key())) {
                    keystorePassword =
                            Optional.of(
                                    pluginConfig.getString(
                                            ElasticSearchOptionRule.TLS_KEY_STORE_PASSWORD.key()));
                }
                if (pluginConfig.hasPath(ElasticSearchOptionRule.TLS_TRUST_STORE_PATH.key())) {
                    truststorePath =
                            Optional.of(
                                    pluginConfig.getString(
                                            ElasticSearchOptionRule.TLS_TRUST_STORE_PATH.key()));
                }
                if (pluginConfig.hasPath(
                        ElasticSearchOptionRule.TLS_TRUST_STORE_PASSWORD.key())) {
                    truststorePassword =
                            Optional.of(
                                    pluginConfig.getString(
                                            ElasticSearchOptionRule.TLS_TRUST_STORE_PASSWORD
                                                    .key()));
                }
            }
            boolean tlsVerifyHostnames = ElasticSearchOptionRule.TLS_VERIFY_HOSTNAME.defaultValue();
            if (pluginConfig.hasPath(ElasticSearchOptionRule.TLS_VERIFY_HOSTNAME.key())) {
                tlsVerifyHostnames =
                        pluginConfig.getBoolean(ElasticSearchOptionRule.TLS_VERIFY_HOSTNAME.key());
            }
            return createInstance(
                    hosts,
                    username,
                    password,
                    tlsVerifyCertificate,
                    tlsVerifyHostnames,
                    keystorePath,
                    keystorePassword,
                    truststorePath,
                    truststorePassword);
        } catch (Exception e) {
            throw new RuntimeException("Create EsRestClient failed", e);
        }
    }

    /** Builds a client from already-resolved connection parameters. */
    public static EsRestClient createInstance(
            List<String> hosts,
            Optional<String> username,
            Optional<String> password,
            boolean tlsVerifyCertificate,
            boolean tlsVerifyHostnames,
            Optional<String> keystorePath,
            Optional<String> keystorePassword,
            Optional<String> truststorePath,
            Optional<String> truststorePassword) {
        RestClientBuilder restClientBuilder =
                getRestClientBuilder(
                        hosts,
                        username,
                        password,
                        tlsVerifyCertificate,
                        tlsVerifyHostnames,
                        keystorePath,
                        keystorePassword,
                        truststorePath,
                        truststorePassword);
        return new EsRestClient(restClientBuilder.build());
    }

    /** Configures timeouts, basic auth and TLS on a {@link RestClientBuilder}. */
    private static RestClientBuilder getRestClientBuilder(
            List<String> hosts,
            Optional<String> username,
            Optional<String> password,
            boolean tlsVerifyCertificate,
            boolean tlsVerifyHostnames,
            Optional<String> keystorePath,
            Optional<String> keystorePassword,
            Optional<String> truststorePath,
            Optional<String> truststorePassword) {
        HttpHost[] httpHosts = new HttpHost[hosts.size()];
        for (int i = 0; i < hosts.size(); i++) {
            httpHosts[i] = HttpHost.create(hosts.get(i));
        }

        RestClientBuilder restClientBuilder =
                RestClient.builder(httpHosts)
                        .setRequestConfigCallback(
                                requestConfigBuilder ->
                                        requestConfigBuilder
                                                .setConnectionRequestTimeout(
                                                        CONNECTION_REQUEST_TIMEOUT)
                                                .setSocketTimeout(SOCKET_TIMEOUT));

        restClientBuilder.setHttpClientConfigCallback(
                httpClientBuilder -> {
                    if (username.isPresent()) {
                        CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
                        credentialsProvider.setCredentials(
                                AuthScope.ANY,
                                new UsernamePasswordCredentials(username.get(), password.get()));
                        httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
                    }

                    try {
                        if (tlsVerifyCertificate) {
                            // Custom stores only; empty Optional leaves the JVM default.
                            Optional<SSLContext> sslContext =
                                    SSLUtils.buildSSLContext(
                                            keystorePath,
                                            keystorePassword,
                                            truststorePath,
                                            truststorePassword);
                            sslContext.ifPresent(e -> httpClientBuilder.setSSLContext(e));
                        } else {
                            // Verification disabled: trust every certificate.
                            SSLContext sslContext =
                                    SSLContexts.custom()
                                            .loadTrustMaterial(new TrustAllStrategy())
                                            .build();
                            httpClientBuilder.setSSLContext(sslContext);
                        }
                        if (!tlsVerifyHostnames) {
                            httpClientBuilder.setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE);
                        }
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                    return httpClientBuilder;
                });
        return restClientBuilder;
    }

    /**
     * Fetches cluster metadata from the root ("/") endpoint.
     *
     * @throws ResponseException on any I/O failure
     */
    public ElasticsearchClusterInfo getClusterInfo() {
        Request request = new Request("GET", "/");
        try {
            Response response = restClient.performRequest(request);
            String result = EntityUtils.toString(response.getEntity());
            // Reuse the shared mapper instead of allocating one per call.
            JsonNode jsonNode = OBJECT_MAPPER.readTree(result);
            JsonNode versionNode = jsonNode.get("version");
            return ElasticsearchClusterInfo.builder()
                    .clusterVersion(versionNode.get("number").asText())
                    // "distribution" is absent on stock Elasticsearch; keep null then.
                    .distribution(
                            Optional.ofNullable(versionNode.get("distribution"))
                                    .map(JsonNode::asText)
                                    .orElse(null))
                    .build();
        } catch (IOException e) {
            throw new ResponseException("fail to get elasticsearch version.", e);
        }
    }

    /** Closes the underlying REST client; close failures are logged, not thrown. */
    @Override
    public void close() {
        try {
            restClient.close();
        } catch (IOException e) {
            log.warn("close elasticsearch connection error", e);
        }
    }

    /**
     * Lists all index names via {@code GET /_cat/indices?format=json}.
     *
     * @throws ResponseException on non-200 status or I/O failure
     */
    public List<String> listIndex() {
        String endpoint = "/_cat/indices?format=json";
        Request request = new Request("GET", endpoint);
        try {
            Response response = restClient.performRequest(request);
            if (response == null) {
                throw new ResponseException("GET " + endpoint + " response null");
            }
            if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
                String entity = EntityUtils.toString(response.getEntity());
                return JsonUtils.toList(entity, Map.class).stream()
                        .map(map -> map.get("index").toString())
                        .collect(Collectors.toList());
            } else {
                throw new ResponseException(
                        String.format(
                                "GET %s response status code=%d",
                                endpoint, response.getStatusLine().getStatusCode()));
            }
        } catch (IOException ex) {
            throw new ResponseException(ex);
        }
    }

    /**
     * Deletes the given index.
     *
     * @throws ResponseException on non-200 status or I/O failure
     */
    public void dropIndex(String tableName) {
        String endpoint = String.format("/%s", tableName);
        Request request = new Request("DELETE", endpoint);
        try {
            Response response = restClient.performRequest(request);
            if (response == null) {
                throw new ResponseException("DELETE " + endpoint + " response null");
            }
            // todo: if the index doesn't exist, the response status code is 200?
            if (response.getStatusLine().getStatusCode() != HttpStatus.SC_OK) {
                throw new ResponseException(
                        String.format(
                                "DELETE %s response status code=%d",
                                endpoint, response.getStatusLine().getStatusCode()));
            }
        } catch (IOException ex) {
            throw new ResponseException(ex);
        }
    }

    /**
     * get es field name and type mapping relation
     *
     * @param index index name
     * @return {key-> field name,value->es type}
     */
    public Map<String, String> getFieldTypeMapping(String index) {
        String endpoint = String.format("/%s/_mappings", index);
        Request request = new Request("GET", endpoint);
        Map<String, String> mapping = new HashMap<>();
        try {
            Response response = restClient.performRequest(request);
            if (response == null) {
                throw new ResponseException("GET " + endpoint + " response null");
            }
            if (response.getStatusLine().getStatusCode() != HttpStatus.SC_OK) {
                throw new ResponseException(
                        String.format(
                                "GET %s response status code=%d",
                                endpoint, response.getStatusLine().getStatusCode()));
            }
            String entity = EntityUtils.toString(response.getEntity());
            log.info(String.format("GET %s response=%s", endpoint, entity));
            ObjectNode responseJson = JsonUtils.parseObject(entity);
            for (Iterator<JsonNode> it = responseJson.elements(); it.hasNext(); ) {
                JsonNode indexProperty = it.next();
                JsonNode mappingsProperty = indexProperty.get("mappings");
                // FIX(review): the original checked has("mappingsProperty"), an apparent
                // typo for "properties" that made this branch unreachable. ES 7+ puts
                // "properties" directly under "mappings"; older versions nest it under
                // a type name, handled by the else branch.
                if (mappingsProperty.has("properties")) {
                    JsonNode properties = mappingsProperty.get("properties");
                    mapping = getFieldTypeMappingFromProperties(properties);
                } else {
                    for (JsonNode typeNode : mappingsProperty) {
                        JsonNode properties;
                        if (typeNode.has("properties")) {
                            properties = typeNode.get("properties");
                        } else {
                            properties = typeNode;
                        }
                        mapping.putAll(getFieldTypeMappingFromProperties(properties));
                    }
                }
            }
        } catch (IOException ex) {
            throw new ResponseException(ex);
        }
        return mapping;
    }

    /** Extracts field-name -> type pairs from a "properties" node; defaults to "text". */
    private static Map<String, String> getFieldTypeMappingFromProperties(JsonNode properties) {
        Map<String, String> mapping = new HashMap<>();
        for (Iterator<String> it = properties.fieldNames(); it.hasNext(); ) {
            String field = it.next();
            JsonNode fieldProperty = properties.get(field);
            if (fieldProperty == null) {
                mapping.put(field, "text");
            } else {
                if (fieldProperty.has("type")) {
                    String type = fieldProperty.get("type").asText();
                    mapping.put(field, type);
                } else {
                    // Object fields (nested properties) carry no "type"; fall back.
                    log.warn(
                            String.format(
                                    "fail to get elasticsearch field %s mapping type,so give a default type text",
                                    field));
                    mapping.put(field, "text");
                }
            }
        }
        return mapping;
    }
}
package org.apache.seatunnel.datasource.plugin.elasticsearch.client;

/**
 * Unchecked exception signalling a failed or malformed Elasticsearch REST response
 * (null response, unexpected HTTP status, or an underlying I/O error). Mirrors the
 * standard {@link RuntimeException} constructor set so callers can wrap causes freely.
 */
public class ResponseException extends RuntimeException {

    public ResponseException() {
        super();
    }

    public ResponseException(String message) {
        super(message);
    }

    public ResponseException(String message, Throwable cause) {
        super(message, cause);
    }

    public ResponseException(Throwable cause) {
        super(cause);
    }

    // Protected: only subclasses may tune suppression / stack-trace writability.
    protected ResponseException(
            String message,
            Throwable cause,
            boolean enableSuppression,
            boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }
}
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.elasticsearch.client; + +import io.airlift.security.pem.PemReader; + +import javax.net.ssl.KeyManager; +import javax.net.ssl.KeyManagerFactory; +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManager; +import javax.net.ssl.TrustManagerFactory; +import javax.net.ssl.X509TrustManager; +import javax.security.auth.x500.X500Principal; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.security.GeneralSecurityException; +import java.security.KeyStore; +import java.security.cert.Certificate; +import java.security.cert.CertificateExpiredException; +import java.security.cert.CertificateNotYetValidException; +import java.security.cert.X509Certificate; +import java.util.Arrays; +import java.util.List; +import java.util.Optional; + +import static java.util.Collections.list; + +@SuppressWarnings("MagicNumber") +public final class SSLUtils { + + public static Optional buildSSLContext( + Optional keyStorePath, + Optional keyStorePassword, + Optional trustStorePath, + Optional trustStorePassword) + throws GeneralSecurityException, IOException { + if (!keyStorePath.isPresent() && !trustStorePath.isPresent()) { + return Optional.empty(); + } + return Optional.of( + createSSLContext( + keyStorePath, keyStorePassword, trustStorePath, trustStorePassword)); + } + + private static SSLContext createSSLContext( + Optional keyStorePath, + Optional keyStorePassword, + Optional trustStorePath, + Optional trustStorePassword) + throws GeneralSecurityException, IOException { + // load KeyStore if configured and get KeyManagers + KeyStore keyStore = null; + KeyManager[] keyManagers = null; + if (keyStorePath.isPresent()) { + File keyStoreFile = new File(keyStorePath.get()); + char[] keyManagerPassword; + try { + // attempt to read the key store as a 
PEM file + keyStore = PemReader.loadKeyStore(keyStoreFile, keyStoreFile, keyStorePassword); + // for PEM encoded keys, the password is used to decrypt the specific key (and does + // not protect the keystore itself) + keyManagerPassword = new char[0]; + } catch (IOException | GeneralSecurityException ignored) { + keyManagerPassword = keyStorePassword.map(String::toCharArray).orElse(null); + + keyStore = KeyStore.getInstance(KeyStore.getDefaultType()); + try (InputStream in = new FileInputStream(keyStoreFile)) { + keyStore.load(in, keyManagerPassword); + } + } + validateCertificates(keyStore); + KeyManagerFactory keyManagerFactory = + KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); + keyManagerFactory.init(keyStore, keyManagerPassword); + keyManagers = keyManagerFactory.getKeyManagers(); + } + + // load TrustStore if configured, otherwise use KeyStore + KeyStore trustStore = keyStore; + if (trustStorePath.isPresent()) { + File trustStoreFile = new File(trustStorePath.get()); + trustStore = loadTrustStore(trustStoreFile, trustStorePassword); + } + + // create TrustManagerFactory + TrustManagerFactory trustManagerFactory = + TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); + trustManagerFactory.init(trustStore); + + // get X509TrustManager + TrustManager[] trustManagers = trustManagerFactory.getTrustManagers(); + if (trustManagers.length != 1 || !(trustManagers[0] instanceof X509TrustManager)) { + throw new RuntimeException( + "Unexpected default trust managers:" + Arrays.toString(trustManagers)); + } + // create SSLContext + SSLContext result = SSLContext.getInstance("SSL"); + result.init(keyManagers, trustManagers, null); + return result; + } + + private static KeyStore loadTrustStore(File trustStorePath, Optional trustStorePassword) + throws IOException, GeneralSecurityException { + KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType()); + try { + // attempt to read the trust store as a PEM file + 
List certificateChain = PemReader.readCertificateChain(trustStorePath); + if (!certificateChain.isEmpty()) { + trustStore.load(null, null); + for (X509Certificate certificate : certificateChain) { + X500Principal principal = certificate.getSubjectX500Principal(); + trustStore.setCertificateEntry(principal.getName(), certificate); + } + return trustStore; + } + } catch (IOException | GeneralSecurityException ignored) { + // ignored + } + + try (InputStream in = new FileInputStream(trustStorePath)) { + trustStore.load(in, trustStorePassword.map(String::toCharArray).orElse(null)); + } + return trustStore; + } + + private static void validateCertificates(KeyStore keyStore) throws GeneralSecurityException { + for (String alias : list(keyStore.aliases())) { + if (!keyStore.isKeyEntry(alias)) { + continue; + } + Certificate certificate = keyStore.getCertificate(alias); + if (!(certificate instanceof X509Certificate)) { + continue; + } + + try { + ((X509Certificate) certificate).checkValidity(); + } catch (CertificateExpiredException e) { + throw new CertificateExpiredException( + "KeyStore certificate is expired: " + e.getMessage()); + } catch (CertificateNotYetValidException e) { + throw new CertificateNotYetValidException( + "KeyStore certificate is not yet valid: " + e.getMessage()); + } + } + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/test/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceChannelTest.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/test/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceChannelTest.java new file mode 100644 index 000000000..b888a1ff7 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/test/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceChannelTest.java @@ -0,0 +1,116 @@ +/* + * Licensed to the Apache 
Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.elasticsearch; + +import org.apache.seatunnel.datasource.plugin.api.model.TableField; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; + +import java.util.List; +import java.util.Map; + +// todo: use testcontainer to create container +@Disabled +class ElasticSearchDataSourceChannelTest { + private static final Logger LOGGER = + LoggerFactory.getLogger(ElasticSearchDataSourceChannelTest.class); + + private static final ElasticSearchDataSourceChannel ELASTIC_SEARCH_DATA_SOURCE_CHANNEL = + new ElasticSearchDataSourceChannel(); + + private static final String PLUGIN_NAME = "ElasticSearch"; + + private static final String DATABASE = "Default"; + + private static final Map REQUEST_MAP = + new ImmutableMap.Builder() + .put(ElasticSearchOptionRule.HOSTS.key(), "[\"http://localhost:9200\"]") + .build(); + + @Test + void canAbleGetSchema() { + Assertions.assertTrue(ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.canAbleGetSchema()); + } + + @Test + void 
getDataSourceOptions() { + Assertions.assertNotNull( + ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.getDataSourceOptions(PLUGIN_NAME)); + } + + @Test + void getDatasourceMetadataFieldsByDataSourceName() { + Assertions.assertNotNull( + ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.getDatasourceMetadataFieldsByDataSourceName( + PLUGIN_NAME)); + } + + @Test + void getTables() { + Assertions.assertDoesNotThrow( + () -> { + List tables = + ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.getTables( + PLUGIN_NAME, REQUEST_MAP, DATABASE); + LOGGER.info("{}", tables); + }); + } + + @Test + void getDatabases() { + Assertions.assertLinesMatch( + Lists.newArrayList("default"), + ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.getDatabases(PLUGIN_NAME, REQUEST_MAP)); + } + + @Test + void checkDataSourceConnectivity() { + Assertions.assertTrue( + ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.checkDataSourceConnectivity( + PLUGIN_NAME, REQUEST_MAP)); + } + + @Test + void getTableFields() { + Assertions.assertDoesNotThrow( + () -> { + List tableFields = + ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.getTableFields( + PLUGIN_NAME, REQUEST_MAP, DATABASE, ""); + LOGGER.info("{}", tableFields); + }); + } + + @Test + void testGetTableFields() { + Assertions.assertDoesNotThrow( + () -> { + Map> tableFields = + ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.getTableFields( + PLUGIN_NAME, REQUEST_MAP, DATABASE, Lists.newArrayList("")); + LOGGER.info("{}", tableFields); + }); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/test/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceFactoryTest.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/test/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceFactoryTest.java new file mode 100644 index 000000000..0441d7a12 --- /dev/null +++ 
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/test/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceFactoryTest.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.elasticsearch; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +class ElasticSearchDataSourceFactoryTest { + + private static final ElasticSearchDataSourceFactory ELASTIC_SEARCH_DATA_SOURCE_FACTORY = + new ElasticSearchDataSourceFactory(); + + @Test + void factoryIdentifier() { + Assertions.assertEquals( + "ElasticSearch", ELASTIC_SEARCH_DATA_SOURCE_FACTORY.factoryIdentifier()); + } + + @Test + void supportedDataSources() { + Assertions.assertFalse(ELASTIC_SEARCH_DATA_SOURCE_FACTORY.supportedDataSources().isEmpty()); + } + + @Test + void createChannel() { + Assertions.assertNotNull(ELASTIC_SEARCH_DATA_SOURCE_FACTORY.createChannel()); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/pom.xml b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/pom.xml new file mode 100644 index 000000000..6537afe7c --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/pom.xml @@ -0,0 +1,61 @@ + + + + 4.0.0 + + org.apache.seatunnel + seatunnel-datasource-plugins + ${revision} + + + datasource-jdbc-clickhouse + + + 0.3.2-patch11 + + + + + org.apache.seatunnel + datasource-plugins-api + ${project.version} + provided + + + org.apache.commons + commons-lang3 + + + + com.google.auto.service + auto-service + + + org.apache.seatunnel + seatunnel-api + provided + + + + + com.clickhouse + clickhouse-jdbc + ${clickhouse.version} + + + + diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseDataSourceConfig.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseDataSourceConfig.java new file mode 100644 index 000000000..4c5b3b2e7 --- /dev/null +++ 
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseDataSourceConfig.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.clickhouse.jdbc; + +import org.apache.seatunnel.api.configuration.util.OptionRule; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; +import org.apache.seatunnel.datasource.plugin.api.DatasourcePluginTypeEnum; + +import com.google.common.collect.Sets; + +import java.util.Set; + +public class ClickhouseDataSourceConfig { + public static final String PLUGIN_NAME = "JDBC-ClickHouse"; + + public static final DataSourcePluginInfo CLICKHOUSE_DATASOURCE_PLUGIN_INFO = + DataSourcePluginInfo.builder() + .name(PLUGIN_NAME) + .icon(PLUGIN_NAME) + .version("1.0.0") + .type(DatasourcePluginTypeEnum.DATABASE.getCode()) + .supportVirtualTables(false) + .build(); + + public static final Set CLICKHOUSE_SYSTEM_DATABASES = + Sets.newHashSet( + "system", + "default", + "information_schema", + "mysql", + "performance_schema", + "sys"); + + public static final OptionRule OPTION_RULE = + OptionRule.builder() + .required(ClickhouseOptionRule.URL, ClickhouseOptionRule.DRIVER) + .optional(ClickhouseOptionRule.USER, ClickhouseOptionRule.PASSWORD) + .build(); + + public static final OptionRule METADATA_RULE = + OptionRule.builder() + .required(ClickhouseOptionRule.DATABASE, ClickhouseOptionRule.TABLE) + .build(); +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseJdbcDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseJdbcDataSourceChannel.java new file mode 100644 index 000000000..ef160afbc --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseJdbcDataSourceChannel.java @@ -0,0 +1,178 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under 
one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.clickhouse.jdbc; + +import org.apache.seatunnel.api.configuration.util.OptionRule; +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException; +import org.apache.seatunnel.datasource.plugin.api.model.TableField; +import org.apache.seatunnel.datasource.plugin.api.utils.JdbcUtils; + +import org.apache.commons.lang3.StringUtils; + +import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; + +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import static com.google.common.base.Preconditions.checkNotNull; + +@Slf4j +public class ClickhouseJdbcDataSourceChannel implements DataSourceChannel { + + @Override + public OptionRule getDataSourceOptions(@NonNull String pluginName) { + return ClickhouseDataSourceConfig.OPTION_RULE; + } + + @Override + public OptionRule getDatasourceMetadataFieldsByDataSourceName(@NonNull String pluginName) { + return ClickhouseDataSourceConfig.METADATA_RULE; + } + + 
@Override + public List getTables( + @NonNull String pluginName, Map requestParams, String database) { + List tableNames = new ArrayList<>(); + try (Connection connection = getConnection(requestParams); ) { + ResultSet resultSet = + connection + .getMetaData() + .getTables(database, null, null, new String[] {"TABLE"}); + while (resultSet.next()) { + String tableName = resultSet.getString("TABLE_NAME"); + if (StringUtils.isNotBlank(tableName)) { + tableNames.add(tableName); + } + } + return tableNames; + } catch (ClassNotFoundException | SQLException e) { + throw new DataSourcePluginException("get table names failed", e); + } + } + + @Override + public List getDatabases( + @NonNull String pluginName, @NonNull Map requestParams) { + List dbNames = new ArrayList<>(); + try (Connection connection = getConnection(requestParams); + Statement statement = connection.createStatement(); + ResultSet re = statement.executeQuery("SHOW DATABASES;")) { + // filter system databases + while (re.next()) { + String dbName = re.getString("name"); + if (StringUtils.isNotBlank(dbName) + && !ClickhouseDataSourceConfig.CLICKHOUSE_SYSTEM_DATABASES.contains( + dbName)) { + dbNames.add(dbName); + } + } + return dbNames; + } catch (Exception ex) { + throw new RuntimeException("get databases failed", ex); + } + } + + @Override + public boolean checkDataSourceConnectivity( + @NonNull String pluginName, @NonNull Map requestParams) { + try (Connection ignored = getConnection(requestParams)) { + return true; + } catch (Exception e) { + throw new DataSourcePluginException("Check jdbc connectivity failed", e); + } + } + + @Override + public List getTableFields( + @NonNull String pluginName, + @NonNull Map requestParams, + @NonNull String database, + @NonNull String table) { + List tableFields = new ArrayList<>(); + try (Connection connection = getConnection(requestParams, database)) { + DatabaseMetaData metaData = connection.getMetaData(); + String primaryKey = getPrimaryKey(metaData, database, 
table); + try (ResultSet resultSet = metaData.getColumns(database, null, table, null)) { + while (resultSet.next()) { + TableField tableField = new TableField(); + String columnName = resultSet.getString("COLUMN_NAME"); + tableField.setPrimaryKey(false); + if (StringUtils.isNotBlank(primaryKey) && primaryKey.equals(columnName)) { + tableField.setPrimaryKey(true); + } + tableField.setName(columnName); + tableField.setType(resultSet.getString("TYPE_NAME")); + tableField.setComment(resultSet.getString("REMARKS")); + Object nullable = resultSet.getObject("IS_NULLABLE"); + tableField.setNullable(Boolean.TRUE.toString().equals(nullable.toString())); + tableFields.add(tableField); + } + } + } catch (ClassNotFoundException | SQLException e) { + throw new DataSourcePluginException("Get table fields failed", e); + } + return tableFields; + } + + @Override + public Map> getTableFields( + @NonNull String pluginName, + @NonNull Map requestParams, + @NonNull String database, + @NonNull List tables) { + return null; + } + + private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName) + throws SQLException { + ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName); + while (primaryKeysInfo.next()) { + return primaryKeysInfo.getString("COLUMN_NAME"); + } + return null; + } + + private Connection getConnection(Map requestParams) + throws SQLException, ClassNotFoundException { + return getConnection(requestParams, null); + } + + private Connection getConnection(Map requestParams, String databaseName) + throws SQLException, ClassNotFoundException { + checkNotNull(requestParams.get(ClickhouseOptionRule.DRIVER.key())); + checkNotNull(requestParams.get(ClickhouseOptionRule.URL.key()), "Jdbc url cannot be null"); + String url = + JdbcUtils.replaceDatabase( + requestParams.get(ClickhouseOptionRule.URL.key()), databaseName); + if (requestParams.containsKey(ClickhouseOptionRule.USER.key())) { + String username = 
requestParams.get(ClickhouseOptionRule.USER.key()); + String password = requestParams.get(ClickhouseOptionRule.PASSWORD.key()); + return DriverManager.getConnection(url, username, password); + } + return DriverManager.getConnection(url); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseJdbcDataSourceFactory.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseJdbcDataSourceFactory.java new file mode 100644 index 000000000..f8b979864 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseJdbcDataSourceFactory.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.clickhouse.jdbc; + +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourceFactory; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; + +import com.google.auto.service.AutoService; +import com.google.common.collect.Sets; +import lombok.extern.slf4j.Slf4j; + +import java.util.Set; + +@Slf4j +@AutoService(DataSourceFactory.class) +public class ClickhouseJdbcDataSourceFactory implements DataSourceFactory { + + @Override + public String factoryIdentifier() { + return ClickhouseDataSourceConfig.PLUGIN_NAME; + } + + @Override + public Set supportedDataSources() { + return Sets.newHashSet(ClickhouseDataSourceConfig.CLICKHOUSE_DATASOURCE_PLUGIN_INFO); + } + + @Override + public DataSourceChannel createChannel() { + return new ClickhouseJdbcDataSourceChannel(); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseOptionRule.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseOptionRule.java new file mode 100644 index 000000000..f02e6030d --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseOptionRule.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.clickhouse.jdbc; + +import org.apache.seatunnel.api.configuration.Option; +import org.apache.seatunnel.api.configuration.Options; + +public class ClickhouseOptionRule { + + public static final Option URL = + Options.key("url") + .stringType() + .noDefaultValue() + .withDescription( + "jdbc url, eg:" + + "jdbc:clickhouse://localhost:8123/test?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8"); + + public static final Option USER = + Options.key("user").stringType().noDefaultValue().withDescription("jdbc user"); + + public static final Option PASSWORD = + Options.key("password").stringType().noDefaultValue().withDescription("jdbc password"); + + public static final Option DATABASE = + Options.key("database").stringType().noDefaultValue().withDescription("jdbc database"); + + public static final Option TABLE = + Options.key("table").stringType().noDefaultValue().withDescription("jdbc table"); + + public static final Option DRIVER = + Options.key("driver") + .enumType(DriverType.class) + .noDefaultValue() + .withDescription("driver"); + + public enum DriverType { + ClickHouse("ru.yandex.clickhouse.ClickHouseDriver"); + private final String driverClassName; + + DriverType(String driverClassName) { + this.driverClassName = driverClassName; + } + + public String getDriverClassName() { + return driverClassName; + } + + @Override + public String toString() { + return driverClassName; + } + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/pom.xml 
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/pom.xml new file mode 100644 index 000000000..32dce470d --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/pom.xml @@ -0,0 +1,36 @@ + + + + 4.0.0 + + org.apache.seatunnel + seatunnel-datasource-plugins + ${revision} + + + datasource-jdbc-hive + + + + org.apache.seatunnel + datasource-plugins-api + ${project.version} + provided + + + + diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcConstants.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcConstants.java new file mode 100644 index 000000000..8b133ac7d --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcConstants.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.hive.jdbc; + +import com.google.common.collect.Sets; + +import java.util.Set; + +public class HiveJdbcConstants { + + public static final Set HIVE_SYSTEM_DATABASES = + Sets.newHashSet( + "information_schema", "mysql", "performance_schema", "sys", "test", "hivedb"); +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcDataSourceChannel.java new file mode 100644 index 000000000..62559037d --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcDataSourceChannel.java @@ -0,0 +1,216 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.hive.jdbc; + +import org.apache.seatunnel.api.configuration.util.OptionRule; +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException; +import org.apache.seatunnel.datasource.plugin.api.model.TableField; + +import org.apache.commons.collections4.MapUtils; +import org.apache.commons.lang3.StringUtils; + +import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; + +import java.io.IOException; +import java.net.InetSocketAddress; +import java.net.Socket; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +@Slf4j +public class HiveJdbcDataSourceChannel implements DataSourceChannel { + + @Override + public OptionRule getDataSourceOptions(@NonNull String pluginName) { + return HiveJdbcOptionRule.optionRule(); + } + + @Override + public OptionRule getDatasourceMetadataFieldsByDataSourceName(@NonNull String pluginName) { + return HiveJdbcOptionRule.metadataRule(); + } + + @Override + public List getTables( + @NonNull String pluginName, Map requestParams, String database) { + return getTables(pluginName, requestParams, database); + } + + @Override + public List getDatabases( + @NonNull String pluginName, @NonNull Map requestParams) { + try { + return getDataBaseNames(pluginName, requestParams); + } catch (SQLException e) { + log.error("Query Hive databases error, request params is {}", requestParams, e); + throw new DataSourcePluginException("Query Hive databases error,", e); + } + } + + @Override + public boolean checkDataSourceConnectivity( + @NonNull String pluginName, @NonNull Map requestParams) { + return checkJdbcConnectivity(requestParams); + } + + @Override + public List getTableFields( + 
@NonNull String pluginName, + @NonNull Map requestParams, + @NonNull String database, + @NonNull String table) { + return getTableFields(requestParams, database, table); + } + + @Override + public Map> getTableFields( + @NonNull String pluginName, + @NonNull Map requestParams, + @NonNull String database, + @NonNull List tables) { + Map> tableFields = new HashMap<>(tables.size()); + for (String table : tables) { + tableFields.put(table, getTableFields(requestParams, database, table)); + } + return tableFields; + } + + protected boolean checkJdbcConnectivity(Map requestParams) { + try (Connection ignored = init(requestParams)) { + return true; + } catch (Exception e) { + throw new DataSourcePluginException( + "check jdbc connectivity failed, " + e.getMessage(), e); + } + } + + protected Connection init(Map requestParams) throws SQLException { + if (MapUtils.isEmpty(requestParams)) { + throw new DataSourcePluginException( + "Hive jdbc request params is null, please check your config"); + } + String url = requestParams.get(HiveJdbcOptionRule.URL.key()); + return DriverManager.getConnection(url); + } + + protected List getDataBaseNames(String pluginName, Map requestParams) + throws SQLException { + List dbNames = new ArrayList<>(); + try (Connection connection = init(requestParams); + Statement statement = connection.createStatement(); ) { + ResultSet re = statement.executeQuery("SHOW DATABASES;"); + // filter system databases + while (re.next()) { + String dbName = re.getString("database"); + if (StringUtils.isNotBlank(dbName) && isNotSystemDatabase(pluginName, dbName)) { + dbNames.add(dbName); + } + } + return dbNames; + } + } + + protected List getTableNames(Map requestParams, String dbName) { + List tableNames = new ArrayList<>(); + try (Connection connection = init(requestParams); ) { + ResultSet resultSet = + connection.getMetaData().getTables(dbName, null, null, new String[] {"TABLE"}); + while (resultSet.next()) { + String tableName = 
resultSet.getString("TABLE_NAME"); + if (StringUtils.isNotBlank(tableName)) { + tableNames.add(tableName); + } + } + return tableNames; + } catch (SQLException e) { + throw new DataSourcePluginException("get table names failed", e); + } + } + + protected List getTableFields( + Map requestParams, String dbName, String tableName) { + List tableFields = new ArrayList<>(); + try (Connection connection = init(requestParams); ) { + DatabaseMetaData metaData = connection.getMetaData(); + String primaryKey = getPrimaryKey(metaData, dbName, tableName); + ResultSet resultSet = metaData.getColumns(dbName, null, tableName, null); + while (resultSet.next()) { + TableField tableField = new TableField(); + String columnName = resultSet.getString("COLUMN_NAME"); + tableField.setPrimaryKey(false); + if (StringUtils.isNotBlank(primaryKey) && primaryKey.equals(columnName)) { + tableField.setPrimaryKey(true); + } + tableField.setName(columnName); + tableField.setType(resultSet.getString("TYPE_NAME")); + tableField.setComment(resultSet.getString("REMARKS")); + Object nullable = resultSet.getObject("IS_NULLABLE"); + boolean isNullable = convertToBoolean(nullable); + tableField.setNullable(isNullable); + tableFields.add(tableField); + } + } catch (SQLException e) { + throw new DataSourcePluginException("get table fields failed", e); + } + return tableFields; + } + + private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName) + throws SQLException { + ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName); + while (primaryKeysInfo.next()) { + return primaryKeysInfo.getString("COLUMN_NAME"); + } + return null; + } + + @SuppressWarnings("checkstyle:MagicNumber") + private static boolean checkHostConnectable(String host, int port) { + try (Socket socket = new Socket()) { + socket.connect(new InetSocketAddress(host, port), 1000); + return true; + } catch (IOException e) { + return false; + } + } + + private boolean isNotSystemDatabase(String 
pluginName, String dbName) {
+        // Exclude Hive system/internal databases by name (was a FIXME that let everything through).
+        return !HiveJdbcConstants.HIVE_SYSTEM_DATABASES.contains(dbName.toLowerCase());
+    }
+
+    private boolean convertToBoolean(Object value) {
+        if (value instanceof Boolean) {
+            return (Boolean) value;
+        }
+        if (value instanceof String) {
+            return "TRUE".equalsIgnoreCase((String) value) || "YES".equalsIgnoreCase((String) value);
+        }
+        return false;
+    }
+}
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcDataSourceFactory.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcDataSourceFactory.java
new file mode 100644
index 000000000..b149cc3b7
--- /dev/null
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcDataSourceFactory.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.datasource.plugin.hive.jdbc;
+
+import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
+import org.apache.seatunnel.datasource.plugin.api.DataSourceFactory;
+import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo;
+import org.apache.seatunnel.datasource.plugin.api.DatasourcePluginTypeEnum;
+
+import java.util.HashSet;
+import java.util.Set;
+
+public class HiveJdbcDataSourceFactory implements DataSourceFactory {
+    @Override
+    public String factoryIdentifier() {
+        return "Hive-JDBC";
+    }
+
+    @Override
+    public Set<DataSourcePluginInfo> supportedDataSources() {
+        DataSourcePluginInfo dataSourcePluginInfo =
+                DataSourcePluginInfo.builder()
+                        .name("Hive-JDBC")
+                        .type(DatasourcePluginTypeEnum.DATABASE.getCode())
+                        .version("1.0.0")
+                        .icon("Hive-JDBC")
+                        .supportVirtualTables(false)
+                        .build();
+        Set<DataSourcePluginInfo> dataSourceInfos = new HashSet<>();
+        dataSourceInfos.add(dataSourcePluginInfo);
+        return dataSourceInfos;
+    }
+
+    @Override
+    public DataSourceChannel createChannel() {
+        return new HiveJdbcDataSourceChannel();
+    }
+}
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcOptionRule.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcOptionRule.java
new file mode 100644
index 000000000..fb004b69f
--- /dev/null
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcOptionRule.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.hive.jdbc; + +import org.apache.seatunnel.api.configuration.Option; +import org.apache.seatunnel.api.configuration.Options; +import org.apache.seatunnel.api.configuration.util.OptionRule; + +public class HiveJdbcOptionRule { + + public static final Option URL = + Options.key("url") + .stringType() + .noDefaultValue() + .withDescription( + "jdbc url, eg:" + + "jdbc:hive2://localhost:10000/default?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8"); + + public static OptionRule optionRule() { + return OptionRule.builder().required(URL).build(); + } + + public static OptionRule metadataRule() { + // todo + return OptionRule.builder().build(); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/pom.xml b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/pom.xml new file mode 100644 index 000000000..39a6fa462 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/pom.xml @@ -0,0 +1,62 @@ + + + + 4.0.0 + + org.apache.seatunnel + seatunnel-datasource-plugins + ${revision} + + + datasource-jdbc-mysql + + + 8.0.28 + + + + + org.apache.seatunnel + datasource-plugins-api + ${project.version} + provided + + + org.apache.commons + commons-lang3 + + + + com.google.auto.service + auto-service + + + org.apache.seatunnel + 
seatunnel-api + provided + + + + + mysql + mysql-connector-java + ${mysql-connector.version} + provided + + + + diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlDataSourceConfig.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlDataSourceConfig.java new file mode 100644 index 000000000..03d6d0cf3 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlDataSourceConfig.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.mysql.jdbc; + +import org.apache.seatunnel.api.configuration.util.OptionRule; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; +import org.apache.seatunnel.datasource.plugin.api.DatasourcePluginTypeEnum; + +import com.google.common.collect.Sets; + +import java.util.Set; + +public class MysqlDataSourceConfig { + + public static final String PLUGIN_NAME = "JDBC-Mysql"; + + public static final DataSourcePluginInfo MYSQL_DATASOURCE_PLUGIN_INFO = + DataSourcePluginInfo.builder() + .name(PLUGIN_NAME) + .icon(PLUGIN_NAME) + .version("1.0.0") + .type(DatasourcePluginTypeEnum.DATABASE.getCode()) + .build(); + + public static final Set MYSQL_SYSTEM_DATABASES = + Sets.newHashSet("information_schema", "mysql", "performance_schema", "sys"); + + public static final OptionRule OPTION_RULE = + OptionRule.builder() + .required(MysqlOptionRule.URL, MysqlOptionRule.DRIVER) + .optional(MysqlOptionRule.USER, MysqlOptionRule.PASSWORD) + .build(); + + public static final OptionRule METADATA_RULE = + OptionRule.builder().required(MysqlOptionRule.DATABASE, MysqlOptionRule.TABLE).build(); +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlJdbcDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlJdbcDataSourceChannel.java new file mode 100644 index 000000000..99ab042b9 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlJdbcDataSourceChannel.java @@ -0,0 +1,183 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.mysql.jdbc; + +import org.apache.seatunnel.api.configuration.util.OptionRule; +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException; +import org.apache.seatunnel.datasource.plugin.api.model.TableField; +import org.apache.seatunnel.datasource.plugin.api.utils.JdbcUtils; + +import org.apache.commons.lang3.StringUtils; + +import lombok.NonNull; + +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +import static com.google.common.base.Preconditions.checkNotNull; + +public class MysqlJdbcDataSourceChannel implements DataSourceChannel { + + @Override + public OptionRule getDataSourceOptions(@NonNull String pluginName) { + return MysqlDataSourceConfig.OPTION_RULE; + } + + @Override + public OptionRule getDatasourceMetadataFieldsByDataSourceName(@NonNull String pluginName) { + return MysqlDataSourceConfig.METADATA_RULE; + } + + @Override + public List getTables( + @NonNull String pluginName, Map requestParams, String database) { + List tableNames = new 
ArrayList<>(); + try (Connection connection = getConnection(requestParams); + ResultSet resultSet = + connection + .getMetaData() + .getTables(database, null, null, new String[] {"TABLE"})) { + while (resultSet.next()) { + String tableName = resultSet.getString("TABLE_NAME"); + if (StringUtils.isNotBlank(tableName)) { + tableNames.add(tableName); + } + } + return tableNames; + } catch (ClassNotFoundException | SQLException e) { + throw new DataSourcePluginException("get table names failed", e); + } + } + + @Override + public List getDatabases( + @NonNull String pluginName, @NonNull Map requestParams) { + List dbNames = new ArrayList<>(); + try (Connection connection = getConnection(requestParams); + PreparedStatement statement = connection.prepareStatement("SHOW DATABASES;"); + ResultSet re = statement.executeQuery()) { + // filter system databases + while (re.next()) { + String dbName = re.getString("database"); + if (StringUtils.isNotBlank(dbName) + && !MysqlDataSourceConfig.MYSQL_SYSTEM_DATABASES.contains(dbName)) { + dbNames.add(dbName); + } + } + return dbNames; + } catch (SQLException | ClassNotFoundException e) { + throw new DataSourcePluginException("Get databases failed", e); + } + } + + @Override + public boolean checkDataSourceConnectivity( + @NonNull String pluginName, @NonNull Map requestParams) { + try (Connection ignored = getConnection(requestParams)) { + return true; + } catch (Exception e) { + throw new DataSourcePluginException("check jdbc connectivity failed", e); + } + } + + @Override + public List getTableFields( + @NonNull String pluginName, + @NonNull Map requestParams, + @NonNull String database, + @NonNull String table) { + List tableFields = new ArrayList<>(); + try (Connection connection = getConnection(requestParams, database)) { + DatabaseMetaData metaData = connection.getMetaData(); + String primaryKey = getPrimaryKey(metaData, database, table); + try (ResultSet resultSet = metaData.getColumns(database, null, table, null)) { + while 
(resultSet.next()) { + TableField tableField = new TableField(); + String columnName = resultSet.getString("COLUMN_NAME"); + tableField.setPrimaryKey(false); + if (StringUtils.isNotBlank(primaryKey) && primaryKey.equals(columnName)) { + tableField.setPrimaryKey(true); + } + tableField.setName(columnName); + tableField.setType(resultSet.getString("TYPE_NAME")); + tableField.setComment(resultSet.getString("REMARKS")); + Object nullable = resultSet.getObject("IS_NULLABLE"); + tableField.setNullable(Boolean.TRUE.toString().equals(nullable.toString())); + tableFields.add(tableField); + } + } + } catch (ClassNotFoundException | SQLException e) { + throw new DataSourcePluginException("get table fields failed", e); + } + return tableFields; + } + + @Override + public Map> getTableFields( + @NonNull String pluginName, + @NonNull Map requestParams, + @NonNull String database, + @NonNull List tables) { + return tables.parallelStream() + .collect( + Collectors.toMap( + Function.identity(), + table -> + getTableFields( + pluginName, requestParams, database, table))); + } + + private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName) + throws SQLException { + ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName); + while (primaryKeysInfo.next()) { + return primaryKeysInfo.getString("COLUMN_NAME"); + } + return null; + } + + private Connection getConnection(Map requestParams) + throws SQLException, ClassNotFoundException { + return getConnection(requestParams, null); + } + + private Connection getConnection(Map requestParams, String databaseName) + throws SQLException, ClassNotFoundException { + checkNotNull(requestParams.get(MysqlOptionRule.DRIVER.key())); + checkNotNull(requestParams.get(MysqlOptionRule.URL.key()), "Jdbc url cannot be null"); + String url = + JdbcUtils.replaceDatabase( + requestParams.get(MysqlOptionRule.URL.key()), databaseName); + if (requestParams.containsKey(MysqlOptionRule.USER.key())) { + String username 
= requestParams.get(MysqlOptionRule.USER.key()); + String password = requestParams.get(MysqlOptionRule.PASSWORD.key()); + return DriverManager.getConnection(url, username, password); + } + return DriverManager.getConnection(url); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlJdbcDataSourceFactory.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlJdbcDataSourceFactory.java new file mode 100644 index 000000000..31f6439bf --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlJdbcDataSourceFactory.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.mysql.jdbc; + +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourceFactory; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; + +import com.google.auto.service.AutoService; +import com.google.common.collect.Sets; +import lombok.extern.slf4j.Slf4j; + +import java.util.Set; + +@Slf4j +@AutoService(DataSourceFactory.class) +public class MysqlJdbcDataSourceFactory implements DataSourceFactory { + + @Override + public String factoryIdentifier() { + return MysqlDataSourceConfig.PLUGIN_NAME; + } + + @Override + public Set supportedDataSources() { + return Sets.newHashSet(MysqlDataSourceConfig.MYSQL_DATASOURCE_PLUGIN_INFO); + } + + @Override + public DataSourceChannel createChannel() { + return new MysqlJdbcDataSourceChannel(); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlOptionRule.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlOptionRule.java new file mode 100644 index 000000000..e34696410 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlOptionRule.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.mysql.jdbc; + +import org.apache.seatunnel.api.configuration.Option; +import org.apache.seatunnel.api.configuration.Options; + +public class MysqlOptionRule { + + public static final Option URL = + Options.key("url") + .stringType() + .noDefaultValue() + .withDescription( + "jdbc url, eg:" + + " jdbc:mysql://localhost:3306/test?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8"); + + public static final Option USER = + Options.key("user").stringType().noDefaultValue().withDescription("jdbc user"); + + public static final Option PASSWORD = + Options.key("password").stringType().noDefaultValue().withDescription("jdbc password"); + + public static final Option DATABASE = + Options.key("database").stringType().noDefaultValue().withDescription("jdbc database"); + + public static final Option TABLE = + Options.key("table").stringType().noDefaultValue().withDescription("jdbc table"); + + public static final Option DRIVER = + Options.key("driver") + .enumType(DriverType.class) + .defaultValue(DriverType.MYSQL) + .withDescription("driver"); + + public enum DriverType { + MYSQL("com.mysql.cj.jdbc.Driver"), + ; + private final String driverClassName; + + DriverType(String driverClassName) { + this.driverClassName = driverClassName; + } + + public String getDriverClassName() { + return driverClassName; + } + + @Override + public String toString() { + return driverClassName; + } + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/pom.xml 
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/pom.xml new file mode 100644 index 000000000..9e108276c --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/pom.xml @@ -0,0 +1,62 @@ + + + + 4.0.0 + + org.apache.seatunnel + seatunnel-datasource-plugins + ${revision} + + + datasource-jdbc-oracle + + + 21.5.0.0 + + + + + org.apache.seatunnel + datasource-plugins-api + ${project.version} + provided + + + org.apache.commons + commons-lang3 + + + + com.google.auto.service + auto-service + + + org.apache.seatunnel + seatunnel-api + provided + + + + + com.oracle.database.jdbc + ojdbc8 + ${oracle-jdbc.version} + provided + + + + diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleDataSourceChannel.java new file mode 100644 index 000000000..87a04fcf6 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleDataSourceChannel.java @@ -0,0 +1,175 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.oracle.jdbc; + +import org.apache.seatunnel.api.configuration.util.OptionRule; +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException; +import org.apache.seatunnel.datasource.plugin.api.model.TableField; +import org.apache.seatunnel.datasource.plugin.api.utils.JdbcUtils; + +import org.apache.commons.lang3.StringUtils; + +import lombok.NonNull; + +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import static com.google.common.base.Preconditions.checkNotNull; + +public class OracleDataSourceChannel implements DataSourceChannel { + + @Override + public OptionRule getDataSourceOptions(@NonNull String pluginName) { + return OracleDataSourceConfig.OPTION_RULE; + } + + @Override + public OptionRule getDatasourceMetadataFieldsByDataSourceName(@NonNull String pluginName) { + return OracleDataSourceConfig.METADATA_RULE; + } + + @Override + public List getTables( + @NonNull String pluginName, Map requestParams, String database) { + List tableNames = new ArrayList<>(); + try (Connection connection = getConnection(requestParams); + ResultSet resultSet = + connection + .getMetaData() + .getTables(database, null, null, new String[] {"TABLE"}); ) { + while (resultSet.next()) { + String tableName = resultSet.getString("TABLE_NAME"); + if (StringUtils.isNotBlank(tableName)) { + tableNames.add(tableName); + } + } + return tableNames; + } catch (ClassNotFoundException | SQLException e) { + throw new DataSourcePluginException("get table names failed", e); + } + } + + @Override + public List getDatabases( + @NonNull 
String pluginName, @NonNull Map<String, String> requestParams) {
+        List<String> dbNames = new ArrayList<>();
+        // Oracle has no "SHOW DATABASES"; list schemas (users) from the data dictionary instead.
+        try (Connection connection = getConnection(requestParams);
+                PreparedStatement statement = connection.prepareStatement("SELECT USERNAME FROM ALL_USERS");
+                ResultSet re = statement.executeQuery()) {
+            // filter system databases
+            while (re.next()) {
+                String dbName = re.getString(1);
+                if (StringUtils.isNotBlank(dbName)
+                        && !OracleDataSourceConfig.ORACLE_SYSTEM_DATABASES.contains(dbName)) {
+                    dbNames.add(dbName);
+                }
+            }
+            return dbNames;
+        } catch (Exception ex) {
+            throw new RuntimeException("get databases failed", ex);
+        }
+    }
+
+    @Override
+    public boolean checkDataSourceConnectivity(
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+        try (Connection ignored = getConnection(requestParams)) {
+            return true;
+        } catch (Exception e) {
+            throw new DataSourcePluginException("check jdbc connectivity failed", e);
+        }
+    }
+
+    @Override
+    public List<TableField> getTableFields(
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull String table) {
+        List<TableField> tableFields = new ArrayList<>();
+        try (Connection connection = getConnection(requestParams, database)) {
+            DatabaseMetaData metaData = connection.getMetaData();
+            String primaryKey = getPrimaryKey(metaData, database, table);
+            try (ResultSet resultSet = metaData.getColumns(database, null, table, null)) {
+                while (resultSet.next()) {
+                    TableField tableField = new TableField();
+                    String columnName = resultSet.getString("COLUMN_NAME");
+                    tableField.setPrimaryKey(false);
+                    if (StringUtils.isNotBlank(primaryKey) && primaryKey.equals(columnName)) {
+                        tableField.setPrimaryKey(true);
+                    }
+                    tableField.setName(columnName);
+                    tableField.setType(resultSet.getString("TYPE_NAME"));
+                    tableField.setComment(resultSet.getString("REMARKS"));
+                    Object nullable = resultSet.getObject("IS_NULLABLE");
+                    // JDBC getColumns reports IS_NULLABLE as "YES"/"NO", not "true"/"false".
+                    tableField.setNullable(nullable != null && "YES".equalsIgnoreCase(nullable.toString()));
+                    tableFields.add(tableField);
+                }
+            }
+        
} catch (ClassNotFoundException | SQLException e) { + throw new DataSourcePluginException("get table fields failed", e); + } + return tableFields; + } + + @Override + public Map> getTableFields( + @NonNull String pluginName, + @NonNull Map requestParams, + @NonNull String database, + @NonNull List tables) { + return null; + } + + private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName) + throws SQLException { + ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName); + while (primaryKeysInfo.next()) { + return primaryKeysInfo.getString("COLUMN_NAME"); + } + return null; + } + + private Connection getConnection(Map requestParams) + throws SQLException, ClassNotFoundException { + return getConnection(requestParams, null); + } + + private Connection getConnection(Map requestParams, String databaseName) + throws SQLException, ClassNotFoundException { + checkNotNull(requestParams.get(OracleOptionRule.DRIVER.key())); + checkNotNull(requestParams.get(OracleOptionRule.URL.key()), "Jdbc url cannot be null"); + String url = + JdbcUtils.replaceDatabase( + requestParams.get(OracleOptionRule.URL.key()), databaseName); + if (requestParams.containsKey(OracleOptionRule.USER.key())) { + String username = requestParams.get(OracleOptionRule.USER.key()); + String password = requestParams.get(OracleOptionRule.PASSWORD.key()); + return DriverManager.getConnection(url, username, password); + } + return DriverManager.getConnection(url); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleDataSourceConfig.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleDataSourceConfig.java new file mode 100644 index 000000000..83455b6d7 --- /dev/null +++ 
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleDataSourceConfig.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.oracle.jdbc; + +import org.apache.seatunnel.api.configuration.util.OptionRule; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; +import org.apache.seatunnel.datasource.plugin.api.DatasourcePluginTypeEnum; + +import com.google.common.collect.Sets; + +import java.util.Set; + +public class OracleDataSourceConfig { + + public static final String PLUGIN_NAME = "JDBC-Oracle"; + + public static final DataSourcePluginInfo ORACLE_DATASOURCE_PLUGIN_INFO = + DataSourcePluginInfo.builder() + .name(PLUGIN_NAME) + .icon(PLUGIN_NAME) + .version("1.0.0") + .type(DatasourcePluginTypeEnum.DATABASE.getCode()) + .build(); + + public static final Set ORACLE_SYSTEM_DATABASES = + Sets.newHashSet("SYS", "SYSTEM", "SYSDBA", "SYSOPER", "HR", "SCOTT"); + + public static final OptionRule OPTION_RULE = + OptionRule.builder() + .required(OracleOptionRule.URL, OracleOptionRule.DRIVER) + .optional(OracleOptionRule.USER, OracleOptionRule.PASSWORD) 
+ .build(); + + public static final OptionRule METADATA_RULE = + OptionRule.builder() + .required(OracleOptionRule.DATABASE, OracleOptionRule.TABLE) + .build(); +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleJdbcDataSourceFactory.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleJdbcDataSourceFactory.java new file mode 100644 index 000000000..b6a0aec9c --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleJdbcDataSourceFactory.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.oracle.jdbc; + +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourceFactory; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; + +import com.google.auto.service.AutoService; +import com.google.common.collect.Sets; +import lombok.extern.slf4j.Slf4j; + +import java.util.Set; + +@Slf4j +@AutoService(DataSourceFactory.class) +public class OracleJdbcDataSourceFactory implements DataSourceFactory { + @Override + public String factoryIdentifier() { + return OracleDataSourceConfig.PLUGIN_NAME; + } + + @Override + public Set supportedDataSources() { + return Sets.newHashSet(OracleDataSourceConfig.ORACLE_DATASOURCE_PLUGIN_INFO); + } + + @Override + public DataSourceChannel createChannel() { + return new OracleDataSourceChannel(); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleOptionRule.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleOptionRule.java new file mode 100644 index 000000000..f3ec40e33 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleOptionRule.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.oracle.jdbc; + +import org.apache.seatunnel.api.configuration.Option; +import org.apache.seatunnel.api.configuration.Options; + +public class OracleOptionRule { + + public static final Option URL = + Options.key("url") + .stringType() + .noDefaultValue() + .withDescription("jdbc url, eg:" + "jdbc:oracle:thin:@localhost:1521:XE"); + + public static final Option USER = + Options.key("user").stringType().noDefaultValue().withDescription("jdbc user"); + + public static final Option PASSWORD = + Options.key("password").stringType().noDefaultValue().withDescription("jdbc password"); + + public static final Option DATABASE = + Options.key("database").stringType().noDefaultValue().withDescription("jdbc database"); + + public static final Option TABLE = + Options.key("table").stringType().noDefaultValue().withDescription("jdbc table"); + + public static final Option DRIVER = + Options.key("driver") + .enumType(DriverType.class) + .defaultValue(DriverType.ORACLE) + .withDescription("driver"); + + public enum DriverType { + ORACLE("oracle.jdbc.driver.OracleDriver"), + ; + private final String driverClassName; + + DriverType(String driverClassName) { + this.driverClassName = driverClassName; + } + + public String getDriverClassName() { + return driverClassName; + } + + @Override + public String toString() { + return driverClassName; + } + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/pom.xml 
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/pom.xml new file mode 100644 index 000000000..9b07fb21a --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/pom.xml @@ -0,0 +1,61 @@ + + + + 4.0.0 + + org.apache.seatunnel + seatunnel-datasource-plugins + ${revision} + + + datasource-jdbc-postgresql + + + 42.4.3 + + + + + org.apache.seatunnel + datasource-plugins-api + ${project.version} + provided + + + org.apache.commons + commons-lang3 + + + + com.google.auto.service + auto-service + + + org.apache.seatunnel + seatunnel-api + provided + + + + + org.postgresql + postgresql + ${postgresql.version} + provided + + + diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlDataSourceChannel.java new file mode 100644 index 000000000..e43939536 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlDataSourceChannel.java @@ -0,0 +1,185 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.postgresql.jdbc; + +import org.apache.seatunnel.api.configuration.util.OptionRule; +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException; +import org.apache.seatunnel.datasource.plugin.api.model.TableField; +import org.apache.seatunnel.datasource.plugin.api.utils.JdbcUtils; + +import org.apache.commons.lang3.StringUtils; + +import lombok.NonNull; + +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import static com.google.common.base.Preconditions.checkNotNull; + +public class PostgresqlDataSourceChannel implements DataSourceChannel { + + @Override + public OptionRule getDataSourceOptions(@NonNull String pluginName) { + return PostgresqlDataSourceConfig.OPTION_RULE; + } + + @Override + public OptionRule getDatasourceMetadataFieldsByDataSourceName(@NonNull String pluginName) { + return PostgresqlDataSourceConfig.METADATA_RULE; + } + + @Override + public List getTables( + @NonNull String pluginName, Map requestParams, String database) { + List tableNames = new ArrayList<>(); + String query = "SELECT table_schema, table_name FROM information_schema.tables"; + try (Connection connection = getConnection(requestParams, database)) { + try (Statement statement = 
connection.createStatement(); + ResultSet resultSet = statement.executeQuery(query)) { + while (resultSet.next()) { + String schemaName = resultSet.getString("table_schema"); + String tableName = resultSet.getString("table_name"); + if (StringUtils.isNotBlank(schemaName) + && !PostgresqlDataSourceConfig.POSTGRESQL_SYSTEM_DATABASES.contains( + schemaName)) { + tableNames.add(schemaName + "." + tableName); + } + } + } + return tableNames; + } catch (SQLException | ClassNotFoundException e) { + throw new DataSourcePluginException("get table names failed", e); + } + } + + @Override + public List getDatabases( + @NonNull String pluginName, @NonNull Map requestParams) { + List dbNames = new ArrayList<>(); + try (Connection connection = getConnection(requestParams); + PreparedStatement statement = + connection.prepareStatement("select datname from pg_database;"); + ResultSet re = statement.executeQuery()) { + while (re.next()) { + String dbName = re.getString("datname"); + if (StringUtils.isNotBlank(dbName) + && !PostgresqlDataSourceConfig.POSTGRESQL_SYSTEM_DATABASES.contains( + dbName)) { + dbNames.add(dbName); + } + } + return dbNames; + } catch (SQLException | ClassNotFoundException e) { + throw new DataSourcePluginException("get databases failed", e); + } + } + + @Override + public boolean checkDataSourceConnectivity( + @NonNull String pluginName, @NonNull Map requestParams) { + try (Connection ignored = getConnection(requestParams)) { + return true; + } catch (Exception e) { + throw new DataSourcePluginException("check jdbc connectivity failed", e); + } + } + + @Override + public List getTableFields( + @NonNull String pluginName, + @NonNull Map requestParams, + @NonNull String database, + @NonNull String table) { + List tableFields = new ArrayList<>(); + try (Connection connection = getConnection(requestParams, database); ) { + DatabaseMetaData metaData = connection.getMetaData(); + String primaryKey = getPrimaryKey(metaData, database, table); + String[] split = 
table.split("\\."); + if (split.length != 2) { + throw new DataSourcePluginException( + "Postgresql tableName should composed by schemaName.tableName"); + } + try (ResultSet resultSet = metaData.getColumns(database, split[0], split[1], null)) { + while (resultSet.next()) { + TableField tableField = new TableField(); + String columnName = resultSet.getString("COLUMN_NAME"); + tableField.setPrimaryKey(false); + if (StringUtils.isNotBlank(primaryKey) && primaryKey.equals(columnName)) { + tableField.setPrimaryKey(true); + } + tableField.setName(columnName); + tableField.setType(resultSet.getString("TYPE_NAME")); + tableField.setComment(resultSet.getString("REMARKS")); + Object nullable = resultSet.getObject("IS_NULLABLE"); + tableField.setNullable(Boolean.TRUE.toString().equals(nullable.toString())); + tableFields.add(tableField); + } + } + } catch (SQLException | ClassNotFoundException e) { + throw new DataSourcePluginException("get table fields failed", e); + } + return tableFields; + } + + @Override + public Map> getTableFields( + @NonNull String pluginName, + @NonNull Map requestParams, + @NonNull String database, + @NonNull List tables) { + return null; + } + + private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName) + throws SQLException { + ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName); + while (primaryKeysInfo.next()) { + return primaryKeysInfo.getString("COLUMN_NAME"); + } + return null; + } + + private Connection getConnection(Map requestParams) + throws SQLException, ClassNotFoundException { + return getConnection(requestParams, null); + } + + private Connection getConnection(Map requestParams, String databaseName) + throws SQLException, ClassNotFoundException { + checkNotNull(requestParams.get(PostgresqlOptionRule.DRIVER.key())); + checkNotNull(requestParams.get(PostgresqlOptionRule.URL.key()), "Jdbc url cannot be null"); + String url = + JdbcUtils.replaceDatabase( + 
requestParams.get(PostgresqlOptionRule.URL.key()), databaseName); + if (requestParams.containsKey(PostgresqlOptionRule.USER.key())) { + String username = requestParams.get(PostgresqlOptionRule.USER.key()); + String password = requestParams.get(PostgresqlOptionRule.PASSWORD.key()); + return DriverManager.getConnection(url, username, password); + } + return DriverManager.getConnection(url); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlDataSourceConfig.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlDataSourceConfig.java new file mode 100644 index 000000000..f150ab5d0 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlDataSourceConfig.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.postgresql.jdbc; + +import org.apache.seatunnel.api.configuration.util.OptionRule; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; +import org.apache.seatunnel.datasource.plugin.api.DatasourcePluginTypeEnum; + +import com.google.common.collect.Sets; + +import java.util.Set; + +public class PostgresqlDataSourceConfig { + + public static final String PLUGIN_NAME = "JDBC-Postgres"; + + public static final DataSourcePluginInfo POSTGRESQL_DATASOURCE_PLUGIN_INFO = + DataSourcePluginInfo.builder() + .name(PLUGIN_NAME) + .icon(PLUGIN_NAME) + .version("1.0.0") + .type(DatasourcePluginTypeEnum.DATABASE.getCode()) + .build(); + + public static final Set POSTGRESQL_SYSTEM_DATABASES = + Sets.newHashSet( + "information_schema", + "pg_catalog", + "root", + "pg_toast", + "pg_temp_1", + "pg_toast_temp_1", + "postgres", + "template0", + "template1"); + + public static final OptionRule OPTION_RULE = + OptionRule.builder() + .required(PostgresqlOptionRule.URL, PostgresqlOptionRule.DRIVER) + .optional(PostgresqlOptionRule.USER, PostgresqlOptionRule.PASSWORD) + .build(); + + public static final OptionRule METADATA_RULE = + OptionRule.builder() + .required(PostgresqlOptionRule.DATABASE, PostgresqlOptionRule.TABLE) + .build(); +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlDataSourceFactory.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlDataSourceFactory.java new file mode 100644 index 000000000..13dd7847c --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlDataSourceFactory.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) 
under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.postgresql.jdbc; + +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourceFactory; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; + +import com.google.auto.service.AutoService; +import com.google.common.collect.Sets; + +import java.util.Set; + +@AutoService(DataSourceFactory.class) +public class PostgresqlDataSourceFactory implements DataSourceFactory { + + @Override + public String factoryIdentifier() { + return PostgresqlDataSourceConfig.PLUGIN_NAME; + } + + @Override + public Set supportedDataSources() { + return Sets.newHashSet(PostgresqlDataSourceConfig.POSTGRESQL_DATASOURCE_PLUGIN_INFO); + } + + @Override + public DataSourceChannel createChannel() { + return new PostgresqlDataSourceChannel(); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlOptionRule.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlOptionRule.java new file mode 
100644 index 000000000..748c4ea79 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlOptionRule.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.postgresql.jdbc; + +import org.apache.seatunnel.api.configuration.Option; +import org.apache.seatunnel.api.configuration.Options; + +public class PostgresqlOptionRule { + + public static final Option URL = + Options.key("url") + .stringType() + .noDefaultValue() + .withDescription( + "jdbc url, eg:" + + "jdbc:postgresql://localhost:5432//test?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8"); + + public static final Option USER = + Options.key("user").stringType().noDefaultValue().withDescription("jdbc user"); + + public static final Option PASSWORD = + Options.key("password").stringType().noDefaultValue().withDescription("jdbc password"); + + public static final Option DATABASE = + Options.key("database").stringType().noDefaultValue().withDescription("jdbc database"); + + public static final Option TABLE = + Options.key("table").stringType().noDefaultValue().withDescription("jdbc table"); + + public static final Option DRIVER = + Options.key("driver") + .enumType(DriverType.class) + .defaultValue(DriverType.POSTGRESQL) + .withDescription("driver"); + + public enum DriverType { + POSTGRESQL("org.postgresql.Driver"), + ; + private final String driverClassName; + + DriverType(String driverClassName) { + this.driverClassName = driverClassName; + } + + public String getDriverClassName() { + return driverClassName; + } + + @Override + public String toString() { + return driverClassName; + } + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-redshift/pom.xml b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-redshift/pom.xml new file mode 100644 index 000000000..440e4a451 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-redshift/pom.xml @@ -0,0 +1,62 @@ + + + + 4.0.0 + + org.apache.seatunnel + seatunnel-datasource-plugins + ${revision} + + + datasource-jdbc-redshift + + + 2.1.0.10 + + + + + 
org.apache.seatunnel + datasource-plugins-api + ${project.version} + provided + + + org.apache.commons + commons-lang3 + + + + com.google.auto.service + auto-service + + + org.apache.seatunnel + seatunnel-api + provided + + + + + com.amazon.redshift + redshift-jdbc42 + ${redshift.version} + provided + + + + diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/jdbc/RedshiftDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/jdbc/RedshiftDataSourceChannel.java new file mode 100644 index 000000000..0e3dfa709 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/jdbc/RedshiftDataSourceChannel.java @@ -0,0 +1,181 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.redshift.jdbc; + +import org.apache.seatunnel.api.configuration.util.OptionRule; +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException; +import org.apache.seatunnel.datasource.plugin.api.model.TableField; +import org.apache.seatunnel.datasource.plugin.api.utils.JdbcUtils; + +import org.apache.commons.lang3.StringUtils; + +import lombok.NonNull; + +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import static com.google.common.base.Preconditions.checkNotNull; + +public class RedshiftDataSourceChannel implements DataSourceChannel { + + @Override + public OptionRule getDataSourceOptions(@NonNull String pluginName) { + return RedshiftDataSourceConfig.OPTION_RULE; + } + + @Override + public OptionRule getDatasourceMetadataFieldsByDataSourceName(@NonNull String pluginName) { + return RedshiftDataSourceConfig.METADATA_RULE; + } + + @Override + public List getTables( + @NonNull String pluginName, Map requestParams, String database) { + List tableNames = new ArrayList<>(); + try (Connection connection = getConnection(requestParams, database); + ResultSet resultSet = + connection.getMetaData().getTables(database, null, null, null); ) { + while (resultSet.next()) { + String schemaName = resultSet.getString("TABLE_SCHEM"); + String tableName = resultSet.getString("TABLE_NAME"); + // todo: use isNotSystemSchemaName + if (StringUtils.isNotBlank(schemaName) + && !RedshiftDataSourceConfig.REDSHIFT_SYSTEM_TABLES.contains(schemaName)) { + tableNames.add(schemaName + "." 
+ tableName); + } + } + return tableNames; + } catch (SQLException | ClassNotFoundException e) { + throw new DataSourcePluginException("get table names failed", e); + } + } + + @Override + public List getDatabases( + @NonNull String pluginName, @NonNull Map requestParams) { + List dbNames = new ArrayList<>(); + try (Connection connection = getConnection(requestParams); + PreparedStatement statement = + connection.prepareStatement("select datname from pg_database;"); + ResultSet re = statement.executeQuery()) { + while (re.next()) { + String dbName = re.getString("datname"); + if (StringUtils.isNotBlank(dbName) + && !RedshiftDataSourceConfig.REDSHIFT_SYSTEM_TABLES.contains(dbName)) { + dbNames.add(dbName); + } + } + return dbNames; + } catch (SQLException | ClassNotFoundException e) { + throw new DataSourcePluginException("get databases failed", e); + } + } + + @Override + public boolean checkDataSourceConnectivity( + @NonNull String pluginName, @NonNull Map requestParams) { + try (Connection ignored = getConnection(requestParams)) { + return true; + } catch (Exception e) { + throw new DataSourcePluginException("check jdbc connectivity failed", e); + } + } + + @Override + public List getTableFields( + @NonNull String pluginName, + @NonNull Map requestParams, + @NonNull String database, + @NonNull String table) { + List tableFields = new ArrayList<>(); + try (Connection connection = getConnection(requestParams, database); ) { + DatabaseMetaData metaData = connection.getMetaData(); + String primaryKey = getPrimaryKey(metaData, database, table); + String[] split = table.split("\\."); + if (split.length != 2) { + throw new DataSourcePluginException( + "Postgresql tableName should composed by schemaName.tableName"); + } + try (ResultSet resultSet = metaData.getColumns(database, split[0], split[1], null)) { + while (resultSet.next()) { + TableField tableField = new TableField(); + String columnName = resultSet.getString("COLUMN_NAME"); + tableField.setPrimaryKey(false); + 
if (StringUtils.isNotBlank(primaryKey) && primaryKey.equals(columnName)) { + tableField.setPrimaryKey(true); + } + tableField.setName(columnName); + tableField.setType(resultSet.getString("TYPE_NAME")); + tableField.setComment(resultSet.getString("REMARKS")); + Object nullable = resultSet.getObject("IS_NULLABLE"); + tableField.setNullable(Boolean.TRUE.toString().equals(nullable.toString())); + tableFields.add(tableField); + } + } + } catch (SQLException | ClassNotFoundException e) { + throw new DataSourcePluginException("get table fields failed", e); + } + return tableFields; + } + + @Override + public Map> getTableFields( + @NonNull String pluginName, + @NonNull Map requestParams, + @NonNull String database, + @NonNull List tables) { + return null; + } + + private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName) + throws SQLException { + ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName); + while (primaryKeysInfo.next()) { + return primaryKeysInfo.getString("COLUMN_NAME"); + } + return null; + } + + private Connection getConnection(Map requestParams) + throws SQLException, ClassNotFoundException { + return getConnection(requestParams, null); + } + + private Connection getConnection(Map requestParams, String databaseName) + throws SQLException, ClassNotFoundException { + checkNotNull(requestParams.get(RedshiftOptionRule.DRIVER.key())); + checkNotNull(requestParams.get(RedshiftOptionRule.URL.key()), "Jdbc url cannot be null"); + String url = + JdbcUtils.replaceDatabase( + requestParams.get(RedshiftOptionRule.URL.key()), databaseName); + if (requestParams.containsKey(RedshiftOptionRule.USER.key())) { + String username = requestParams.get(RedshiftOptionRule.USER.key()); + String password = requestParams.get(RedshiftOptionRule.PASSWORD.key()); + return DriverManager.getConnection(url, username, password); + } + return DriverManager.getConnection(url); + } +} diff --git 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.seatunnel.datasource.plugin.redshift.jdbc;

import org.apache.seatunnel.api.configuration.util.OptionRule;
import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo;
import org.apache.seatunnel.datasource.plugin.api.DatasourcePluginTypeEnum;

import com.google.common.collect.Sets;

import java.util.Set;

/**
 * Static configuration for the JDBC Redshift datasource plugin: plugin descriptor,
 * system schemas/databases to hide from users, and the option rules used to
 * validate connection and metadata parameters.
 */
public class RedshiftDataSourceConfig {

    /** Identifier under which this plugin is registered and looked up. */
    public static final String PLUGIN_NAME = "JDBC-Redshift";

    /** Descriptor shown to the frontend (name, icon, version, DATABASE type). */
    public static final DataSourcePluginInfo REDSHIFT_DATASOURCE_PLUGIN_INFO =
            DataSourcePluginInfo.builder()
                    .name(PLUGIN_NAME)
                    .icon("redshift")
                    .version("1.0.0")
                    .type(DatasourcePluginTypeEnum.DATABASE.getCode())
                    .build();

    // NOTE(review): despite the name, this set mixes schema names (pg_catalog, pg_toast)
    // and database names (template0, template1) — confirm where it is actually applied.
    public static final Set<String> REDSHIFT_SYSTEM_TABLES =
            Sets.newHashSet(
                    "information_schema",
                    "pg_catalog",
                    "root",
                    "pg_toast",
                    "pg_temp_1",
                    "pg_toast_temp_1",
                    "postgres",
                    "template0",
                    "template1");

    /** Connection-level options: url + driver are mandatory, credentials optional. */
    public static final OptionRule OPTION_RULE =
            OptionRule.builder()
                    .required(RedshiftOptionRule.URL, RedshiftOptionRule.DRIVER)
                    .optional(RedshiftOptionRule.USER, RedshiftOptionRule.PASSWORD)
                    .build();

    /** Metadata-level options: a concrete database and table must be selected. */
    public static final OptionRule METADATA_RULE =
            OptionRule.builder()
                    .required(RedshiftOptionRule.DATABASE, RedshiftOptionRule.TABLE)
                    .build();

    private RedshiftDataSourceConfig() {
        // constants holder — not instantiable
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.seatunnel.datasource.plugin.redshift.jdbc;

import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
import org.apache.seatunnel.datasource.plugin.api.DataSourceFactory;
import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo;

import com.google.auto.service.AutoService;
import com.google.common.collect.Sets;
import lombok.extern.slf4j.Slf4j;

import java.util.Set;

/**
 * SPI entry point for the Redshift JDBC datasource plugin. Discovered through
 * {@link java.util.ServiceLoader} via the {@code @AutoService}-generated descriptor,
 * so it must keep a public no-arg constructor.
 */
@Slf4j
@AutoService(DataSourceFactory.class)
public class RedshiftDataSourceFactory implements DataSourceFactory {

    /** Returns the registry key this factory answers to. */
    @Override
    public String factoryIdentifier() {
        return RedshiftDataSourceConfig.PLUGIN_NAME;
    }

    /** Returns the single datasource descriptor this factory provides. */
    @Override
    public Set<DataSourcePluginInfo> supportedDataSources() {
        return Sets.newHashSet(RedshiftDataSourceConfig.REDSHIFT_DATASOURCE_PLUGIN_INFO);
    }

    /** Creates a fresh channel; channels are cheap, stateless wrappers over JDBC calls. */
    @Override
    public DataSourceChannel createChannel() {
        return new RedshiftDataSourceChannel();
    }
}
000000000..c4f389545 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/jdbc/RedshiftOptionRule.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.redshift.jdbc; + +import org.apache.seatunnel.api.configuration.Option; +import org.apache.seatunnel.api.configuration.Options; + +public class RedshiftOptionRule { + + public static final Option URL = + Options.key("url") + .stringType() + .noDefaultValue() + .withDescription( + "jdbc url, eg:" + + "jdbc:redshift://server.redshift.amazonaws.com:5439/test?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8"); + + public static final Option USER = + Options.key("user").stringType().noDefaultValue().withDescription("jdbc user"); + + public static final Option PASSWORD = + Options.key("password").stringType().noDefaultValue().withDescription("jdbc password"); + + public static final Option DATABASE = + Options.key("database").stringType().noDefaultValue().withDescription("jdbc database"); + + public static final Option TABLE = + Options.key("table").stringType().noDefaultValue().withDescription("jdbc table"); + + public static final Option DRIVER = + Options.key("driver") + .enumType(DriverType.class) + .defaultValue(DriverType.JDBC42_REDSHIFT) + .withDescription("driver"); + + public enum DriverType { + JDBC42_REDSHIFT("com.amazon.redshift.jdbc42.Driver"), + ; + private final String driverClassName; + + DriverType(String driverClassName) { + this.driverClassName = driverClassName; + } + + public String getDriverClassName() { + return driverClassName; + } + + @Override + public String toString() { + return driverClassName; + } + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-sqlserver/pom.xml b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-sqlserver/pom.xml new file mode 100644 index 000000000..1ff5b6bca --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-sqlserver/pom.xml @@ -0,0 +1,62 @@ + + + + 4.0.0 + + org.apache.seatunnel + seatunnel-datasource-plugins + ${revision} + + + datasource-jdbc-sqlserver + 
+ + 9.2.1.jre8 + + + + + org.apache.seatunnel + datasource-plugins-api + ${project.version} + provided + + + org.apache.commons + commons-lang3 + + + + com.google.auto.service + auto-service + + + org.apache.seatunnel + seatunnel-api + provided + + + + + com.microsoft.sqlserver + mssql-jdbc + ${sqlserver.version} + provided + + + + diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-sqlserver/src/main/java/org/apache/seatunnel/datasource/plugin/sqlserver/jdbc/SqlServerDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-sqlserver/src/main/java/org/apache/seatunnel/datasource/plugin/sqlserver/jdbc/SqlServerDataSourceChannel.java new file mode 100644 index 000000000..6ad2e9eb1 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-sqlserver/src/main/java/org/apache/seatunnel/datasource/plugin/sqlserver/jdbc/SqlServerDataSourceChannel.java @@ -0,0 +1,178 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.seatunnel.datasource.plugin.sqlserver.jdbc;

import org.apache.seatunnel.api.configuration.util.OptionRule;
import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
import org.apache.seatunnel.datasource.plugin.api.model.TableField;
import org.apache.seatunnel.datasource.plugin.api.utils.JdbcUtils;

import org.apache.commons.lang3.StringUtils;

import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import static com.google.common.base.Preconditions.checkNotNull;

/**
 * {@link DataSourceChannel} for SQL Server. All metadata (databases, tables, columns)
 * is obtained through plain JDBC {@link DatabaseMetaData} calls; a new connection is
 * opened per request and closed via try-with-resources.
 */
@Slf4j
public class SqlServerDataSourceChannel implements DataSourceChannel {

    @Override
    public OptionRule getDataSourceOptions(@NonNull String pluginName) {
        return SqlServerDataSourceConfig.OPTION_RULE;
    }

    @Override
    public OptionRule getDatasourceMetadataFieldsByDataSourceName(@NonNull String pluginName) {
        return SqlServerDataSourceConfig.METADATA_RULE;
    }

    /**
     * Lists the non-blank TABLE-type table names of {@code database}.
     *
     * @throws DataSourcePluginException if the driver cannot be loaded or the query fails
     */
    @Override
    public List<String> getTables(
            @NonNull String pluginName, Map<String, String> requestParams, String database) {
        List<String> tableNames = new ArrayList<>();
        try (Connection connection = getConnection(requestParams);
                ResultSet resultSet =
                        connection
                                .getMetaData()
                                .getTables(database, null, null, new String[] {"TABLE"})) {
            while (resultSet.next()) {
                String tableName = resultSet.getString("TABLE_NAME");
                if (StringUtils.isNotBlank(tableName)) {
                    tableNames.add(tableName);
                }
            }
            return tableNames;
        } catch (ClassNotFoundException | SQLException e) {
            throw new DataSourcePluginException("get table names failed", e);
        }
    }

    /**
     * Lists user databases. System databases are excluded twice: in the SQL itself and
     * against {@link SqlServerDataSourceConfig#SQLSERVER_SYSTEM_DATABASES}, which also
     * covers ReportServer/SSIS databases the query does not mention.
     */
    @Override
    public List<String> getDatabases(
            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
        List<String> dbNames = new ArrayList<>();
        try (Connection connection = getConnection(requestParams);
                PreparedStatement statement =
                        connection.prepareStatement(
                                "SELECT name FROM sys.databases WHERE name NOT IN ('master', 'tempdb', 'model', 'msdb');");
                ResultSet re = statement.executeQuery()) {
            // filter system databases
            while (re.next()) {
                String dbName = re.getString("name");
                if (StringUtils.isNotBlank(dbName)
                        && !SqlServerDataSourceConfig.SQLSERVER_SYSTEM_DATABASES.contains(dbName)) {
                    dbNames.add(dbName);
                }
            }
            return dbNames;
        } catch (Exception ex) {
            // Throw the plugin exception type for consistency with every other method
            // of this channel (was a raw RuntimeException).
            throw new DataSourcePluginException("get databases failed", ex);
        }
    }

    /** Returns true if a connection can be opened with the given parameters; throws otherwise. */
    @Override
    public boolean checkDataSourceConnectivity(
            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
        try (Connection ignored = getConnection(requestParams)) {
            return true;
        } catch (Exception e) {
            throw new DataSourcePluginException("check jdbc connectivity failed", e);
        }
    }

    /**
     * Describes the columns of {@code database.table}, marking the primary-key column.
     *
     * @throws DataSourcePluginException if the driver cannot be loaded or metadata lookup fails
     */
    @Override
    public List<TableField> getTableFields(
            @NonNull String pluginName,
            @NonNull Map<String, String> requestParams,
            @NonNull String database,
            @NonNull String table) {
        List<TableField> tableFields = new ArrayList<>();
        // Connect to the requested database so metadata is resolved against the right
        // catalog (previously connected with a null database, ignoring the argument;
        // the StarRocks channel already does it this way).
        try (Connection connection = getConnection(requestParams, database)) {
            DatabaseMetaData metaData = connection.getMetaData();
            String primaryKey = getPrimaryKey(metaData, database, table);
            try (ResultSet resultSet = metaData.getColumns(database, null, table, null)) {
                while (resultSet.next()) {
                    TableField tableField = new TableField();
                    String columnName = resultSet.getString("COLUMN_NAME");
                    tableField.setPrimaryKey(false);
                    if (StringUtils.isNotBlank(primaryKey) && primaryKey.equals(columnName)) {
                        tableField.setPrimaryKey(true);
                    }
                    tableField.setName(columnName);
                    tableField.setType(resultSet.getString("TYPE_NAME"));
                    tableField.setComment(resultSet.getString("REMARKS"));
                    Object nullable = resultSet.getObject("IS_NULLABLE");
                    tableField.setNullable(Boolean.TRUE.toString().equals(nullable.toString()));
                    tableFields.add(tableField);
                }
            }
        } catch (ClassNotFoundException | SQLException e) {
            throw new DataSourcePluginException("get table fields failed", e);
        }
        return tableFields;
    }

    // NOTE(review): bulk variant is unimplemented and returns null — callers must
    // null-check; consider returning an empty map instead once callers are audited.
    @Override
    public Map<String, List<TableField>> getTableFields(
            @NonNull String pluginName,
            @NonNull Map<String, String> requestParams,
            @NonNull String database,
            @NonNull List<String> tables) {
        return null;
    }

    /**
     * Returns the first primary-key column of the table, or null when there is none.
     * NOTE(review): composite primary keys are reduced to their first reported column.
     */
    private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName)
            throws SQLException {
        // try-with-resources: the previous version leaked this ResultSet.
        try (ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName)) {
            while (primaryKeysInfo.next()) {
                return primaryKeysInfo.getString("COLUMN_NAME");
            }
        }
        return null;
    }

    private Connection getConnection(Map<String, String> requestParams)
            throws SQLException, ClassNotFoundException {
        return getConnection(requestParams, null);
    }

    /**
     * Opens a connection from the request parameters, optionally rewriting the URL to
     * point at {@code databaseName}. Credentials are used only when a user is present.
     */
    private Connection getConnection(Map<String, String> requestParams, String databaseName)
            throws SQLException, ClassNotFoundException {
        checkNotNull(requestParams.get(SqlServerOptionRule.DRIVER.key()));
        checkNotNull(requestParams.get(SqlServerOptionRule.URL.key()), "Jdbc url cannot be null");
        String url =
                JdbcUtils.replaceDatabase(
                        requestParams.get(SqlServerOptionRule.URL.key()), databaseName);
        if (requestParams.containsKey(SqlServerOptionRule.USER.key())) {
            String username = requestParams.get(SqlServerOptionRule.USER.key());
            String password = requestParams.get(SqlServerOptionRule.PASSWORD.key());
            return DriverManager.getConnection(url, username, password);
        }
        return DriverManager.getConnection(url);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.seatunnel.datasource.plugin.sqlserver.jdbc;

import org.apache.seatunnel.api.configuration.util.OptionRule;
import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo;
import org.apache.seatunnel.datasource.plugin.api.DatasourcePluginTypeEnum;

import com.google.common.collect.Sets;

import java.util.Set;

/**
 * Static configuration for the JDBC SQL Server datasource plugin: plugin descriptor,
 * system databases to hide, and the option rules for connection/metadata parameters.
 */
public class SqlServerDataSourceConfig {

    /** Identifier under which this plugin is registered and looked up. */
    public static final String PLUGIN_NAME = "JDBC-SQLServer";

    /** Descriptor shown to the frontend (name, icon, version, DATABASE type). */
    public static final DataSourcePluginInfo SQLSERVER_DATASOURCE_PLUGIN_INFO =
            DataSourcePluginInfo.builder()
                    .name(PLUGIN_NAME)
                    .icon(PLUGIN_NAME)
                    .version("1.0.0")
                    .type(DatasourcePluginTypeEnum.DATABASE.getCode())
                    .build();

    /** Built-in SQL Server databases never exposed to users. */
    public static final Set<String> SQLSERVER_SYSTEM_DATABASES =
            Sets.newHashSet(
                    "master",
                    "tempdb",
                    "model",
                    "msdb",
                    "ReportServer",
                    "ReportServerTempDB",
                    "SSISDB");

    /** Connection-level options: url + driver are mandatory, credentials optional. */
    public static final OptionRule OPTION_RULE =
            OptionRule.builder()
                    .required(SqlServerOptionRule.URL, SqlServerOptionRule.DRIVER)
                    .optional(SqlServerOptionRule.USER, SqlServerOptionRule.PASSWORD)
                    .build();

    /** Metadata-level options: a concrete database and table must be selected. */
    public static final OptionRule METADATA_RULE =
            OptionRule.builder()
                    .required(SqlServerOptionRule.DATABASE, SqlServerOptionRule.TABLE)
                    .build();

    private SqlServerDataSourceConfig() {
        // constants holder — not instantiable
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.seatunnel.datasource.plugin.sqlserver.jdbc;

import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
import org.apache.seatunnel.datasource.plugin.api.DataSourceFactory;
import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo;

import com.google.auto.service.AutoService;
import com.google.common.collect.Sets;
import lombok.extern.slf4j.Slf4j;

import java.util.Set;

/**
 * SPI entry point for the SQL Server JDBC datasource plugin. Discovered through
 * {@link java.util.ServiceLoader} via the {@code @AutoService}-generated descriptor,
 * so it must keep a public no-arg constructor.
 */
@Slf4j
@AutoService(DataSourceFactory.class)
public class SqlServerDataSourceFactory implements DataSourceFactory {

    /** Returns the registry key this factory answers to. */
    @Override
    public String factoryIdentifier() {
        return SqlServerDataSourceConfig.PLUGIN_NAME;
    }

    /** Returns the single datasource descriptor this factory provides. */
    @Override
    public Set<DataSourcePluginInfo> supportedDataSources() {
        return Sets.newHashSet(SqlServerDataSourceConfig.SQLSERVER_DATASOURCE_PLUGIN_INFO);
    }

    /** Creates a fresh channel; channels are cheap, stateless wrappers over JDBC calls. */
    @Override
    public DataSourceChannel createChannel() {
        return new SqlServerDataSourceChannel();
    }
}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.sqlserver.jdbc; + +import org.apache.seatunnel.api.configuration.Option; +import org.apache.seatunnel.api.configuration.Options; + +public class SqlServerOptionRule { + + public static final Option URL = + Options.key("url") + .stringType() + .noDefaultValue() + .withDescription( + "jdbc url, eg:" + "jdbc:sqlserver://localhost:1433;database=xx"); + + public static final Option USER = + Options.key("user").stringType().noDefaultValue().withDescription("jdbc user"); + + public static final Option PASSWORD = + Options.key("password").stringType().noDefaultValue().withDescription("jdbc password"); + + public static final Option DATABASE = + Options.key("database").stringType().noDefaultValue().withDescription("jdbc database"); + + public static final Option TABLE = + Options.key("table").stringType().noDefaultValue().withDescription("jdbc table"); + + public static final Option DRIVER = + Options.key("driver") + .enumType(DriverType.class) + .defaultValue(DriverType.SQL_SERVER) + .withDescription("driver"); + + public enum DriverType { + SQL_SERVER("com.microsoft.sqlserver.jdbc.SQLServerDriver"), + ; + private final String driverClassName; + + DriverType(String driverClassName) { + this.driverClassName = driverClassName; + } + + public String getDriverClassName() { + return driverClassName; + } + + @Override + public String toString() { + return driverClassName; + } + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/pom.xml 
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/pom.xml new file mode 100644 index 000000000..764198e54 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/pom.xml @@ -0,0 +1,61 @@ + + + + 4.0.0 + + org.apache.seatunnel + seatunnel-datasource-plugins + ${revision} + + + datasource-jdbc-starrocks + + + 8.0.28 + + + + + org.apache.seatunnel + datasource-plugins-api + ${project.version} + provided + + + org.apache.commons + commons-lang3 + + + + com.google.auto.service + auto-service + + + org.apache.seatunnel + seatunnel-api + provided + + + + + mysql + mysql-connector-java + ${mysql-connector.version} + + + + diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/jdbc/StarRocksDataSourceConfig.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/jdbc/StarRocksDataSourceConfig.java new file mode 100644 index 000000000..913bc5ced --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/jdbc/StarRocksDataSourceConfig.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.seatunnel.datasource.plugin.starrocks.jdbc;

import org.apache.seatunnel.api.configuration.util.OptionRule;
import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo;
import org.apache.seatunnel.datasource.plugin.api.DatasourcePluginTypeEnum;

import com.google.common.collect.Sets;

import java.util.Set;

/**
 * Static configuration for the JDBC StarRocks datasource plugin: plugin descriptor,
 * system databases to hide, and the option rules for connection/metadata parameters.
 */
public class StarRocksDataSourceConfig {

    /** Identifier under which this plugin is registered and looked up. */
    public static final String PLUGIN_NAME = "JDBC-StarRocks";

    /** Descriptor shown to the frontend (name, icon, version, DATABASE type). */
    public static final DataSourcePluginInfo STAR_ROCKS_DATA_SOURCE_PLUGIN_INFO =
            DataSourcePluginInfo.builder()
                    .name(PLUGIN_NAME)
                    .icon(PLUGIN_NAME)
                    .version("1.0.0")
                    .type(DatasourcePluginTypeEnum.DATABASE.getCode())
                    .build();

    /** Built-in StarRocks databases never exposed to users. */
    public static final Set<String> STAR_ROCKS_SYSTEM_DATABASES =
            Sets.newHashSet("_statistics_", "information_schema");

    /** Connection-level options: url + driver are mandatory, credentials optional. */
    public static final OptionRule OPTION_RULE =
            OptionRule.builder()
                    .required(StarRocksOptionRule.URL, StarRocksOptionRule.DRIVER)
                    .optional(StarRocksOptionRule.USER, StarRocksOptionRule.PASSWORD)
                    .build();

    /** Metadata-level options: a concrete database and table must be selected. */
    public static final OptionRule METADATA_RULE =
            OptionRule.builder()
                    .required(StarRocksOptionRule.DATABASE, StarRocksOptionRule.TABLE)
                    .build();

    private StarRocksDataSourceConfig() {
        // constants holder — not instantiable
    }
}
mode 100644 index 000000000..65f0f6136 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/jdbc/StarRocksJdbcDataSourceChannel.java @@ -0,0 +1,176 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.seatunnel.datasource.plugin.starrocks.jdbc;

import org.apache.seatunnel.api.configuration.util.OptionRule;
import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
import org.apache.seatunnel.datasource.plugin.api.model.TableField;
import org.apache.seatunnel.datasource.plugin.api.utils.JdbcUtils;

import org.apache.commons.lang3.StringUtils;

import lombok.NonNull;

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import static com.google.common.base.Preconditions.checkNotNull;

/**
 * {@link DataSourceChannel} for StarRocks (MySQL-protocol JDBC). All metadata is obtained
 * through JDBC {@link DatabaseMetaData} calls; a new connection is opened per request and
 * closed via try-with-resources.
 */
public class StarRocksJdbcDataSourceChannel implements DataSourceChannel {

    @Override
    public OptionRule getDataSourceOptions(@NonNull String pluginName) {
        return StarRocksDataSourceConfig.OPTION_RULE;
    }

    @Override
    public OptionRule getDatasourceMetadataFieldsByDataSourceName(@NonNull String pluginName) {
        return StarRocksDataSourceConfig.METADATA_RULE;
    }

    /**
     * Lists the non-blank TABLE-type table names of {@code database}.
     *
     * @throws DataSourcePluginException if the driver cannot be loaded or the query fails
     */
    @Override
    public List<String> getTables(
            @NonNull String pluginName, Map<String, String> requestParams, String database) {
        List<String> tableNames = new ArrayList<>();
        try (Connection connection = getConnection(requestParams);
                ResultSet resultSet =
                        connection
                                .getMetaData()
                                .getTables(database, null, null, new String[] {"TABLE"})) {
            while (resultSet.next()) {
                String tableName = resultSet.getString("TABLE_NAME");
                if (StringUtils.isNotBlank(tableName)) {
                    tableNames.add(tableName);
                }
            }
            return tableNames;
        } catch (ClassNotFoundException | SQLException e) {
            throw new DataSourcePluginException("get table names failed", e);
        }
    }

    /** Lists databases via {@code SHOW DATABASES}, excluding the StarRocks system databases. */
    @Override
    public List<String> getDatabases(
            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
        List<String> dbNames = new ArrayList<>();
        try (Connection connection = getConnection(requestParams);
                PreparedStatement statement = connection.prepareStatement("SHOW DATABASES;");
                ResultSet re = statement.executeQuery()) {
            // filter system databases; JDBC column-label lookup is case-insensitive,
            // so "database" matches the driver's "Database" column.
            while (re.next()) {
                String dbName = re.getString("database");
                if (StringUtils.isNotBlank(dbName)
                        && !StarRocksDataSourceConfig.STAR_ROCKS_SYSTEM_DATABASES.contains(
                                dbName)) {
                    dbNames.add(dbName);
                }
            }
            return dbNames;
        } catch (Exception ex) {
            throw new DataSourcePluginException("get databases failed", ex);
        }
    }

    /** Returns true if a connection can be opened with the given parameters; throws otherwise. */
    @Override
    public boolean checkDataSourceConnectivity(
            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
        try (Connection ignored = getConnection(requestParams)) {
            return true;
        } catch (Exception e) {
            throw new DataSourcePluginException("check jdbc connectivity failed", e);
        }
    }

    /**
     * Describes the columns of {@code database.table}, marking the primary-key column.
     *
     * @throws DataSourcePluginException if the driver cannot be loaded or metadata lookup fails
     */
    @Override
    public List<TableField> getTableFields(
            @NonNull String pluginName,
            @NonNull Map<String, String> requestParams,
            @NonNull String database,
            @NonNull String table) {
        List<TableField> tableFields = new ArrayList<>();
        try (Connection connection = getConnection(requestParams, database)) {
            DatabaseMetaData metaData = connection.getMetaData();
            String primaryKey = getPrimaryKey(metaData, database, table);
            try (ResultSet resultSet = metaData.getColumns(database, null, table, null)) {
                while (resultSet.next()) {
                    TableField tableField = new TableField();
                    String columnName = resultSet.getString("COLUMN_NAME");
                    tableField.setPrimaryKey(false);
                    if (StringUtils.isNotBlank(primaryKey) && primaryKey.equals(columnName)) {
                        tableField.setPrimaryKey(true);
                    }
                    tableField.setName(columnName);
                    tableField.setType(resultSet.getString("TYPE_NAME"));
                    tableField.setComment(resultSet.getString("REMARKS"));
                    Object nullable = resultSet.getObject("IS_NULLABLE");
                    tableField.setNullable(Boolean.TRUE.toString().equals(nullable.toString()));
                    tableFields.add(tableField);
                }
            }
        } catch (ClassNotFoundException | SQLException e) {
            throw new DataSourcePluginException("get table fields failed", e);
        }
        return tableFields;
    }

    // NOTE(review): bulk variant is unimplemented and returns null — callers must
    // null-check; consider returning an empty map instead once callers are audited.
    @Override
    public Map<String, List<TableField>> getTableFields(
            @NonNull String pluginName,
            @NonNull Map<String, String> requestParams,
            @NonNull String database,
            @NonNull List<String> tables) {
        return null;
    }

    /**
     * Returns the first primary-key column of the table, or null when there is none.
     * NOTE(review): composite primary keys are reduced to their first reported column.
     */
    private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName)
            throws SQLException {
        // try-with-resources: the previous version leaked this ResultSet.
        try (ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName)) {
            while (primaryKeysInfo.next()) {
                return primaryKeysInfo.getString("COLUMN_NAME");
            }
        }
        return null;
    }

    private Connection getConnection(Map<String, String> requestParams)
            throws SQLException, ClassNotFoundException {
        return getConnection(requestParams, null);
    }

    /**
     * Opens a connection from the request parameters, optionally rewriting the URL to
     * point at {@code databaseName}. Credentials are used only when a user is present.
     */
    private Connection getConnection(Map<String, String> requestParams, String databaseName)
            throws SQLException, ClassNotFoundException {
        checkNotNull(requestParams.get(StarRocksOptionRule.DRIVER.key()));
        checkNotNull(requestParams.get(StarRocksOptionRule.URL.key()), "Jdbc url cannot be null");
        String url =
                JdbcUtils.replaceDatabase(
                        requestParams.get(StarRocksOptionRule.URL.key()), databaseName);
        if (requestParams.containsKey(StarRocksOptionRule.USER.key())) {
            String username = requestParams.get(StarRocksOptionRule.USER.key());
            String password = requestParams.get(StarRocksOptionRule.PASSWORD.key());
            return DriverManager.getConnection(url, username, password);
        }
        return DriverManager.getConnection(url);
    }
}
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/jdbc/StarRocksJdbcDataSourceFactory.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.starrocks.jdbc; + +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourceFactory; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; + +import com.google.auto.service.AutoService; +import com.google.common.collect.Sets; + +import java.util.Set; + +@AutoService(DataSourceFactory.class) +public class StarRocksJdbcDataSourceFactory implements DataSourceFactory { + + @Override + public String factoryIdentifier() { + return StarRocksDataSourceConfig.PLUGIN_NAME; + } + + @Override + public Set supportedDataSources() { + return Sets.newHashSet(StarRocksDataSourceConfig.STAR_ROCKS_DATA_SOURCE_PLUGIN_INFO); + } + + @Override + public DataSourceChannel createChannel() { + return new StarRocksJdbcDataSourceChannel(); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/jdbc/StarRocksOptionRule.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/jdbc/StarRocksOptionRule.java new file mode 100644 index 000000000..08f2dc21b --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/jdbc/StarRocksOptionRule.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.starrocks.jdbc; + +import org.apache.seatunnel.api.configuration.Option; +import org.apache.seatunnel.api.configuration.Options; + +public class StarRocksOptionRule { + + public static final Option URL = + Options.key("url") + .stringType() + .noDefaultValue() + .withDescription( + "jdbc url, eg:" + + "jdbc:mysql://localhost:9030/test?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8"); + + public static final Option USER = + Options.key("user").stringType().noDefaultValue().withDescription("jdbc user"); + + public static final Option PASSWORD = + Options.key("password").stringType().noDefaultValue().withDescription("jdbc password"); + + public static final Option DATABASE = + Options.key("database").stringType().noDefaultValue().withDescription("jdbc database"); + + public static final Option TABLE = + Options.key("table").stringType().noDefaultValue().withDescription("jdbc table"); + + public static final Option DRIVER = + Options.key("driver") + .enumType(DriverType.class) + .defaultValue(DriverType.MYSQL) + .withDescription("driver"); + + public enum DriverType { + MYSQL("com.mysql.cj.jdbc.Driver"), + ; + private final String driverClassName; + + DriverType(String driverClassName) { + this.driverClassName = driverClassName; + } + + public String getDriverClassName() { + return driverClassName; + } + + @Override + public String toString() { + return driverClassName; + } + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/pom.xml 
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/pom.xml new file mode 100644 index 000000000..559a29361 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/pom.xml @@ -0,0 +1,61 @@ + + + + 4.0.0 + + org.apache.seatunnel + seatunnel-datasource-plugins + ${revision} + + + datasource-jdbc-tidb + + + 8.0.28 + + + + + org.apache.seatunnel + datasource-plugins-api + 1.0.0-SNAPSHOT + provided + + + org.apache.commons + commons-lang3 + + + + com.google.auto.service + auto-service + + + org.apache.seatunnel + seatunnel-api + provided + + + + + mysql + mysql-connector-java + ${mysql-connector.version} + provided + + + diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/src/main/java/org/apache/seatunnel/datasource/plugin/tidb/jdbc/TidbDataSourceConfig.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/src/main/java/org/apache/seatunnel/datasource/plugin/tidb/jdbc/TidbDataSourceConfig.java new file mode 100644 index 000000000..c9acb5b6b --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/src/main/java/org/apache/seatunnel/datasource/plugin/tidb/jdbc/TidbDataSourceConfig.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.tidb.jdbc; + +import org.apache.seatunnel.api.configuration.util.OptionRule; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; +import org.apache.seatunnel.datasource.plugin.api.DatasourcePluginTypeEnum; + +import com.google.common.collect.Sets; + +import java.util.Set; + +public class TidbDataSourceConfig { + + public static final String PLUGIN_NAME = "JDBC-TiDB"; + + public static final DataSourcePluginInfo TIDB_DATASOURCE_PLUGIN_INFO = + DataSourcePluginInfo.builder() + .name(PLUGIN_NAME) + .icon(PLUGIN_NAME) + .version("1.0.0") + .type(DatasourcePluginTypeEnum.DATABASE.getCode()) + .build(); + + public static final Set TIDB_SYSTEM_DATABASES = + Sets.newHashSet("information_schema", "mysql", "performance_schema", "metrics_schema"); + + public static final OptionRule OPTION_RULE = + OptionRule.builder() + .required(TidbOptionRule.URL, TidbOptionRule.DRIVER) + .optional(TidbOptionRule.USER, TidbOptionRule.PASSWORD) + .build(); + + public static final OptionRule METADATA_RULE = + OptionRule.builder().required(TidbOptionRule.DATABASE, TidbOptionRule.TABLE).build(); +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/src/main/java/org/apache/seatunnel/datasource/plugin/tidb/jdbc/TidbJdbcDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/src/main/java/org/apache/seatunnel/datasource/plugin/tidb/jdbc/TidbJdbcDataSourceChannel.java new file mode 100644 index 000000000..90688bbb5 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/src/main/java/org/apache/seatunnel/datasource/plugin/tidb/jdbc/TidbJdbcDataSourceChannel.java @@ -0,0 +1,183 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.seatunnel.datasource.plugin.tidb.jdbc;

import org.apache.seatunnel.api.configuration.util.OptionRule;
import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
import org.apache.seatunnel.datasource.plugin.api.model.TableField;
import org.apache.seatunnel.datasource.plugin.api.utils.JdbcUtils;

import org.apache.commons.lang3.StringUtils;

import lombok.NonNull;

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

import static com.google.common.base.Preconditions.checkNotNull;

/**
 * {@link DataSourceChannel} implementation that inspects a TiDB cluster through its
 * MySQL-compatible JDBC interface: lists databases/tables and describes table columns.
 */
public class TidbJdbcDataSourceChannel implements DataSourceChannel {

    @Override
    public OptionRule getDataSourceOptions(@NonNull String pluginName) {
        return TidbDataSourceConfig.OPTION_RULE;
    }

    @Override
    public OptionRule getDatasourceMetadataFieldsByDataSourceName(@NonNull String pluginName) {
        return TidbDataSourceConfig.METADATA_RULE;
    }

    /**
     * Lists the plain tables (not views) of {@code database}.
     *
     * @throws DataSourcePluginException if the connection or metadata query fails
     */
    @Override
    public List<String> getTables(
            @NonNull String pluginName, Map<String, String> requestParams, String database) {
        List<String> tableNames = new ArrayList<>();
        try (Connection connection = getConnection(requestParams);
                ResultSet resultSet =
                        connection
                                .getMetaData()
                                .getTables(database, null, null, new String[] {"TABLE"})) {
            while (resultSet.next()) {
                String tableName = resultSet.getString("TABLE_NAME");
                if (StringUtils.isNotBlank(tableName)) {
                    tableNames.add(tableName);
                }
            }
            return tableNames;
        } catch (ClassNotFoundException | SQLException e) {
            throw new DataSourcePluginException("get table names failed", e);
        }
    }

    /**
     * Lists user databases, hiding TiDB system schemas.
     *
     * @throws DataSourcePluginException if the connection or query fails
     */
    @Override
    public List<String> getDatabases(
            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
        List<String> dbNames = new ArrayList<>();
        try (Connection connection = getConnection(requestParams);
                PreparedStatement statement = connection.prepareStatement("SHOW DATABASES;");
                ResultSet re = statement.executeQuery()) {
            // Filter out system databases so only user schemas are offered.
            while (re.next()) {
                String dbName = re.getString("database");
                if (StringUtils.isNotBlank(dbName)
                        && !TidbDataSourceConfig.TIDB_SYSTEM_DATABASES.contains(dbName)) {
                    dbNames.add(dbName);
                }
            }
            return dbNames;
        } catch (SQLException | ClassNotFoundException e) {
            throw new DataSourcePluginException("Get databases failed", e);
        }
    }

    /** Connectivity probe: succeeds iff a connection can be opened (and is then closed). */
    @Override
    public boolean checkDataSourceConnectivity(
            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
        try (Connection ignored = getConnection(requestParams)) {
            return true;
        } catch (Exception e) {
            throw new DataSourcePluginException("check jdbc connectivity failed", e);
        }
    }

    /**
     * Describes the columns of {@code database.table}: name, type, comment, nullability and
     * whether the column is (part of) the primary key.
     */
    @Override
    public List<TableField> getTableFields(
            @NonNull String pluginName,
            @NonNull Map<String, String> requestParams,
            @NonNull String database,
            @NonNull String table) {
        List<TableField> tableFields = new ArrayList<>();
        try (Connection connection = getConnection(requestParams, database)) {
            DatabaseMetaData metaData = connection.getMetaData();
            String primaryKey = getPrimaryKey(metaData, database, table);
            try (ResultSet resultSet = metaData.getColumns(database, null, table, null)) {
                while (resultSet.next()) {
                    TableField tableField = new TableField();
                    String columnName = resultSet.getString("COLUMN_NAME");
                    tableField.setPrimaryKey(
                            StringUtils.isNotBlank(primaryKey) && primaryKey.equals(columnName));
                    tableField.setName(columnName);
                    tableField.setType(resultSet.getString("TYPE_NAME"));
                    tableField.setComment(resultSet.getString("REMARKS"));
                    // Fixed: JDBC reports IS_NULLABLE as "YES"/"NO"/"" (per
                    // DatabaseMetaData.getColumns), so the previous comparison against
                    // Boolean.TRUE.toString() ("true") was always false and NPE'd on NULL.
                    String nullable = resultSet.getString("IS_NULLABLE");
                    tableField.setNullable("YES".equalsIgnoreCase(nullable));
                    tableFields.add(tableField);
                }
            }
        } catch (ClassNotFoundException | SQLException e) {
            throw new DataSourcePluginException("get table fields failed", e);
        }
        return tableFields;
    }

    /** Bulk variant: describes every table in {@code tables}, keyed by table name. */
    @Override
    public Map<String, List<TableField>> getTableFields(
            @NonNull String pluginName,
            @NonNull Map<String, String> requestParams,
            @NonNull String database,
            @NonNull List<String> tables) {
        return tables.parallelStream()
                .collect(
                        Collectors.toMap(
                                Function.identity(),
                                table ->
                                        getTableFields(
                                                pluginName, requestParams, database, table)));
    }

    /**
     * Returns the first primary-key column of {@code tableName}, or {@code null} if none.
     *
     * <p>NOTE(review): composite primary keys are reduced to their first reported column —
     * confirm this matches how callers use the flag.
     */
    private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName)
            throws SQLException {
        // Fixed: the ResultSet was previously never closed (resource leak).
        try (ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName)) {
            if (primaryKeysInfo.next()) {
                return primaryKeysInfo.getString("COLUMN_NAME");
            }
        }
        return null;
    }

    private Connection getConnection(Map<String, String> requestParams)
            throws SQLException, ClassNotFoundException {
        return getConnection(requestParams, null);
    }

    /**
     * Opens a connection using the configured url (with the database segment swapped to
     * {@code databaseName} when given) and optional credentials.
     */
    private Connection getConnection(Map<String, String> requestParams, String databaseName)
            throws SQLException, ClassNotFoundException {
        checkNotNull(requestParams.get(TidbOptionRule.DRIVER.key()));
        checkNotNull(requestParams.get(TidbOptionRule.URL.key()), "Jdbc url cannot be null");
        String url =
                JdbcUtils.replaceDatabase(
                        requestParams.get(TidbOptionRule.URL.key()), databaseName);
        if (requestParams.containsKey(TidbOptionRule.USER.key())) {
            String username = requestParams.get(TidbOptionRule.USER.key());
            String password = requestParams.get(TidbOptionRule.PASSWORD.key());
            return DriverManager.getConnection(url, username, password);
        }
        return DriverManager.getConnection(url);
    }
}
+ */ + +package org.apache.seatunnel.datasource.plugin.tidb.jdbc; + +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourceFactory; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; + +import com.google.auto.service.AutoService; +import com.google.common.collect.Sets; +import lombok.extern.slf4j.Slf4j; + +import java.util.Set; + +@Slf4j +@AutoService(DataSourceFactory.class) +public class TidbJdbcDataSourceFactory implements DataSourceFactory { + + @Override + public String factoryIdentifier() { + return TidbDataSourceConfig.PLUGIN_NAME; + } + + @Override + public Set supportedDataSources() { + return Sets.newHashSet(TidbDataSourceConfig.TIDB_DATASOURCE_PLUGIN_INFO); + } + + @Override + public DataSourceChannel createChannel() { + return new TidbJdbcDataSourceChannel(); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/src/main/java/org/apache/seatunnel/datasource/plugin/tidb/jdbc/TidbOptionRule.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/src/main/java/org/apache/seatunnel/datasource/plugin/tidb/jdbc/TidbOptionRule.java new file mode 100644 index 000000000..7ae009170 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/src/main/java/org/apache/seatunnel/datasource/plugin/tidb/jdbc/TidbOptionRule.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.tidb.jdbc; + +import org.apache.seatunnel.api.configuration.Option; +import org.apache.seatunnel.api.configuration.Options; + +public class TidbOptionRule { + + public static final Option URL = + Options.key("url") + .stringType() + .noDefaultValue() + .withDescription( + "jdbc url, eg:" + + " jdbc:mysql://localhost:3306/test?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8"); + + public static final Option USER = + Options.key("user").stringType().noDefaultValue().withDescription("jdbc user"); + + public static final Option PASSWORD = + Options.key("password").stringType().noDefaultValue().withDescription("jdbc password"); + + public static final Option DATABASE = + Options.key("database").stringType().noDefaultValue().withDescription("jdbc database"); + + public static final Option TABLE = + Options.key("table").stringType().noDefaultValue().withDescription("jdbc table"); + + public static final Option DRIVER = + Options.key("driver") + .enumType(DriverType.class) + .defaultValue(DriverType.MYSQL) + .withDescription("driver"); + + public enum DriverType { + MYSQL("com.mysql.cj.jdbc.Driver"), + ; + private final String driverClassName; + + DriverType(String driverClassName) { + this.driverClassName = driverClassName; + } + + public String getDriverClassName() { + return driverClassName; + } + + @Override + public String toString() { + return driverClassName; + } + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/pom.xml 
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/pom.xml new file mode 100644 index 000000000..ae68a64c5 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/pom.xml @@ -0,0 +1,64 @@ + + + + 4.0.0 + + org.apache.seatunnel + seatunnel-datasource-plugins + ${revision} + + + datasource-kafka + + + 3.2.0 + + + + + org.apache.seatunnel + datasource-plugins-api + ${project.version} + provided + + + org.apache.commons + common-lang3 + + + + + org.apache.commons + commons-lang3 + + + + com.google.auto.service + auto-service + + + org.apache.seatunnel + seatunnel-api + provided + + + org.apache.kafka + kafka-clients + ${kafka.client.version} + + + diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/main/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/main/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaDataSourceChannel.java new file mode 100644 index 000000000..9413bcae1 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/main/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaDataSourceChannel.java @@ -0,0 +1,116 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.kafka; + +import org.apache.seatunnel.api.configuration.util.OptionRule; +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException; +import org.apache.seatunnel.datasource.plugin.api.model.TableField; + +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.kafka.clients.admin.AdminClient; +import org.apache.kafka.clients.admin.DescribeClusterOptions; +import org.apache.kafka.clients.admin.DescribeClusterResult; + +import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static com.google.common.base.Preconditions.checkArgument; + +@Slf4j +public class KafkaDataSourceChannel implements DataSourceChannel { + + private static final String DATABASE = "default"; + private static final DescribeClusterOptions DEFAULT_TIMEOUT_OPTIONS = + new DescribeClusterOptions().timeoutMs(60 * 1000); + + @Override + public OptionRule getDataSourceOptions(@NonNull String pluginName) { + return KafkaOptionRule.optionRule(); + } + + @Override + public OptionRule getDatasourceMetadataFieldsByDataSourceName(@NonNull String pluginName) { + return KafkaOptionRule.metadataRule(); + } + + @Override + public List getTables( + @NonNull String pluginName, Map requestParams, String database) { + checkArgument(StringUtils.equalsIgnoreCase(database, DATABASE), "database must be default"); + try (AdminClient adminClient = createAdminClient(requestParams)) { + Set strings = adminClient.listTopics().names().get(); + return new ArrayList<>(strings); + } catch (Exception ex) { + throw new DataSourcePluginException( + "check kafka connectivity failed, " 
+ ex.getMessage(), ex); + } + } + + @Override + public List getDatabases( + @NonNull String pluginName, @NonNull Map requestParams) { + return DEFAULT_DATABASES; + } + + @Override + public boolean checkDataSourceConnectivity( + @NonNull String pluginName, @NonNull Map requestParams) { + try (AdminClient adminClient = createAdminClient(requestParams)) { + // just test the connection + DescribeClusterResult describeClusterResult = + adminClient.describeCluster(DEFAULT_TIMEOUT_OPTIONS); + return CollectionUtils.isNotEmpty(describeClusterResult.nodes().get()); + } catch (Exception ex) { + throw new DataSourcePluginException( + "check kafka connectivity failed, " + ex.getMessage(), ex); + } + } + + @Override + public List getTableFields( + @NonNull String pluginName, + @NonNull Map requestParams, + @NonNull String database, + @NonNull String table) { + checkArgument(StringUtils.equalsIgnoreCase(database, DATABASE), "database must be default"); + return Collections.emptyList(); + } + + @Override + public Map> getTableFields( + @NonNull String pluginName, + @NonNull Map requestParams, + @NonNull String database, + @NonNull List tables) { + checkArgument(StringUtils.equalsIgnoreCase(database, DATABASE), "database must be default"); + return Collections.emptyMap(); + } + + private AdminClient createAdminClient(Map requestParams) { + return AdminClient.create( + KafkaRequestParamsUtils.parsePropertiesFromRequestParams(requestParams)); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/main/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaDataSourceFactory.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/main/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaDataSourceFactory.java new file mode 100644 index 000000000..16b258cdf --- /dev/null +++ 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.seatunnel.datasource.plugin.kafka;

import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
import org.apache.seatunnel.datasource.plugin.api.DataSourceFactory;
import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo;
import org.apache.seatunnel.datasource.plugin.api.DatasourcePluginTypeEnum;

import com.google.auto.service.AutoService;
import com.google.common.collect.Sets;

import java.util.Set;

/**
 * SPI entry point for the Kafka datasource plugin; discovered via {@link AutoService}.
 *
 * <p>Kafka is an unstructured source, so the descriptor marks virtual-table support and
 * the {@code NO_STRUCTURED} plugin type.
 */
@AutoService(DataSourceFactory.class)
public class KafkaDataSourceFactory implements DataSourceFactory {

    public static final String KAFKA_PLUGIN_NAME = "Kafka";
    public static final String KAFKA_PLUGIN_ICON = "kafka";
    public static final String KAFKA_PLUGIN_VERSION = "1.0.0";

    /** Descriptor for the single Kafka datasource type this factory supports. */
    private static final DataSourcePluginInfo KAFKA_PLUGIN_INFO =
            DataSourcePluginInfo.builder()
                    .name(KAFKA_PLUGIN_NAME)
                    .icon(KAFKA_PLUGIN_ICON)
                    .version(KAFKA_PLUGIN_VERSION)
                    .supportVirtualTables(true)
                    .type(DatasourcePluginTypeEnum.NO_STRUCTURED.getCode())
                    .build();

    /** Identifier used by the framework to look this factory up. */
    @Override
    public String factoryIdentifier() {
        return KAFKA_PLUGIN_NAME;
    }

    @Override
    public Set<DataSourcePluginInfo> supportedDataSources() {
        return Sets.newHashSet(KAFKA_PLUGIN_INFO);
    }

    /** Builds a fresh channel that talks to Kafka through the admin client. */
    @Override
    public DataSourceChannel createChannel() {
        return new KafkaDataSourceChannel();
    }
}
+ */ + +package org.apache.seatunnel.datasource.plugin.kafka; + +import org.apache.seatunnel.api.configuration.Option; +import org.apache.seatunnel.api.configuration.Options; +import org.apache.seatunnel.api.configuration.util.OptionRule; + +import java.util.Map; + +public class KafkaOptionRule { + + public static final Option BOOTSTRAP_SERVERS = + Options.key("bootstrap.servers") + .stringType() + .noDefaultValue() + .withDescription("Kafka cluster address, separated by \",\"."); + public static final Option TOPIC = + Options.key("topic") + .stringType() + .noDefaultValue() + .withDescription( + "Kafka topic name. If there are multiple topics, use , to split, for example: \"tpc1,tpc2\"."); + + public static final Option PATTERN = + Options.key("pattern") + .booleanType() + .defaultValue(false) + .withDescription( + "If pattern is set to true,the regular expression for a pattern of topic names to read from." + + " All topics in clients with names that match the specified regular expression will be subscribed by the consumer."); + + public static final Option> KAFKA_CONFIG = + Options.key("kafka.config") + .mapType() + .noDefaultValue() + .withDescription( + "{\n" + + "client.id=client_1\n" + + "max.poll.records=500\n" + + "auto.offset.reset=earliest\n" + + "enable.auto.commit=false\n" + + "}"); + + public static OptionRule optionRule() { + return OptionRule.builder().required(BOOTSTRAP_SERVERS).optional(KAFKA_CONFIG).build(); + } + + public static OptionRule metadataRule() { + return OptionRule.builder().required(TOPIC).optional(PATTERN).build(); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/main/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaRequestParamsUtils.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/main/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaRequestParamsUtils.java new file mode 100644 index 000000000..25e70c991 --- /dev/null +++ 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.seatunnel.datasource.plugin.kafka;

import org.apache.seatunnel.shade.com.typesafe.config.Config;
import org.apache.seatunnel.shade.com.typesafe.config.ConfigFactory;

import org.apache.kafka.clients.admin.AdminClientConfig;

import java.util.Map;
import java.util.Properties;

import static com.google.common.base.Preconditions.checkArgument;

/** Translates plugin request parameters into Kafka client {@link Properties}. */
public class KafkaRequestParamsUtils {

    private KafkaRequestParamsUtils() {
        // Utility class: no instances.
    }

    /**
     * Builds Kafka client properties from the request parameters.
     *
     * <p>{@code bootstrap.servers} is mandatory; any extra HOCON-formatted settings under
     * {@code kafka.config} are parsed and copied through verbatim.
     *
     * @throws IllegalArgumentException if {@code bootstrap.servers} is missing
     */
    public static Properties parsePropertiesFromRequestParams(
            Map<String, String> requestParams) {
        checkArgument(
                requestParams.containsKey(KafkaOptionRule.BOOTSTRAP_SERVERS.key()),
                String.format(
                        "Missing %s in requestParams", KafkaOptionRule.BOOTSTRAP_SERVERS.key()));
        final Properties properties = new Properties();
        properties.put(
                AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG,
                requestParams.get(KafkaOptionRule.BOOTSTRAP_SERVERS.key()));
        if (requestParams.containsKey(KafkaOptionRule.KAFKA_CONFIG.key())) {
            // The kafka.config value is a HOCON blob; flatten every entry into the
            // properties as plain strings.
            Config configObject =
                    ConfigFactory.parseString(
                            requestParams.get(KafkaOptionRule.KAFKA_CONFIG.key()));
            configObject
                    .entrySet()
                    .forEach(
                            entry ->
                                    properties.put(
                                            entry.getKey(),
                                            entry.getValue().unwrapped().toString()));
        }
        return properties;
    }
}
package org.apache.seatunnel.datasource.plugin.kafka;

import org.apache.seatunnel.api.configuration.util.OptionRule;
import org.apache.seatunnel.datasource.plugin.api.model.TableField;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

import com.google.common.collect.ImmutableMap;
import lombok.extern.slf4j.Slf4j;

import java.util.Collections;
import java.util.List;
import java.util.Map;

// todo: use test container to test
@Slf4j
@Disabled
public class KafkaDataSourceChannelTest {

    private static final KafkaDataSourceChannel KAFKA_DATA_SOURCE_CHANNEL =
            new KafkaDataSourceChannel();

    private static final String KAFKA_PLUGIN_NAME = "kafka";
    private static final String BOOTSTRAP_SERVER = "localhost:9092";

    private static final Map<String, String> REQUEST_PARAMS =
            new ImmutableMap.Builder<String, String>()
                    .put(KafkaOptionRule.BOOTSTRAP_SERVERS.key(), BOOTSTRAP_SERVER)
                    .build();

    @Test
    public void getDataSourceOptions() {
        OptionRule dataSourceMetadataFieldsByDataSourceName =
                KAFKA_DATA_SOURCE_CHANNEL.getDataSourceOptions(KAFKA_PLUGIN_NAME);
        Assertions.assertEquals(
                1, dataSourceMetadataFieldsByDataSourceName.getRequiredOptions().size());
    }

    @Test
    public void getDatasourceMetadataFieldsByDataSourceName() {
        OptionRule datasourceMetadataFieldsByDataSourceName =
                KAFKA_DATA_SOURCE_CHANNEL.getDatasourceMetadataFieldsByDataSourceName(
                        KAFKA_PLUGIN_NAME);
        Assertions.assertEquals(
                2, datasourceMetadataFieldsByDataSourceName.getOptionalOptions().size());
    }

    @Test
    public void getTables() {
        List<String> tables =
                KAFKA_DATA_SOURCE_CHANNEL.getTables(KAFKA_PLUGIN_NAME, REQUEST_PARAMS, null);
        log.info("{}", tables);
        Assertions.assertNotNull(tables);
    }

    @Test
    public void getDatabases() {
        List<String> databases =
                KAFKA_DATA_SOURCE_CHANNEL.getDatabases(KAFKA_PLUGIN_NAME, REQUEST_PARAMS);
        log.info("{}", databases);
        Assertions.assertNotNull(databases);
    }

    @Test
    public void checkDataSourceConnectivity() {
        boolean dataSourceConnectivity =
                KAFKA_DATA_SOURCE_CHANNEL.checkDataSourceConnectivity(
                        KAFKA_PLUGIN_NAME, REQUEST_PARAMS);
        Assertions.assertTrue(dataSourceConnectivity);
    }

    @Test
    public void getTableFields() {
        List<TableField> tableFields =
                KAFKA_DATA_SOURCE_CHANNEL.getTableFields(KAFKA_PLUGIN_NAME, REQUEST_PARAMS, "", "");
        log.info("{}", tableFields);
        Assertions.assertTrue(tableFields.isEmpty());
    }

    @Test
    public void testGetTableFields() {
        Map<String, List<TableField>> tableFields =
                KAFKA_DATA_SOURCE_CHANNEL.getTableFields(
                        KAFKA_PLUGIN_NAME, REQUEST_PARAMS, "", Collections.emptyList());
        log.info("{}", tableFields);
        Assertions.assertTrue(tableFields.isEmpty());
    }
}
package org.apache.seatunnel.datasource.plugin.kafka;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

import com.google.common.collect.ImmutableMap;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

class KafkaRequestParamsUtilsTest {

    @Test
    void parsePropertiesFromRequestParams() {
        Map<String, String> requestParams =
                new ImmutableMap.Builder<String, String>()
                        .put(KafkaOptionRule.BOOTSTRAP_SERVERS.key(), "localhost:9092")
                        .put(
                                KafkaOptionRule.KAFKA_CONFIG.key(),
                                "{" + "security.protocol = SASL_PLAINTEXT" + "}")
                        .build();
        Properties properties =
                KafkaRequestParamsUtils.parsePropertiesFromRequestParams(requestParams);
        Assertions.assertEquals("SASL_PLAINTEXT", properties.getProperty("security.protocol"));
    }

    @Test
    void parsePropertiesFromRequestParamsBadCase() {
        // Empty config object is valid and contributes nothing.
        Assertions.assertDoesNotThrow(
                () ->
                        KafkaRequestParamsUtils.parsePropertiesFromRequestParams(
                                new ImmutableMap.Builder<String, String>()
                                        .put(
                                                KafkaOptionRule.BOOTSTRAP_SERVERS.key(),
                                                "localhost:9092")
                                        .put(KafkaOptionRule.KAFKA_CONFIG.key(), "{}")
                                        .build()));

        // Missing bootstrap.servers must be rejected.
        Assertions.assertThrows(
                IllegalArgumentException.class,
                () -> KafkaRequestParamsUtils.parsePropertiesFromRequestParams(new HashMap<>()));

        // Empty HOCON text parses to an empty config, so it must not throw either.
        Assertions.assertDoesNotThrow(
                () ->
                        KafkaRequestParamsUtils.parsePropertiesFromRequestParams(
                                new ImmutableMap.Builder<String, String>()
                                        .put(
                                                KafkaOptionRule.BOOTSTRAP_SERVERS.key(),
                                                "localhost:9092")
                                        .put(KafkaOptionRule.KAFKA_CONFIG.key(), "")
                                        .build()));
    }
}
a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-mysql-cdc/pom.xml b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-mysql-cdc/pom.xml new file mode 100644 index 000000000..23fedfd59 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-mysql-cdc/pom.xml @@ -0,0 +1,66 @@ + + + + 4.0.0 + + org.apache.seatunnel + seatunnel-datasource-plugins + ${revision} + + + datasource-mysql-cdc + + + 8.0.16 + + + + + org.apache.seatunnel + datasource-plugins-api + ${project.version} + provided + + + org.apache.commons + common-lang3 + + + + + org.apache.commons + commons-lang3 + + + + com.google.auto.service + auto-service + + + org.apache.seatunnel + seatunnel-api + provided + + + mysql + mysql-connector-java + ${mysql.version} + provided + + + + diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-mysql-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/mysql/MysqlCDCDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-mysql-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/mysql/MysqlCDCDataSourceChannel.java new file mode 100644 index 000000000..1b63a97d3 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-mysql-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/mysql/MysqlCDCDataSourceChannel.java @@ -0,0 +1,249 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
package org.apache.seatunnel.datasource.plugin.cdc.mysql;

import org.apache.seatunnel.api.configuration.util.OptionRule;
import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
import org.apache.seatunnel.datasource.plugin.api.model.TableField;

import org.apache.commons.lang3.StringUtils;

import com.google.common.collect.Sets;
import lombok.NonNull;

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * {@link DataSourceChannel} for the MySQL-CDC connector. Besides plain JDBC metadata lookups it
 * verifies the server is usable for change data capture: binlog enabled, {@code binlog_format=ROW}
 * and {@code binlog_row_image=FULL}.
 */
public class MysqlCDCDataSourceChannel implements DataSourceChannel {

    /** Databases that ship with MySQL and must never be offered as CDC sources. */
    public static final Set<String> MYSQL_SYSTEM_DATABASES =
            Sets.newHashSet("information_schema", "mysql", "performance_schema", "sys");

    @Override
    public boolean canAbleGetSchema() {
        return true;
    }

    @Override
    public OptionRule getDataSourceOptions(@NonNull String pluginName) {
        return MysqlCDCOptionRule.optionRule();
    }

    @Override
    public OptionRule getDatasourceMetadataFieldsByDataSourceName(@NonNull String pluginName) {
        return MysqlCDCOptionRule.metadataRule();
    }

    @Override
    public List<String> getTables(
            String pluginName, Map<String, String> requestParams, String database) {
        return this.getTableNames(requestParams, database);
    }

    @Override
    public List<String> getDatabases(String pluginName, Map<String, String> requestParams) {
        try {
            return this.getDataBaseNames(requestParams);
        } catch (SQLException e) {
            throw new DataSourcePluginException("get databases failed", e);
        }
    }

    @Override
    public boolean checkDataSourceConnectivity(
            String pluginName, Map<String, String> requestParams) {
        return this.checkJdbcConnectivity(requestParams);
    }

    @Override
    public List<TableField> getTableFields(
            String pluginName, Map<String, String> requestParams, String database, String table) {
        return getTableFields(requestParams, database, table);
    }

    @Override
    public Map<String, List<TableField>> getTableFields(
            String pluginName,
            Map<String, String> requestParams,
            String database,
            List<String> tables) {
        Map<String, List<TableField>> tableFields = new HashMap<>(tables.size());
        for (String table : tables) {
            tableFields.put(table, getTableFields(requestParams, database, table));
        }
        return tableFields;
    }

    /**
     * Connects and checks the CDC prerequisites; any missing prerequisite is surfaced as a
     * {@link DataSourcePluginException} rather than returning {@code false}.
     */
    @SuppressWarnings("checkstyle:MagicNumber")
    protected boolean checkJdbcConnectivity(Map<String, String> requestParams) {
        try (Connection connection = init(requestParams);
                Statement statement = connection.createStatement()) {

            // An empty result (or blank file) means the binlog is off.
            try (ResultSet resultSet = statement.executeQuery("SHOW MASTER STATUS")) {
                if (resultSet.next()) {
                    String binlogFile = resultSet.getString("File");
                    if (StringUtils.isBlank(binlogFile)) {
                        throw new DataSourcePluginException("binlog must be enabled");
                    }
                } else {
                    throw new DataSourcePluginException("binlog must be enabled");
                }
            }

            try (ResultSet resultSet =
                    statement.executeQuery("SHOW VARIABLES LIKE 'binlog_format'")) {
                if (resultSet.next()) {
                    String binlogFormat = resultSet.getString("Value");
                    if (!"ROW".equalsIgnoreCase(binlogFormat)) {
                        throw new DataSourcePluginException("binlog_format must be ROW");
                    }
                } else {
                    throw new DataSourcePluginException("binlog_format must be ROW");
                }
            }

            try (ResultSet resultSet =
                    statement.executeQuery("SHOW VARIABLES LIKE 'binlog_row_image'")) {
                if (resultSet.next()) {
                    String binlogRowImage = resultSet.getString("Value");
                    if (!"FULL".equalsIgnoreCase(binlogRowImage)) {
                        throw new DataSourcePluginException("binlog_row_image must be FULL");
                    }
                } else {
                    throw new DataSourcePluginException("binlog_row_image must be FULL");
                }
            }
            return true;
        } catch (Exception e) {
            throw new DataSourcePluginException(
                    "check jdbc connectivity failed, " + e.getMessage(), e);
        }
    }

    /** Opens a JDBC connection from the request parameters; credentials are optional. */
    protected Connection init(Map<String, String> requestParams) throws SQLException {
        if (null == requestParams.get(MysqlCDCOptionRule.BASE_URL.key())) {
            throw new DataSourcePluginException("Jdbc url is null");
        }
        String url = requestParams.get(MysqlCDCOptionRule.BASE_URL.key());
        if (null != requestParams.get(MysqlCDCOptionRule.PASSWORD.key())
                && null != requestParams.get(MysqlCDCOptionRule.USERNAME.key())) {
            String username = requestParams.get(MysqlCDCOptionRule.USERNAME.key());
            String password = requestParams.get(MysqlCDCOptionRule.PASSWORD.key());
            return DriverManager.getConnection(url, username, password);
        }
        return DriverManager.getConnection(url);
    }

    /** Lists all databases on the server, excluding MySQL system databases. */
    protected List<String> getDataBaseNames(Map<String, String> requestParams)
            throws SQLException {
        List<String> dbNames = new ArrayList<>();
        try (Connection connection = init(requestParams);
                PreparedStatement statement = connection.prepareStatement("SHOW DATABASES;");
                ResultSet re = statement.executeQuery()) {
            // SHOW DATABASES returns a single column labelled "Database"; JDBC label lookup
            // is case-insensitive, so "database" resolves it.
            while (re.next()) {
                String dbName = re.getString("database");
                if (StringUtils.isNotBlank(dbName) && isNotSystemDatabase(dbName)) {
                    dbNames.add(dbName);
                }
            }
            return dbNames;
        }
    }

    /** Lists plain tables (no views) of the given database. */
    protected List<String> getTableNames(Map<String, String> requestParams, String dbName) {
        List<String> tableNames = new ArrayList<>();
        try (Connection connection = init(requestParams);
                ResultSet resultSet =
                        connection
                                .getMetaData()
                                .getTables(dbName, null, null, new String[] {"TABLE"})) {
            while (resultSet.next()) {
                String tableName = resultSet.getString("TABLE_NAME");
                if (StringUtils.isNotBlank(tableName)) {
                    tableNames.add(tableName);
                }
            }
            return tableNames;
        } catch (SQLException e) {
            throw new DataSourcePluginException("get table names failed", e);
        }
    }

    /** Describes the columns of one table, marking the (first) primary-key column. */
    protected List<TableField> getTableFields(
            Map<String, String> requestParams, String dbName, String tableName) {
        List<TableField> tableFields = new ArrayList<>();
        try (Connection connection = init(requestParams)) {
            DatabaseMetaData metaData = connection.getMetaData();
            String primaryKey = getPrimaryKey(metaData, dbName, tableName);
            // try-with-resources: this ResultSet was previously leaked.
            try (ResultSet resultSet = metaData.getColumns(dbName, null, tableName, null)) {
                while (resultSet.next()) {
                    TableField tableField = new TableField();
                    String columnName = resultSet.getString("COLUMN_NAME");
                    tableField.setPrimaryKey(
                            StringUtils.isNotBlank(primaryKey)
                                    && primaryKey.equals(columnName));
                    tableField.setName(columnName);
                    tableField.setType(resultSet.getString("TYPE_NAME"));
                    tableField.setComment(resultSet.getString("REMARKS"));
                    Object nullable = resultSet.getObject("IS_NULLABLE");
                    tableField.setNullable(convertToBoolean(nullable));
                    tableFields.add(tableField);
                }
            }
        } catch (SQLException e) {
            throw new DataSourcePluginException("get table fields failed", e);
        }
        return tableFields;
    }

    /** Returns the first primary-key column of the table, or {@code null} when there is none. */
    private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName)
            throws SQLException {
        // try-with-resources: this ResultSet was previously never closed.
        try (ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName)) {
            while (primaryKeysInfo.next()) {
                return primaryKeysInfo.getString("COLUMN_NAME");
            }
        }
        return null;
    }

    private boolean isNotSystemDatabase(String dbName) {
        return MYSQL_SYSTEM_DATABASES.stream()
                .noneMatch(systemDatabase -> StringUtils.equalsIgnoreCase(systemDatabase, dbName));
    }

    /**
     * Interprets JDBC metadata values as booleans. {@code DatabaseMetaData.getColumns} reports
     * IS_NULLABLE as the string "YES"/"NO" (never "TRUE"), so the previous comparison against
     * "TRUE" marked every column as non-nullable; accept both spellings, case-insensitively.
     */
    private boolean convertToBoolean(Object value) {
        if (value instanceof Boolean) {
            return (Boolean) value;
        }
        if (value instanceof String) {
            String text = (String) value;
            return "YES".equalsIgnoreCase(text) || "TRUE".equalsIgnoreCase(text);
        }
        return false;
    }
}
a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-mysql-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/mysql/MysqlCDCDataSourceConfig.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-mysql-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/mysql/MysqlCDCDataSourceConfig.java new file mode 100644 index 000000000..b81d43599 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-mysql-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/mysql/MysqlCDCDataSourceConfig.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.cdc.mysql; + +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; +import org.apache.seatunnel.datasource.plugin.api.DatasourcePluginTypeEnum; + +public class MysqlCDCDataSourceConfig { + + public static final String PLUGIN_NAME = "MySQL-CDC"; + + public static final DataSourcePluginInfo MYSQL_CDC_DATASOURCE_PLUGIN_INFO = + DataSourcePluginInfo.builder() + .name(PLUGIN_NAME) + .icon(PLUGIN_NAME) + .version("1.0") + .type(DatasourcePluginTypeEnum.DATABASE.getCode()) + .build(); +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-mysql-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/mysql/MysqlCDCDataSourceFactory.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-mysql-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/mysql/MysqlCDCDataSourceFactory.java new file mode 100644 index 000000000..ac4db000a --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-mysql-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/mysql/MysqlCDCDataSourceFactory.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.apache.seatunnel.datasource.plugin.cdc.mysql;

import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
import org.apache.seatunnel.datasource.plugin.api.DataSourceFactory;
import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo;

import com.google.auto.service.AutoService;

import java.util.Collections;
import java.util.Set;

/** SPI factory that registers the MySQL-CDC datasource plugin. */
@AutoService(DataSourceFactory.class)
public class MysqlCDCDataSourceFactory implements DataSourceFactory {

    @Override
    public String factoryIdentifier() {
        // Reuse the shared constant instead of a duplicated "MySQL-CDC" literal so the
        // factory id can never drift from the plugin descriptor.
        return MysqlCDCDataSourceConfig.PLUGIN_NAME;
    }

    @Override
    public Set<DataSourcePluginInfo> supportedDataSources() {
        return Collections.singleton(MysqlCDCDataSourceConfig.MYSQL_CDC_DATASOURCE_PLUGIN_INFO);
    }

    @Override
    public DataSourceChannel createChannel() {
        return new MysqlCDCDataSourceChannel();
    }
}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.cdc.mysql; + +import org.apache.seatunnel.api.configuration.Option; +import org.apache.seatunnel.api.configuration.Options; +import org.apache.seatunnel.api.configuration.util.OptionRule; + +public class MysqlCDCOptionRule { + + public static final Option BASE_URL = + Options.key("base-url") + .stringType() + .noDefaultValue() + .withDescription( + "URL has to be without database, like \"jdbc:mysql://localhost:5432/\" or" + + "\"jdbc:mysql://localhost:5432\" rather than \"jdbc:mysql://localhost:5432/db\""); + + public static final Option USERNAME = + Options.key("username") + .stringType() + .noDefaultValue() + .withDescription( + "Name of the database to use when connecting to the database server."); + + public static final Option PASSWORD = + Options.key("password") + .stringType() + .noDefaultValue() + .withDescription("Password to use when connecting to the database server."); + + public static final Option DATABASE_NAME = + Options.key("database-name") + .stringType() + .noDefaultValue() + .withDescription("Database name of the database to monitor."); + + public static final Option TABLE_NAME = + Options.key("table-name") + .stringType() + .noDefaultValue() + .withDescription("Table name of the database to monitor."); + public static final Option SERVER_TIME_ZONE = + Options.key("server-time-zone") + .stringType() + .defaultValue("UTC") + .withDescription("The session time zone in database server."); + + public static OptionRule optionRule() { + return OptionRule.builder() + 
.required(USERNAME, PASSWORD, BASE_URL) + .optional(SERVER_TIME_ZONE) + .build(); + } + + public static OptionRule metadataRule() { + return OptionRule.builder().required(DATABASE_NAME, TABLE_NAME).build(); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-plugins-api/pom.xml b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-plugins-api/pom.xml new file mode 100644 index 000000000..7bdd419df --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-plugins-api/pom.xml @@ -0,0 +1,37 @@ + + + + 4.0.0 + + org.apache.seatunnel + seatunnel-datasource-plugins + ${revision} + + + datasource-plugins-api + + + + org.apache.seatunnel + seatunnel-api + + + com.google.auto.service + auto-service + + + diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-plugins-api/src/main/java/org/apache/seatunnel/datasource/plugin/api/DataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-plugins-api/src/main/java/org/apache/seatunnel/datasource/plugin/api/DataSourceChannel.java new file mode 100644 index 000000000..dade44577 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-plugins-api/src/main/java/org/apache/seatunnel/datasource/plugin/api/DataSourceChannel.java @@ -0,0 +1,83 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
package org.apache.seatunnel.datasource.plugin.api;

import org.apache.seatunnel.api.configuration.util.OptionRule;
import org.apache.seatunnel.datasource.plugin.api.model.TableField;

import com.google.common.collect.ImmutableList;
import lombok.NonNull;

import java.util.List;
import java.util.Map;

/**
 * Operations every datasource plugin must provide: option rules for the UI forms, metadata
 * browsing (databases, tables, fields) and a connectivity check.
 */
public interface DataSourceChannel {

    /** Fallback database list for engines that have no database concept. */
    List<String> DEFAULT_DATABASES = ImmutableList.of("default");

    /**
     * get the option rule describing the datasource-creation form
     *
     * <p>(the previous javadoc was a copy of {@link #getDatasourceMetadataFieldsByDataSourceName})
     *
     * @param pluginName plugin name
     * @return datasource option rule
     */
    OptionRule getDataSourceOptions(@NonNull String pluginName);

    /**
     * get datasource metadata fields (virtual-table form) by datasource name
     *
     * @param pluginName plugin name
     * @return datasource metadata fields
     */
    OptionRule getDatasourceMetadataFieldsByDataSourceName(@NonNull String pluginName);

    /** List table names of the given database. */
    List<String> getTables(
            @NonNull String pluginName, Map<String, String> requestParams, String database);

    /** List database names reachable with the given connection parameters. */
    List<String> getDatabases(
            @NonNull String pluginName, @NonNull Map<String, String> requestParams);

    /** Verify the datasource can be reached with the given parameters. */
    boolean checkDataSourceConnectivity(
            @NonNull String pluginName, @NonNull Map<String, String> requestParams);

    /** Whether this plugin can derive a schema automatically; defaults to false. */
    default boolean canAbleGetSchema() {
        return false;
    }

    /** Describe the fields of one table. */
    List<TableField> getTableFields(
            @NonNull String pluginName,
            @NonNull Map<String, String> requestParams,
            @NonNull String database,
            @NonNull String table);

    /** Describe the fields of several tables, keyed by table name. */
    Map<String, List<TableField>> getTableFields(
            @NonNull String pluginName,
            @NonNull Map<String, String> requestParams,
            @NonNull String database,
            @NonNull List<String> tables);

    /**
     * just check metadata field is right and used by virtual table
     *
     * @param requestParams request param(connector params)
     * @return true if right
     */
    default Boolean checkMetadataFieldIsRight(Map<String, String> requestParams) {
        return true;
    }
}
package org.apache.seatunnel.datasource.plugin.api;

import java.util.Set;

/**
 * SPI entry point of a datasource plugin, discovered via {@code ServiceLoader}; implementations
 * are annotated with {@code @AutoService(DataSourceFactory.class)}.
 */
public interface DataSourceFactory {

    /** Unique identifier of this factory (typically the plugin name). */
    String factoryIdentifier();

    /** Descriptors of all datasource plugins this factory can serve. */
    Set<DataSourcePluginInfo> supportedDataSources();

    /** Creates the channel that performs the actual metadata operations. */
    DataSourceChannel createChannel();
}
+ */ + +package org.apache.seatunnel.datasource.plugin.api; + +public class DataSourcePluginException extends RuntimeException { + + public DataSourcePluginException(String message) { + super(message); + } + + public DataSourcePluginException(String message, Throwable cause) { + super(message, cause); + } + + public DataSourcePluginException(Throwable cause) { + super(cause); + } + + public DataSourcePluginException() { + super(); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-plugins-api/src/main/java/org/apache/seatunnel/datasource/plugin/api/DataSourcePluginInfo.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-plugins-api/src/main/java/org/apache/seatunnel/datasource/plugin/api/DataSourcePluginInfo.java new file mode 100644 index 000000000..3c47d5a51 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-plugins-api/src/main/java/org/apache/seatunnel/datasource/plugin/api/DataSourcePluginInfo.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/**
 * Immutable metadata describing one datasource plugin: display name, icon,
 * version, type code (see {@code DatasourcePluginTypeEnum}) and whether the
 * plugin supports virtual tables.
 *
 * <p>FIX(review): the previous Lombok {@code @Data} version declared PUBLIC
 * mutable fields, which defeated the null-validation performed in the
 * constructor (callers could assign fields directly). Rewritten with private
 * final fields and an explicit builder; the external surface
 * ({@code builder()}, getters, constructor, equals/hashCode/toString) matches
 * what Lombok generated, minus the now-unsafe setters and field access.
 */
public class DataSourcePluginInfo {

    private final String name;

    private final String icon;

    private final String version;

    /** Type code; values follow {@code DatasourcePluginTypeEnum}. */
    private final Integer type;

    /** Whether virtual tables are supported; null is normalized to false. */
    private final Boolean supportVirtualTables;

    /**
     * @throws NullPointerException when name, icon, version or type is null
     */
    public DataSourcePluginInfo(
            String name, String icon, String version, Integer type, Boolean supportVirtualTables) {
        this.name = java.util.Objects.requireNonNull(name, "name can not be null");
        this.icon = java.util.Objects.requireNonNull(icon, "icon can not be null");
        this.version = java.util.Objects.requireNonNull(version, "version can not be null");
        this.type = java.util.Objects.requireNonNull(type, "type can not be null");
        this.supportVirtualTables = supportVirtualTables != null && supportVirtualTables;
    }

    public String getName() {
        return name;
    }

    public String getIcon() {
        return icon;
    }

    public String getVersion() {
        return version;
    }

    public Integer getType() {
        return type;
    }

    public Boolean getSupportVirtualTables() {
        return supportVirtualTables;
    }

    /** Fluent builder, API-compatible with the former Lombok-generated one. */
    public static Builder builder() {
        return new Builder();
    }

    public static final class Builder {
        private String name;
        private String icon;
        private String version;
        private Integer type;
        private Boolean supportVirtualTables;

        public Builder name(String name) {
            this.name = name;
            return this;
        }

        public Builder icon(String icon) {
            this.icon = icon;
            return this;
        }

        public Builder version(String version) {
            this.version = version;
            return this;
        }

        public Builder type(Integer type) {
            this.type = type;
            return this;
        }

        public Builder supportVirtualTables(Boolean supportVirtualTables) {
            this.supportVirtualTables = supportVirtualTables;
            return this;
        }

        /** Validation happens in the target constructor. */
        public DataSourcePluginInfo build() {
            return new DataSourcePluginInfo(name, icon, version, type, supportVirtualTables);
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof DataSourcePluginInfo)) {
            return false;
        }
        DataSourcePluginInfo that = (DataSourcePluginInfo) o;
        return java.util.Objects.equals(name, that.name)
                && java.util.Objects.equals(icon, that.icon)
                && java.util.Objects.equals(version, that.version)
                && java.util.Objects.equals(type, that.type)
                && java.util.Objects.equals(supportVirtualTables, that.supportVirtualTables);
    }

    @Override
    public int hashCode() {
        return java.util.Objects.hash(name, icon, version, type, supportVirtualTables);
    }

    @Override
    public String toString() {
        return "DataSourcePluginInfo(name=" + name
                + ", icon=" + icon
                + ", version=" + version
                + ", type=" + type
                + ", supportVirtualTables=" + supportVirtualTables
                + ")";
    }
}
/**
 * Category of a datasource plugin. {@code code} is the integer stored in
 * {@code DataSourcePluginInfo.type}; {@code name} is the machine-readable
 * English label; {@code chineseName} is the display label (kept verbatim —
 * it is runtime data, not a comment).
 */
@SuppressWarnings("checkstyle:RegexpSingleline")
public enum DatasourcePluginTypeEnum {
    DATABASE(1, "database", "传统数据库"),
    FILE(2, "file", "文件"),
    NO_STRUCTURED(3, "no_structured", "非结构化数据(NoSQLs)"),
    STORAGE(4, "storage", "存储"),
    REMOTE_CONNECTION(5, "remote_connection", "远程连接");

    private final Integer code;

    private final String name;

    private final String chineseName;

    DatasourcePluginTypeEnum(Integer code, String name, String chineseName) {
        // java.util.Objects.requireNonNull replaces the former Guava
        // checkNotNull: identical null check, no third-party dependency.
        this.code = java.util.Objects.requireNonNull(code);
        this.name = java.util.Objects.requireNonNull(name);
        this.chineseName = java.util.Objects.requireNonNull(chineseName);
    }

    public Integer getCode() {
        return code;
    }

    public String getName() {
        return name;
    }

    public String getChineseName() {
        return chineseName;
    }
}
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.api.common; + +import org.apache.seatunnel.common.utils.JsonUtils; + +import java.util.Map; + +public class ParamtersUtils { + /** + * for some parameters, we need to convert them to {@link Map} eg: s3Options { "access.value": + * "org.apache.hadoop.fs.s3a.S3AFileSystem", "access.key": "AKIAIOSFODNN7EXAMPLE", + * "hadoop_s3_properties": " fs.s3a.impl = org.apache.hadoop.fs.s3a.S3AFileSystem + * fs.s3a.access.key = AKIAIOSFODNN7EXAMPLE " + * + *

Convert parameters to {@link Map} + * + * @param parameters parameters {@link Map} + * @return {@link Map} + */ + public static Map convertParams(Map parameters) { + String json = JsonUtils.toJsonString(parameters); + return JsonUtils.toMap(json, String.class, Object.class); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-plugins-api/src/main/java/org/apache/seatunnel/datasource/plugin/api/model/TableField.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-plugins-api/src/main/java/org/apache/seatunnel/datasource/plugin/api/model/TableField.java new file mode 100644 index 000000000..b9820c602 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-plugins-api/src/main/java/org/apache/seatunnel/datasource/plugin/api/model/TableField.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.apache.seatunnel.datasource.plugin.api.model;

import lombok.Data;

import java.util.Map;

/**
 * One column of a datasource table, as produced by
 * {@code DataSourceChannel.getTableFields(...)}. Plain mutable DTO; Lombok
 * {@code @Data} supplies getters, setters, equals/hashCode and toString.
 */
@Data
public class TableField {

    // Database-native type name (TYPE_NAME from JDBC column metadata).
    private String type;

    // Column name (COLUMN_NAME from JDBC column metadata).
    private String name;

    // Column remarks/comment (REMARKS from JDBC column metadata); may be null.
    private String comment;

    // True when the column is (part of) the table's primary key.
    private Boolean primaryKey;

    // NOTE(review): not populated by any code visible in this change — confirm writers.
    private String defaultValue;

    // Whether the column accepts NULLs (derived from IS_NULLABLE metadata).
    private Boolean nullable;

    // Raw Map in this copy — the generic parameters appear stripped;
    // TODO confirm element types against the original source.
    private Map properties;

    // NOTE(review): presumably flags columns whose type cannot be mapped — confirm.
    private Boolean unSupport;

    // NOTE(review): presumably the engine-side output type for the column — confirm.
    private String outputDataType;
}
/** Small helpers for manipulating JDBC connection URLs. */
public class JdbcUtils {

    /**
     * Returns {@code jdbcUrl} with its database path segment replaced by
     * {@code databaseName}, preserving any query string.
     *
     * <p>FIX(review): the URL is now split at the FIRST '?'. The previous
     * {@code split("\\?")} silently truncated query strings containing a
     * second '?' (only {@code split[1]} was re-appended), and a trailing
     * lone '?' was left glued to the path segment.
     *
     * @param jdbcUrl e.g. {@code jdbc:redshift://localhost:5439/dev?ssl=true}
     * @param databaseName replacement database; when null the URL is returned unchanged
     * @return the rewritten URL
     */
    public static String replaceDatabase(String jdbcUrl, String databaseName) {
        if (databaseName == null) {
            return jdbcUrl;
        }
        int queryStart = jdbcUrl.indexOf('?');
        if (queryStart < 0) {
            return replaceDatabaseWithoutParameter(jdbcUrl, databaseName);
        }
        return replaceDatabaseWithoutParameter(jdbcUrl.substring(0, queryStart), databaseName)
                + jdbcUrl.substring(queryStart);
    }

    /**
     * Replaces (or appends) the database segment of a query-less JDBC URL:
     * everything after the first '/' that follows the last ':' (the port
     * separator) is replaced; when no such '/' exists, "/database" is appended.
     */
    private static String replaceDatabaseWithoutParameter(String jdbcUrl, String databaseName) {
        int lastColon = jdbcUrl.lastIndexOf(':');
        int slash = jdbcUrl.indexOf('/', lastColon + 1);
        if (slash >= 0) {
            return jdbcUrl.substring(0, slash + 1) + databaseName;
        }
        return jdbcUrl + "/" + databaseName;
    }
}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.api.common; + +import org.apache.seatunnel.common.utils.JsonUtils; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.util.HashMap; +import java.util.Map; + +public class ParamtersUtilsTest { + + @Test + public void testConvertParams() { + Map s3Options = new HashMap<>(); + s3Options.put("access.key", "myaccess"); + s3Options.put("access.value", "myvalue"); + Map hadoopConfig = new HashMap<>(); + hadoopConfig.put("fs.s3a.impl", "org.apache.hadoop.fs.s3a.S3AFileSystem"); + s3Options.put("hadoopConfig", hadoopConfig); + String s3OptionsJson = JsonUtils.toJsonString(s3Options); + Map s3OptionsMap = + JsonUtils.toMap(s3OptionsJson, String.class, String.class); + + Map s3OptionsMapConvertResult = ParamtersUtils.convertParams(s3OptionsMap); + Assertions.assertEquals(s3OptionsMapConvertResult.get("hadoopConfig"), hadoopConfig); + Assertions.assertEquals( + s3OptionsMapConvertResult.get("access.key"), s3Options.get("access.key")); + Assertions.assertEquals( + s3OptionsMapConvertResult.get("access.value"), s3Options.get("access.value")); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/pom.xml b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/pom.xml new file mode 100644 index 000000000..61b84cb8b --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/pom.xml @@ -0,0 +1,69 @@ + + + + 4.0.0 + + org.apache.seatunnel + seatunnel-datasource-plugins + ${revision} 
+ + + datasource-s3-redshift + + + + org.apache.seatunnel + datasource-plugins-api + ${project.version} + provided + + + org.apache.seatunnel + seatunnel-hadoop3-3.1.4-uber + 2.4.5-WS-SNAPSHOT + + + org.apache.avro + avro + + + + + com.amazon.redshift + redshift-jdbc42 + ${redshift.version} + provided + + + org.apache.hadoop + hadoop-aws + ${hadoop-aws.version} + + + jdk.tools + jdk.tools + + + + + com.amazonaws + aws-java-sdk-bundle + ${aws-java-sdk-bundle.version} + + + + diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/HadoopS3AConfiguration.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/HadoopS3AConfiguration.java new file mode 100644 index 000000000..8da509c12 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/HadoopS3AConfiguration.java @@ -0,0 +1,101 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.redshift.s3; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hadoop.conf.Configuration; + +import lombok.extern.slf4j.Slf4j; + +import java.util.Arrays; +import java.util.Map; + +import static org.apache.hadoop.fs.FileSystem.FS_DEFAULT_NAME_KEY; + +@Slf4j +public class HadoopS3AConfiguration { + + /* S3 constants */ + private static final String S3A_SCHEMA = "s3a"; + private static final String HDFS_S3N_IMPL = "org.apache.hadoop.fs.s3native.NativeS3FileSystem"; + private static final String HDFS_S3A_IMPL = "org.apache.hadoop.fs.s3a.S3AFileSystem"; + private static final String S3A_PROTOCOL = "s3a"; + private static final String DEFAULT_PROTOCOL = "s3n"; + private static final String S3_FORMAT_KEY = "fs.%s.%s"; + private static final String HDFS_IMPL_KEY = "impl"; + + public static Configuration getConfiguration(Map s3Options) { + + if (!s3Options.containsKey(S3RedshiftOptionRule.BUCKET.key())) { + throw new IllegalArgumentException( + "S3Redshift datasource bucket is null, please check your config"); + } + if (!s3Options.containsKey(S3RedshiftOptionRule.FS_S3A_ENDPOINT.key())) { + throw new IllegalArgumentException( + "S3Redshift datasource endpoint is null, please check your config"); + } + String bucket = s3Options.get(S3RedshiftOptionRule.BUCKET.key()); + + String protocol = DEFAULT_PROTOCOL; + if (bucket.startsWith(S3A_PROTOCOL)) { + protocol = S3A_PROTOCOL; + } + String fsImpl = protocol.equals(S3A_PROTOCOL) ? 
HDFS_S3A_IMPL : HDFS_S3N_IMPL; + Configuration hadoopConf = new Configuration(); + hadoopConf.set(FS_DEFAULT_NAME_KEY, bucket); + hadoopConf.set( + S3RedshiftOptionRule.FS_S3A_ENDPOINT.key(), + s3Options.get(S3RedshiftOptionRule.FS_S3A_ENDPOINT.key())); + hadoopConf.set(formatKey(protocol, HDFS_IMPL_KEY), fsImpl); + if (s3Options.containsKey(S3RedshiftOptionRule.HADOOP_S3_PROPERTIES.key())) { + Arrays.stream( + s3Options + .get(S3RedshiftOptionRule.HADOOP_S3_PROPERTIES.key()) + .split("\n")) + .map(String::trim) + .filter(StringUtils::isNotBlank) + .forEach( + line -> { + String[] kv = line.split("="); + if (kv.length == 2) { + hadoopConf.set(kv[0].trim(), kv[1].trim()); + } + }); + } + if (S3RedshiftOptionRule.S3aAwsCredentialsProvider.SimpleAWSCredentialsProvider + .getProvider() + .equals(s3Options.get(S3RedshiftOptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key()))) { + hadoopConf.set( + S3RedshiftOptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key(), + s3Options.get(S3RedshiftOptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key())); + hadoopConf.set( + "fs.s3a.access.key", s3Options.get(S3RedshiftOptionRule.ACCESS_KEY.key())); + hadoopConf.set( + "fs.s3a.secret.key", s3Options.get(S3RedshiftOptionRule.SECRET_KEY.key())); + } else { + hadoopConf.set( + S3RedshiftOptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key(), + s3Options.get(S3RedshiftOptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key())); + } + return hadoopConf; + } + + private static String formatKey(String protocol, String key) { + return String.format(S3_FORMAT_KEY, protocol, key); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/S3RedshiftDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/S3RedshiftDataSourceChannel.java new file mode 100644 index 000000000..de9e2b09e --- /dev/null +++ 
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/S3RedshiftDataSourceChannel.java @@ -0,0 +1,269 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.redshift.s3; + +import org.apache.seatunnel.api.configuration.util.OptionRule; +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException; +import org.apache.seatunnel.datasource.plugin.api.model.TableField; +import org.apache.seatunnel.datasource.plugin.api.utils.JdbcUtils; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; + +import com.google.common.collect.Sets; +import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; + +import java.io.IOException; +import java.net.InetSocketAddress; +import java.net.Socket; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import 
java.util.Map; +import java.util.Set; + +@Slf4j +public class S3RedshiftDataSourceChannel implements DataSourceChannel { + + @Override + public OptionRule getDataSourceOptions(@NonNull String pluginName) { + return S3RedshiftOptionRule.optionRule(); + } + + @Override + public OptionRule getDatasourceMetadataFieldsByDataSourceName(@NonNull String pluginName) { + return S3RedshiftOptionRule.metadataRule(); + } + + @Override + public List getTables( + @NonNull String pluginName, Map requestParams, String database) { + return getTableNames(requestParams, database); + } + + @Override + public List getDatabases( + @NonNull String pluginName, @NonNull Map requestParams) { + try { + return getDataBaseNames(pluginName, requestParams); + } catch (SQLException e) { + throw new DataSourcePluginException("Query redshift databases failed", e); + } + } + + @Override + public boolean checkDataSourceConnectivity( + @NonNull String pluginName, @NonNull Map requestParams) { + checkHdfsS3Connection(requestParams); + checkJdbcConnection(requestParams); + return true; + } + + @Override + public List getTableFields( + @NonNull String pluginName, + @NonNull Map requestParams, + @NonNull String database, + @NonNull String table) { + return getTableFields(requestParams, database, table); + } + + @Override + public Map> getTableFields( + @NonNull String pluginName, + @NonNull Map requestParams, + @NonNull String database, + @NonNull List tables) { + // not need this method + return null; + } + + private void checkJdbcConnection(Map requestParams) { + String jdbcUrl = requestParams.get(S3RedshiftOptionRule.JDBC_URL.key()); + String username = requestParams.get(S3RedshiftOptionRule.JDBC_USER.key()); + String password = requestParams.get(S3RedshiftOptionRule.JDBC_PASSWORD.key()); + if (StringUtils.isBlank(jdbcUrl)) { + throw new DataSourcePluginException("Redshift Jdbc url is empty"); + } + if (StringUtils.isBlank(username) && StringUtils.isBlank(password)) { + try (Connection ignored = 
DriverManager.getConnection(jdbcUrl)) { + log.info("Redshift jdbc connection is valid"); + return; + } catch (SQLException e) { + throw new DataSourcePluginException( + "Check Redshift jdbc connection failed,please check your config", e); + } + } + try (Connection ignored = DriverManager.getConnection(jdbcUrl, username, password)) { + log.info("Redshift jdbc connection is valid"); + } catch (SQLException e) { + throw new DataSourcePluginException( + "Check Redshift jdbc connection failed,please check your config", e); + } + } + + private void checkHdfsS3Connection(Map requestParams) { + Configuration s3Conf = HadoopS3AConfiguration.getConfiguration(requestParams); + try (FileSystem fs = FileSystem.get(s3Conf)) { + fs.getFileStatus(new org.apache.hadoop.fs.Path("/")); + } catch (IOException e) { + throw new DataSourcePluginException( + "S3 configuration is invalid, please check your config", e); + } + } + + protected Connection init(Map requestParams, String databaseName) + throws SQLException { + if (null == requestParams.get(S3RedshiftOptionRule.JDBC_URL.key())) { + throw new DataSourcePluginException("Jdbc url is null"); + } + String url = + JdbcUtils.replaceDatabase( + requestParams.get(S3RedshiftOptionRule.JDBC_URL.key()), databaseName); + if (null != requestParams.get(S3RedshiftOptionRule.JDBC_PASSWORD.key()) + && null != requestParams.get(S3RedshiftOptionRule.JDBC_USER.key())) { + String username = requestParams.get(S3RedshiftOptionRule.JDBC_USER.key()); + String password = requestParams.get(S3RedshiftOptionRule.JDBC_PASSWORD.key()); + return DriverManager.getConnection(url, username, password); + } + return DriverManager.getConnection(url); + } + + protected List getDataBaseNames(String pluginName, Map requestParams) + throws SQLException { + List dbNames = new ArrayList<>(); + try (Connection connection = init(requestParams, null); + PreparedStatement statement = + connection.prepareStatement("select datname from pg_database;"); + ResultSet re = 
statement.executeQuery()) { + while (re.next()) { + String dbName = re.getString("datname"); + if (StringUtils.isNotBlank(dbName) && isNotSystemDatabase(dbName)) { + dbNames.add(dbName); + } + } + return dbNames; + } catch (SQLException e) { + throw new DataSourcePluginException("get databases failed", e); + } + } + + protected List getTableNames(Map requestParams, String dbName) { + List tableNames = new ArrayList<>(); + try (Connection connection = init(requestParams, dbName); ) { + ResultSet resultSet = + connection.getMetaData().getTables(dbName, null, null, new String[] {"TABLE"}); + while (resultSet.next()) { + String tableName = resultSet.getString("TABLE_NAME"); + if (StringUtils.isNotBlank(tableName)) { + tableNames.add(tableName); + } + } + return tableNames; + } catch (SQLException e) { + throw new DataSourcePluginException("get table names failed", e); + } + } + + protected List getTableFields( + Map requestParams, String dbName, String tableName) { + List tableFields = new ArrayList<>(); + try (Connection connection = init(requestParams, dbName); ) { + DatabaseMetaData metaData = connection.getMetaData(); + String primaryKey = getPrimaryKey(metaData, dbName, tableName); + String[] split = tableName.split("\\."); + if (split.length != 2) { + throw new DataSourcePluginException( + "Postgresql tableName should composed by schemaName.tableName"); + } + ResultSet resultSet = metaData.getColumns(dbName, split[0], split[1], null); + while (resultSet.next()) { + TableField tableField = new TableField(); + String columnName = resultSet.getString("COLUMN_NAME"); + tableField.setPrimaryKey(false); + if (StringUtils.isNotBlank(primaryKey) && primaryKey.equals(columnName)) { + tableField.setPrimaryKey(true); + } + tableField.setName(columnName); + tableField.setType(resultSet.getString("TYPE_NAME")); + tableField.setComment(resultSet.getString("REMARKS")); + Object nullable = resultSet.getObject("IS_NULLABLE"); + boolean isNullable = convertToBoolean(nullable); + 
tableField.setNullable(isNullable); + tableFields.add(tableField); + } + } catch (SQLException e) { + throw new DataSourcePluginException("get table fields failed", e); + } + return tableFields; + } + + private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName) + throws SQLException { + ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName); + while (primaryKeysInfo.next()) { + return primaryKeysInfo.getString("COLUMN_NAME"); + } + return null; + } + + @SuppressWarnings("checkstyle:MagicNumber") + private static boolean checkHostConnectable(String host, int port) { + try (Socket socket = new Socket()) { + socket.connect(new InetSocketAddress(host, port), 1000); + return true; + } catch (IOException e) { + + throw new DataSourcePluginException("check host connectable failed", e); + } + } + + private boolean isNotSystemDatabase(String dbName) { + return !POSTGRESQL_SYSTEM_DATABASES.contains(dbName.toLowerCase()); + } + + private boolean convertToBoolean(Object value) { + if (value instanceof Boolean) { + return (Boolean) value; + } + if (value instanceof String) { + return value.equals("TRUE"); + } + return false; + } + + public static final Set POSTGRESQL_SYSTEM_DATABASES = + Sets.newHashSet( + "information_schema", + "pg_catalog", + "root", + "pg_toast", + "pg_temp_1", + "pg_toast_temp_1", + "postgres", + "template0", + "template1"); +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/S3RedshiftDataSourceFactory.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/S3RedshiftDataSourceFactory.java new file mode 100644 index 000000000..ee6aa4ba0 --- /dev/null +++ 
b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/S3RedshiftDataSourceFactory.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.redshift.s3; + +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourceFactory; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; +import org.apache.seatunnel.datasource.plugin.api.DatasourcePluginTypeEnum; + +import com.google.auto.service.AutoService; +import com.google.common.collect.Sets; + +import java.util.Set; + +@AutoService(DataSourceFactory.class) +public class S3RedshiftDataSourceFactory implements DataSourceFactory { + @Override + public String factoryIdentifier() { + return "S3-Redshift"; + } + + @Override + public Set supportedDataSources() { + DataSourcePluginInfo s3DatasourcePluginInfo = + DataSourcePluginInfo.builder() + .name("S3-Redshift") + .type(DatasourcePluginTypeEnum.DATABASE.getCode()) + .version("1.0.0") + .supportVirtualTables(false) + .icon("S3-Redshift") + .icon("S3-Redshift") + .build(); + + return 
Sets.newHashSet(s3DatasourcePluginInfo); + } + + @Override + public DataSourceChannel createChannel() { + return new S3RedshiftDataSourceChannel(); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/S3RedshiftOptionRule.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/S3RedshiftOptionRule.java new file mode 100644 index 000000000..5d69ee803 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/S3RedshiftOptionRule.java @@ -0,0 +1,191 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.redshift.s3; + +import org.apache.seatunnel.api.configuration.Option; +import org.apache.seatunnel.api.configuration.Options; +import org.apache.seatunnel.api.configuration.util.OptionRule; + +import java.util.Arrays; +import java.util.Map; + +public class S3RedshiftOptionRule { + + public static final Option JDBC_URL = + Options.key("jdbc_url") + .stringType() + .noDefaultValue() + .withDescription( + "Redshift jdbc connection url, eg: jdbc:redshift://localhost:5439/test?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8"); + + public static final Option JDBC_USER = + Options.key("jdbc_user") + .stringType() + .noDefaultValue() + .withDescription("Redshift jdbc connection user"); + + public static final Option JDBC_PASSWORD = + Options.key("jdbc_password") + .stringType() + .noDefaultValue() + .withDescription("Redshift jdbc connection password"); + + public static final Option ACCESS_KEY = + Options.key("access_key") + .stringType() + .noDefaultValue() + .withDescription("S3 access key"); + + public static final Option SECRET_KEY = + Options.key("secret_key") + .stringType() + .noDefaultValue() + .withDescription("S3 secret key"); + + public static final Option BUCKET = + Options.key("bucket").stringType().noDefaultValue().withDescription("S3 bucket name"); + + public static final Option FS_S3A_ENDPOINT = + Options.key("fs.s3a.endpoint") + .stringType() + .noDefaultValue() + .withDescription("fs s3a endpoint"); + + public static final Option S3A_AWS_CREDENTIALS_PROVIDER = + Options.key("fs.s3a.aws.credentials.provider") + .enumType(S3aAwsCredentialsProvider.class) + .defaultValue(S3aAwsCredentialsProvider.InstanceProfileCredentialsProvider) + .withDescription("s3a aws credentials provider"); + + public static final Option> HADOOP_S3_PROPERTIES = + Options.key("hadoop_s3_properties") + .mapType() + .noDefaultValue() + .withDescription( + "{" + + "fs.s3a.buffer.dir = /data/st_test/s3a\n" + 
+ "fs.s3a.fast.upload.buffer = disk\n" + + "}"); + + public static OptionRule optionRule() { + return OptionRule.builder() + .required( + JDBC_URL, + BUCKET, + FS_S3A_ENDPOINT, + S3A_AWS_CREDENTIALS_PROVIDER, + JDBC_USER, + JDBC_PASSWORD) + .optional(HADOOP_S3_PROPERTIES) + .conditional( + S3A_AWS_CREDENTIALS_PROVIDER, + S3aAwsCredentialsProvider.SimpleAWSCredentialsProvider, + ACCESS_KEY, + SECRET_KEY) + .build(); + } + + public static final Option PATH = + Options.key("path").stringType().noDefaultValue().withDescription("S3 write path"); + + public static final Option TYPE = + Options.key("file_format_type") + .enumType(FileFormat.class) + .noDefaultValue() + .withDescription("S3 write type"); + + public static final Option DELIMITER = + Options.key("delimiter") + .stringType() + .noDefaultValue() + .withDescription("S3 write delimiter"); + + public static final Option> SCHEMA = + Options.key("schema").mapType().noDefaultValue().withDescription("SeaTunnel Schema"); + + public static final Option PARSE_PARSE_PARTITION_FROM_PATH = + Options.key("parse_partition_from_path") + .booleanType() + .noDefaultValue() + .withDescription("S3 write parse_partition_from_path"); + + public static final Option DATE_FORMAT = + Options.key("date_format") + .stringType() + .noDefaultValue() + .withDescription("S3 write date_format"); + + public static final Option DATETIME_FORMAT = + Options.key("time_format") + .stringType() + .noDefaultValue() + .withDescription("S3 write time_format"); + + public static final Option TIME_FORMAT = + Options.key("datetime_format") + .stringType() + .noDefaultValue() + .withDescription("S3 write datetime_format"); + + public static OptionRule metadataRule() { + return OptionRule.builder() + .required(PATH, TYPE) + .conditional(TYPE, FileFormat.TEXT, DELIMITER) + .conditional(TYPE, Arrays.asList(FileFormat.TEXT, FileFormat.JSON), SCHEMA) + .optional(PARSE_PARSE_PARTITION_FROM_PATH) + .optional(DATE_FORMAT) + .optional(DATETIME_FORMAT) + 
.optional(TIME_FORMAT) + .build(); + } + + public enum S3aAwsCredentialsProvider { + SimpleAWSCredentialsProvider("org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider"), + + InstanceProfileCredentialsProvider("com.amazonaws.auth.InstanceProfileCredentialsProvider"); + + private String provider; + + S3aAwsCredentialsProvider(String provider) { + this.provider = provider; + } + + public String getProvider() { + return provider; + } + + @Override + public String toString() { + return provider; + } + } + + public enum FileFormat { + CSV("csv"), + TEXT("txt"), + PARQUET("parquet"), + ORC("orc"), + JSON("json"); + + private final String type; + + FileFormat(String type) { + this.type = type; + } + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/pom.xml b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/pom.xml new file mode 100644 index 000000000..ae402efd2 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/pom.xml @@ -0,0 +1,48 @@ + + + + 4.0.0 + + org.apache.seatunnel + seatunnel-datasource-plugins + ${revision} + + + datasource-s3 + + + + org.apache.seatunnel + datasource-plugins-api + ${project.version} + provided + + + org.apache.seatunnel + seatunnel-hadoop3-3.1.4-uber + + + org.apache.hadoop + hadoop-aws + + + com.amazonaws + aws-java-sdk-bundle + + + + diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/HadoopS3AConfiguration.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/HadoopS3AConfiguration.java new file mode 100644 index 000000000..3c18b2965 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/HadoopS3AConfiguration.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor 
license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.s3; + +import org.apache.seatunnel.shade.com.typesafe.config.Config; +import org.apache.seatunnel.shade.com.typesafe.config.ConfigFactory; + +import org.apache.hadoop.conf.Configuration; + +import lombok.extern.slf4j.Slf4j; + +import java.util.Map; + +import static org.apache.hadoop.fs.FileSystem.FS_DEFAULT_NAME_KEY; + +@Slf4j +public class HadoopS3AConfiguration { + + /* S3 constants */ + private static final String S3A_SCHEMA = "s3a"; + private static final String HDFS_S3N_IMPL = "org.apache.hadoop.fs.s3native.NativeS3FileSystem"; + private static final String HDFS_S3A_IMPL = "org.apache.hadoop.fs.s3a.S3AFileSystem"; + private static final String S3A_PROTOCOL = "s3a"; + private static final String DEFAULT_PROTOCOL = "s3n"; + private static final String S3_FORMAT_KEY = "fs.%s.%s"; + private static final String HDFS_IMPL_KEY = "impl"; + + public static Configuration getConfiguration(Map s3Options) { + + if (!s3Options.containsKey(S3OptionRule.BUCKET.key())) { + throw new IllegalArgumentException( + "S3 datasource bucket is null, please check your config"); + } + if (!s3Options.containsKey(S3OptionRule.FS_S3A_ENDPOINT.key())) { + throw new IllegalArgumentException( + "S3 datasource endpoint is null, please 
check your config"); + } + String bucket = s3Options.get(S3OptionRule.BUCKET.key()); + + String protocol = DEFAULT_PROTOCOL; + if (bucket.startsWith(S3A_PROTOCOL)) { + protocol = S3A_PROTOCOL; + } + String fsImpl = protocol.equals(S3A_PROTOCOL) ? HDFS_S3A_IMPL : HDFS_S3N_IMPL; + Configuration hadoopConf = new Configuration(); + hadoopConf.set(FS_DEFAULT_NAME_KEY, bucket); + hadoopConf.set( + S3OptionRule.FS_S3A_ENDPOINT.key(), + s3Options.get(S3OptionRule.FS_S3A_ENDPOINT.key())); + hadoopConf.set(formatKey(protocol, HDFS_IMPL_KEY), fsImpl); + if (s3Options.containsKey(S3OptionRule.HADOOP_S3_PROPERTIES.key())) { + Config configObject = + ConfigFactory.parseString( + s3Options.get(S3OptionRule.HADOOP_S3_PROPERTIES.key())); + configObject + .entrySet() + .forEach( + entry -> { + hadoopConf.set( + entry.getKey(), entry.getValue().unwrapped().toString()); + }); + } + if (S3OptionRule.S3aAwsCredentialsProvider.SimpleAWSCredentialsProvider.getProvider() + .equals(s3Options.get(S3OptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key()))) { + hadoopConf.set( + S3OptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key(), + s3Options.get(S3OptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key())); + hadoopConf.set("fs.s3a.access.key", s3Options.get(S3OptionRule.ACCESS_KEY.key())); + hadoopConf.set("fs.s3a.secret.key", s3Options.get(S3OptionRule.SECRET_KEY.key())); + } else { + hadoopConf.set( + S3OptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key(), + s3Options.get(S3OptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key())); + } + return hadoopConf; + } + + private static String formatKey(String protocol, String key) { + return String.format(S3_FORMAT_KEY, protocol, key); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/S3DataSourceFactory.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/S3DataSourceFactory.java new file mode 100644 index 
000000000..5b2236562 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/S3DataSourceFactory.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.s3; + +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourceFactory; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; +import org.apache.seatunnel.datasource.plugin.api.DatasourcePluginTypeEnum; + +import com.google.auto.service.AutoService; +import com.google.common.collect.Sets; + +import java.util.Set; + +@AutoService(DataSourceFactory.class) +public class S3DataSourceFactory implements DataSourceFactory { + + private static final String PLUGIN_NAME = "S3"; + + @Override + public String factoryIdentifier() { + return PLUGIN_NAME; + } + + @Override + public Set supportedDataSources() { + DataSourcePluginInfo s3DatasourcePluginInfo = + DataSourcePluginInfo.builder() + .name(PLUGIN_NAME) + .type(DatasourcePluginTypeEnum.FILE.getCode()) + .version("1.0.0") + .supportVirtualTables(false) + .icon("S3File") + .build(); + + return Sets.newHashSet(s3DatasourcePluginInfo); + } + + @Override + public DataSourceChannel createChannel() { + return new S3DatasourceChannel(); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/S3DatasourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/S3DatasourceChannel.java new file mode 100644 index 000000000..cf0ee45b9 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/S3DatasourceChannel.java @@ -0,0 +1,90 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.s3; + +import org.apache.seatunnel.api.configuration.util.OptionRule; +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException; +import org.apache.seatunnel.datasource.plugin.api.model.TableField; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; + +import lombok.NonNull; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +public class S3DatasourceChannel implements DataSourceChannel { + @Override + public OptionRule getDataSourceOptions(@NonNull String pluginName) { + return S3OptionRule.optionRule(); + } + + @Override + public OptionRule getDatasourceMetadataFieldsByDataSourceName(@NonNull String pluginName) { + return S3OptionRule.metadataRule(); + } + + @Override + public List getTables( + @NonNull String pluginName, Map requestParams, String database) { + throw new UnsupportedOperationException("getTables is not supported for S3 datasource"); + } + + @Override + public List getDatabases( + @NonNull String pluginName, @NonNull Map requestParams) { + throw new UnsupportedOperationException("getDatabases is not supported for S3 datasource"); + } + + @Override + public boolean checkDataSourceConnectivity( + @NonNull String pluginName, @NonNull 
Map requestParams) { + Configuration conf = HadoopS3AConfiguration.getConfiguration(requestParams); + try (FileSystem fs = FileSystem.get(conf)) { + fs.listStatus(new Path("/")); + return true; + } catch (IOException e) { + throw new DataSourcePluginException( + String.format("check s3 connectivity failed, config is: %s", requestParams), e); + } + } + + @Override + public List getTableFields( + @NonNull String pluginName, + @NonNull Map requestParams, + @NonNull String database, + @NonNull String table) { + throw new UnsupportedOperationException( + "getTableFields is not supported for S3 datasource"); + } + + @Override + public Map> getTableFields( + @NonNull String pluginName, + @NonNull Map requestParams, + @NonNull String database, + @NonNull List tables) { + throw new UnsupportedOperationException( + "getTableFields is not supported for S3 datasource"); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/S3OptionRule.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/S3OptionRule.java new file mode 100644 index 000000000..ba56f303a --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/S3OptionRule.java @@ -0,0 +1,166 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.s3; + +import org.apache.seatunnel.api.configuration.Option; +import org.apache.seatunnel.api.configuration.Options; +import org.apache.seatunnel.api.configuration.util.OptionRule; + +import java.util.Arrays; +import java.util.Map; + +public class S3OptionRule { + + public static final Option ACCESS_KEY = + Options.key("access_key") + .stringType() + .noDefaultValue() + .withDescription("S3 access key"); + + public static final Option SECRET_KEY = + Options.key("secret_key") + .stringType() + .noDefaultValue() + .withDescription("S3 secret key"); + + public static final Option BUCKET = + Options.key("bucket").stringType().noDefaultValue().withDescription("S3 bucket name"); + + public static final Option FS_S3A_ENDPOINT = + Options.key("fs.s3a.endpoint") + .stringType() + .noDefaultValue() + .withDescription("fs s3a endpoint"); + + public static final Option S3A_AWS_CREDENTIALS_PROVIDER = + Options.key("fs.s3a.aws.credentials.provider") + .enumType(S3aAwsCredentialsProvider.class) + .defaultValue(S3aAwsCredentialsProvider.InstanceProfileCredentialsProvider) + .withDescription("s3a aws credentials provider"); + + public static final Option> HADOOP_S3_PROPERTIES = + Options.key("hadoop_s3_properties") + .mapType() + .noDefaultValue() + .withDescription( + "{\n" + + "fs.s3a.buffer.dir=/data/st_test/s3a\n" + + "fs.s3a.fast.upload.buffer=disk\n" + + "}"); + + public static OptionRule optionRule() { + return OptionRule.builder() + .required(BUCKET, FS_S3A_ENDPOINT, S3A_AWS_CREDENTIALS_PROVIDER) + 
.optional(HADOOP_S3_PROPERTIES) + .conditional( + S3A_AWS_CREDENTIALS_PROVIDER, + S3aAwsCredentialsProvider.SimpleAWSCredentialsProvider, + ACCESS_KEY, + SECRET_KEY) + .build(); + } + + public static final Option PATH = + Options.key("path").stringType().noDefaultValue().withDescription("S3 write path"); + + public static final Option TYPE = + Options.key("file_format_type") + .enumType(FileFormat.class) + .noDefaultValue() + .withDescription("S3 write type"); + + public static final Option DELIMITER = + Options.key("delimiter") + .stringType() + .noDefaultValue() + .withDescription("S3 write delimiter"); + + public static final Option> SCHEMA = + Options.key("schema").mapType().noDefaultValue().withDescription("SeaTunnel Schema"); + + public static final Option PARSE_PARSE_PARTITION_FROM_PATH = + Options.key("parse_partition_from_path") + .booleanType() + .noDefaultValue() + .withDescription("S3 write parse_partition_from_path"); + + public static final Option DATE_FORMAT = + Options.key("date_format") + .stringType() + .noDefaultValue() + .withDescription("S3 write date_format"); + + public static final Option DATETIME_FORMAT = + Options.key("time_format") + .stringType() + .noDefaultValue() + .withDescription("S3 write time_format"); + + public static final Option TIME_FORMAT = + Options.key("datetime_format") + .stringType() + .noDefaultValue() + .withDescription("S3 write datetime_format"); + + public static OptionRule metadataRule() { + return OptionRule.builder() + .required(PATH, TYPE) + .conditional(TYPE, FileFormat.TEXT, DELIMITER) + .conditional(TYPE, Arrays.asList(FileFormat.TEXT, FileFormat.JSON), SCHEMA) + .optional(PARSE_PARSE_PARTITION_FROM_PATH) + .optional(DATE_FORMAT) + .optional(DATETIME_FORMAT) + .optional(TIME_FORMAT) + .build(); + } + + public enum S3aAwsCredentialsProvider { + SimpleAWSCredentialsProvider("org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider"), + + 
InstanceProfileCredentialsProvider("com.amazonaws.auth.InstanceProfileCredentialsProvider"); + + private String provider; + + S3aAwsCredentialsProvider(String provider) { + this.provider = provider; + } + + public String getProvider() { + return provider; + } + + @Override + public String toString() { + return provider; + } + } + + public enum FileFormat { + CSV("csv"), + TEXT("txt"), + PARQUET("parquet"), + ORC("orc"), + JSON("json"); + + private final String type; + + FileFormat(String type) { + this.type = type; + } + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/pom.xml b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/pom.xml new file mode 100644 index 000000000..2f9fe5881 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/pom.xml @@ -0,0 +1,55 @@ + + + + 4.0.0 + + org.apache.seatunnel + seatunnel-datasource-plugins + ${revision} + + + datasource-sqlserver-cdc + + + 9.2.1.jre8 + + + + + org.apache.seatunnel + datasource-plugins-api + ${project.version} + provided + + + org.apache.seatunnel + seatunnel-api + provided + + + com.google.auto.service + auto-service + + + + com.microsoft.sqlserver + mssql-jdbc + ${sqlserver.version} + provided + + + diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/SqlServerCDCDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/SqlServerCDCDataSourceChannel.java new file mode 100644 index 000000000..8a3d39c1e --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/SqlServerCDCDataSourceChannel.java @@ -0,0 +1,231 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * 
contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.cdc.sqlserver; + +import org.apache.seatunnel.api.configuration.util.OptionRule; +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException; +import org.apache.seatunnel.datasource.plugin.api.model.TableField; + +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.tuple.Pair; + +import com.google.common.collect.Sets; +import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; + +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +@Slf4j +public class SqlServerCDCDataSourceChannel implements DataSourceChannel { + + public static final Set MYSQL_SYSTEM_DATABASES = + Sets.newHashSet("master", "tempdb", "model", "msdb"); + + @Override + public boolean canAbleGetSchema() { + return true; + } + + @Override + public OptionRule getDataSourceOptions(@NonNull String pluginName) { + return 
SqlServerCDCOptionRule.optionRule(); + } + + @Override + public OptionRule getDatasourceMetadataFieldsByDataSourceName(@NonNull String pluginName) { + return SqlServerCDCOptionRule.metadataRule(); + } + + @Override + public List getTables( + String pluginName, Map requestParams, String database) { + return this.getTableNames(requestParams, database); + } + + @Override + public List getDatabases(String pluginName, Map requestParams) { + try { + return this.getDataBaseNames(requestParams); + } catch (SQLException e) { + throw new DataSourcePluginException("get databases failed", e); + } + } + + @Override + public boolean checkDataSourceConnectivity( + String pluginName, Map requestParams) { + try (Connection connection = init(requestParams); + PreparedStatement statement = connection.prepareStatement("SELECT 1"); + ResultSet rs = statement.executeQuery()) { + return rs.next(); + } catch (SQLException e) { + throw new DataSourcePluginException("connect datasource failed", e); + } + } + + @Override + public List getTableFields( + String pluginName, Map requestParams, String database, String table) { + Pair pair = parseSchemaAndTable(table); + return getTableFields(requestParams, database, pair.getLeft(), pair.getRight()); + } + + @Override + public Map> getTableFields( + String pluginName, + Map requestParams, + String database, + List tables) { + Map> tableFields = new HashMap<>(tables.size()); + for (String table : tables) { + tableFields.put(table, getTableFields(pluginName, requestParams, database, table)); + } + return tableFields; + } + + private Connection init(Map requestParams) throws SQLException { + if (null == requestParams.get(SqlServerCDCOptionRule.BASE_URL.key())) { + throw new DataSourcePluginException("Jdbc url is null"); + } + String url = requestParams.get(SqlServerCDCOptionRule.BASE_URL.key()); + if (null != requestParams.get(SqlServerCDCOptionRule.PASSWORD.key()) + && null != requestParams.get(SqlServerCDCOptionRule.USERNAME.key())) { + String 
username = requestParams.get(SqlServerCDCOptionRule.USERNAME.key()); + String password = requestParams.get(SqlServerCDCOptionRule.PASSWORD.key()); + return DriverManager.getConnection(url, username, password); + } + return DriverManager.getConnection(url); + } + + private List getDataBaseNames(Map requestParams) throws SQLException { + List dbNames = new ArrayList<>(); + try (Connection connection = init(requestParams); + PreparedStatement statement = + connection.prepareStatement( + "SELECT NAME FROM SYS.DATABASES WHERE IS_CDC_ENABLED = 1;"); + ResultSet re = statement.executeQuery()) { + // filter system databases + while (re.next()) { + String dbName = re.getString("NAME"); + if (StringUtils.isNotBlank(dbName) && isNotSystemDatabase(dbName)) { + dbNames.add(dbName); + } + } + + return dbNames; + } + } + + private List getTableNames(Map requestParams, String dbName) { + final String sql = + String.format( + "SELECT SCHEMAS.NAME AS SCHEMA_NAME, TABLES.NAME AS TABLE_NAME" + + " FROM %s.SYS.SCHEMAS AS SCHEMAS" + + " JOIN %s.SYS.TABLES AS TABLES" + + " ON SCHEMAS.SCHEMA_ID = TABLES.SCHEMA_ID" + + " AND TABLES.IS_TRACKED_BY_CDC = 1", + dbName, dbName); + + List tableNames = new ArrayList<>(); + try (Connection connection = init(requestParams); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(sql)) { + while (resultSet.next()) { + String schemaName = resultSet.getString("SCHEMA_NAME"); + String tableName = resultSet.getString("TABLE_NAME"); + tableNames.add(schemaName + "." 
+ tableName); + } + return tableNames; + } catch (SQLException e) { + throw new DataSourcePluginException("get table names failed", e); + } + } + + private List getTableFields( + Map requestParams, String dbName, String schemaName, String tableName) { + List tableFields = new ArrayList<>(); + try (Connection connection = init(requestParams); ) { + DatabaseMetaData metaData = connection.getMetaData(); + String primaryKey = getPrimaryKey(metaData, dbName, schemaName, tableName); + ResultSet resultSet = metaData.getColumns(dbName, schemaName, tableName, null); + while (resultSet.next()) { + TableField tableField = new TableField(); + String columnName = resultSet.getString("COLUMN_NAME"); + tableField.setPrimaryKey(false); + if (StringUtils.isNotBlank(primaryKey) && primaryKey.equals(columnName)) { + tableField.setPrimaryKey(true); + } + tableField.setName(columnName); + tableField.setType(resultSet.getString("TYPE_NAME")); + tableField.setComment(resultSet.getString("REMARKS")); + Object nullable = resultSet.getObject("IS_NULLABLE"); + boolean isNullable = convertToBoolean(nullable); + tableField.setNullable(isNullable); + tableFields.add(tableField); + } + } catch (SQLException e) { + throw new DataSourcePluginException("get table fields failed", e); + } + return tableFields; + } + + private String getPrimaryKey( + DatabaseMetaData metaData, String dbName, String schemaName, String tableName) + throws SQLException { + ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, schemaName, tableName); + while (primaryKeysInfo.next()) { + return primaryKeysInfo.getString("COLUMN_NAME"); + } + return null; + } + + private boolean isNotSystemDatabase(String dbName) { + return MYSQL_SYSTEM_DATABASES.stream() + .noneMatch(systemDatabase -> StringUtils.equalsIgnoreCase(systemDatabase, dbName)); + } + + private boolean convertToBoolean(Object value) { + if (value instanceof Boolean) { + return (Boolean) value; + } + if (value instanceof String) { + return 
value.equals("TRUE"); + } + return false; + } + + private Pair parseSchemaAndTable(String tableName) { + String[] schemaAndTable = tableName.split("\\."); + if (schemaAndTable.length != 2) { + throw new DataSourcePluginException("table name is invalid"); + } + return Pair.of(schemaAndTable[0], schemaAndTable[1]); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/SqlServerCDCDataSourceConfig.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/SqlServerCDCDataSourceConfig.java new file mode 100644 index 000000000..4da5b9d57 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/SqlServerCDCDataSourceConfig.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.cdc.sqlserver; + +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; +import org.apache.seatunnel.datasource.plugin.api.DatasourcePluginTypeEnum; + +public class SqlServerCDCDataSourceConfig { + + public static final String PLUGIN_NAME = "SqlServer-CDC"; + + public static final DataSourcePluginInfo SQLSERVER_CDC_DATASOURCE_PLUGIN_INFO = + DataSourcePluginInfo.builder() + .name(PLUGIN_NAME) + .icon(PLUGIN_NAME) + .version("1.0") + .type(DatasourcePluginTypeEnum.DATABASE.getCode()) + .build(); +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/SqlServerCDCDataSourceFactory.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/SqlServerCDCDataSourceFactory.java new file mode 100644 index 000000000..bc4f276f1 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/SqlServerCDCDataSourceFactory.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.cdc.sqlserver; + +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourceFactory; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; + +import com.google.auto.service.AutoService; + +import java.util.Collections; +import java.util.Set; + +@AutoService(DataSourceFactory.class) +public class SqlServerCDCDataSourceFactory implements DataSourceFactory { + + @Override + public String factoryIdentifier() { + return SqlServerCDCDataSourceConfig.PLUGIN_NAME; + } + + @Override + public Set supportedDataSources() { + return Collections.singleton( + SqlServerCDCDataSourceConfig.SQLSERVER_CDC_DATASOURCE_PLUGIN_INFO); + } + + @Override + public DataSourceChannel createChannel() { + return new SqlServerCDCDataSourceChannel(); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/SqlServerCDCOptionRule.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/SqlServerCDCOptionRule.java new file mode 100644 index 000000000..95cf360b0 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/SqlServerCDCOptionRule.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
 You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.cdc.sqlserver; + +import org.apache.seatunnel.api.configuration.Option; +import org.apache.seatunnel.api.configuration.Options; +import org.apache.seatunnel.api.configuration.util.OptionRule; + +public class SqlServerCDCOptionRule { + + public static final Option BASE_URL = + Options.key("base-url") + .stringType() + .noDefaultValue() + .withDescription( + "URL has to be without database, like \"jdbc:sqlserver://localhost:1433;databaseName=test/\""); + + public static final Option USERNAME = + Options.key("username") + .stringType() + .noDefaultValue() + .withDescription( + "Name of the database user to use when connecting to the database server."); + + public static final Option PASSWORD = + Options.key("password") + .stringType() + .noDefaultValue() + .withDescription("Password to use when connecting to the database server."); + + public static final Option DATABASE_NAME = + Options.key("database-name") + .stringType() + .noDefaultValue() + .withDescription("Database name of the database to monitor."); + + public static final Option TABLE_NAME = + Options.key("table-name") + .stringType() + .noDefaultValue() + .withDescription("Table name of the database to monitor."); + public static final Option SERVER_TIME_ZONE = + Options.key("server-time-zone") + .stringType() + .defaultValue("UTC") + .withDescription("The session time zone in database server."); + + public static OptionRule optionRule() { + return OptionRule.builder() + .required(USERNAME, PASSWORD, BASE_URL) + .optional(SERVER_TIME_ZONE) + 
.build(); + } + + public static OptionRule metadataRule() { + return OptionRule.builder().required(DATABASE_NAME, TABLE_NAME).build(); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/test/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/test/TestSqlServerCDCDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/test/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/test/TestSqlServerCDCDataSourceChannel.java new file mode 100644 index 000000000..982c50721 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/test/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/test/TestSqlServerCDCDataSourceChannel.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.cdc.sqlserver.test; + +import org.apache.seatunnel.datasource.plugin.api.model.TableField; +import org.apache.seatunnel.datasource.plugin.cdc.sqlserver.SqlServerCDCDataSourceChannel; +import org.apache.seatunnel.datasource.plugin.cdc.sqlserver.SqlServerCDCDataSourceConfig; + +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +import java.util.List; +import java.util.Map; +import java.util.TreeMap; + +public class TestSqlServerCDCDataSourceChannel { + + @Test + @Disabled + public void testConnect() { + SqlServerCDCDataSourceChannel channel = new SqlServerCDCDataSourceChannel(); + Map requestParams = new TreeMap<>(); + requestParams.put("base-url", "jdbc:sqlserver://localhost:1433;databaseName=test"); + requestParams.put("username", "sa"); + requestParams.put("password", "MyPass@word"); + + for (String database : + channel.getDatabases(SqlServerCDCDataSourceConfig.PLUGIN_NAME, requestParams)) { + final List tables = + channel.getTables( + SqlServerCDCDataSourceConfig.PLUGIN_NAME, requestParams, database); + final Map> tableFields = + channel.getTableFields( + SqlServerCDCDataSourceConfig.PLUGIN_NAME, + requestParams, + database, + tables); + } + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/pom.xml b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/pom.xml new file mode 100644 index 000000000..07e1d40c8 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/pom.xml @@ -0,0 +1,60 @@ + + + + 4.0.0 + + org.apache.seatunnel + seatunnel-datasource-plugins + ${revision} + + + datasource-starrocks + + + 8.0.16 + + + + + org.apache.seatunnel + datasource-plugins-api + ${project.version} + provided + + + org.apache.commons + commons-lang3 + + + + com.google.auto.service + auto-service + + + org.apache.seatunnel + seatunnel-api + provided + + + mysql + mysql-connector-java + ${mysql.version} + 
provided + + + + diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksCatalog.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksCatalog.java new file mode 100644 index 000000000..4acc43e60 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksCatalog.java @@ -0,0 +1,225 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.starrocks; + +import org.apache.seatunnel.api.table.catalog.PrimaryKey; +import org.apache.seatunnel.api.table.catalog.TablePath; +import org.apache.seatunnel.api.table.catalog.exception.CatalogException; +import org.apache.seatunnel.api.table.catalog.exception.DatabaseNotExistException; +import org.apache.seatunnel.api.table.catalog.exception.TableNotExistException; +import org.apache.seatunnel.datasource.plugin.api.model.TableField; + +import org.apache.commons.lang3.StringUtils; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Optional; +import java.util.Set; + +import static com.google.common.base.Preconditions.checkArgument; + +public class StarRocksCatalog { + + protected final String catalogName; + protected final String username; + protected final String pwd; + protected final String baseUrl; + protected final String defaultUrl; + + private static final Set SYS_DATABASES = new HashSet<>(); + private static final Logger LOG = LoggerFactory.getLogger(StarRocksCatalog.class); + + static { + SYS_DATABASES.add("information_schema"); + SYS_DATABASES.add("_statistics_"); + } + + public StarRocksCatalog(String catalogName, String username, String pwd, String defaultUrl) { + + checkArgument(StringUtils.isNotBlank(username)); + checkArgument(StringUtils.isNotBlank(pwd)); + checkArgument(StringUtils.isNotBlank(defaultUrl)); + + defaultUrl = defaultUrl.trim(); + this.catalogName = catalogName; + this.username = username; + this.pwd = pwd; + this.defaultUrl = defaultUrl; + if (validateJdbcUrlWithDatabase(defaultUrl)) { + this.baseUrl = splitDefaultUrl(defaultUrl); + } else { + this.baseUrl = defaultUrl.endsWith("/") ? 
defaultUrl : defaultUrl + "/"; + } + } + + public List listDatabases() throws CatalogException { + List databases = new ArrayList<>(); + try (Connection conn = DriverManager.getConnection(defaultUrl, username, pwd); + PreparedStatement ps = conn.prepareStatement("SHOW DATABASES;"); + ResultSet rs = ps.executeQuery(); ) { + + while (rs.next()) { + String databaseName = rs.getString(1); + if (!SYS_DATABASES.contains(databaseName)) { + databases.add(rs.getString(1)); + } + } + + return databases; + } catch (Exception e) { + throw new CatalogException( + String.format("Failed listing database in catalog %s", this.catalogName), e); + } + } + + public List listTables(String databaseName) + throws CatalogException, DatabaseNotExistException { + if (!databaseExists(databaseName)) { + throw new DatabaseNotExistException(this.catalogName, databaseName); + } + + try (Connection conn = DriverManager.getConnection(baseUrl + databaseName, username, pwd); + PreparedStatement ps = conn.prepareStatement("SHOW TABLES;"); + ResultSet rs = ps.executeQuery()) { + + List tables = new ArrayList<>(); + + while (rs.next()) { + tables.add(rs.getString(1)); + } + + return tables; + } catch (Exception e) { + throw new CatalogException( + String.format("Failed listing database in catalog %s", catalogName), e); + } + } + + public List getTable(TablePath tablePath) + throws CatalogException, TableNotExistException { + if (!tableExists(tablePath)) { + throw new TableNotExistException(catalogName, tablePath); + } + + String dbUrl = baseUrl + tablePath.getDatabaseName(); + try (Connection conn = DriverManager.getConnection(dbUrl, username, pwd); + PreparedStatement statement = + conn.prepareStatement( + String.format( + "SELECT * FROM %s WHERE 1 = 0;", + String.format( + "`%s`.`%s`", + tablePath.getDatabaseName(), + tablePath.getTableName()))); ) { + + Optional primaryKey = + getPrimaryKey(tablePath.getDatabaseName(), tablePath.getTableName()); + + ResultSetMetaData tableMetaData = 
 statement.getMetaData(); + + List fields = new ArrayList<>(); + for (int i = 1; i <= tableMetaData.getColumnCount(); i++) { + TableField tableField = new TableField(); + tableField.setName(tableMetaData.getColumnName(i)); + tableField.setType(tableMetaData.getColumnTypeName(i)); + tableField.setComment(tableMetaData.getColumnLabel(i)); + tableField.setNullable( + tableMetaData.isNullable(i) == ResultSetMetaData.columnNullable); + tableField.setPrimaryKey( + primaryKey.isPresent() + && primaryKey + .get() + .getColumnNames() + .contains(tableField.getName())); + // TODO add default value + tableField.setDefaultValue(null); + fields.add(tableField); + } + return fields; + } catch (Exception e) { + throw new CatalogException( + String.format("Failed getting table %s", tablePath.getFullName()), e); + } + } + + /** + * @return the base url portion of {@code defaultUrl}, up to and including the last '/' (the trailing database name is stripped). + */ + public static String splitDefaultUrl(String defaultUrl) { + int index = defaultUrl.lastIndexOf("/") + 1; + return defaultUrl.substring(0, index); + } + + protected Optional getPrimaryKey(String schema, String table) throws SQLException { + + List pkFields = new ArrayList<>(); + try (Connection conn = DriverManager.getConnection(defaultUrl, username, pwd); + PreparedStatement statement = + conn.prepareStatement( + String.format( + "SELECT COLUMN_NAME FROM information_schema.columns where TABLE_SCHEMA = '%s' AND TABLE_NAME = '%s' AND COLUMN_KEY = 'PRI' ORDER BY ORDINAL_POSITION", + schema, table)); + ResultSet rs = statement.executeQuery()) { + while (rs.next()) { + String columnName = rs.getString("COLUMN_NAME"); + pkFields.add(columnName); + } + } + if (!pkFields.isEmpty()) { + // PK_NAME maybe null according to the javadoc, generate a unique name in that case + String pkName = "pk_" + String.join("_", pkFields); + return Optional.of(PrimaryKey.of(pkName, pkFields)); + } + return Optional.empty(); + } + + public boolean databaseExists(String databaseName) 
throws CatalogException { + checkArgument(StringUtils.isNotBlank(databaseName)); + + return listDatabases().contains(databaseName); + } + + /** + * URL has to be with database, like "jdbc:mysql://localhost:5432/db" rather than + * "jdbc:mysql://localhost:5432/". + */ + @SuppressWarnings("MagicNumber") + public static boolean validateJdbcUrlWithDatabase(String url) { + String[] parts = url.trim().split("\\/+"); + return parts.length == 3; + } + + public boolean tableExists(TablePath tablePath) throws CatalogException { + try { + return databaseExists(tablePath.getDatabaseName()) + && listTables(tablePath.getDatabaseName()).contains(tablePath.getTableName()); + } catch (DatabaseNotExistException e) { + return false; + } + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksDataSourceChannel.java new file mode 100644 index 000000000..835864a4f --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksDataSourceChannel.java @@ -0,0 +1,148 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.starrocks; + +import org.apache.seatunnel.shade.com.fasterxml.jackson.databind.ObjectMapper; + +import org.apache.seatunnel.api.configuration.util.OptionRule; +import org.apache.seatunnel.api.table.catalog.TablePath; +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException; +import org.apache.seatunnel.datasource.plugin.api.model.TableField; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import lombok.NonNull; + +import java.io.IOException; +import java.net.InetSocketAddress; +import java.net.Socket; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class StarRocksDataSourceChannel implements DataSourceChannel { + + private static final Logger LOGGER = LoggerFactory.getLogger(StarRocksDataSourceChannel.class); + + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + @Override + public boolean canAbleGetSchema() { + return true; + } + + @Override + public OptionRule getDataSourceOptions(@NonNull String pluginName) { + return StarRocksOptionRule.optionRule(); + } + + @Override + public OptionRule getDatasourceMetadataFieldsByDataSourceName(@NonNull String pluginName) { + return StarRocksOptionRule.metadataRule(); + } + + @Override + public List getTables( + @NonNull String pluginName, Map requestParams, String database) { + StarRocksCatalog catalog = getCatalog(requestParams); + return catalog.listTables(database); + } + + @Override + public 
List getDatabases( + @NonNull String pluginName, @NonNull Map requestParams) { + StarRocksCatalog catalog = getCatalog(requestParams); + return catalog.listDatabases(); + } + + @Override + public boolean checkDataSourceConnectivity( + @NonNull String pluginName, @NonNull Map requestParams) { + try { + StarRocksCatalog catalog = getCatalog(requestParams); + String nodeUrls = requestParams.get(StarRocksOptionRule.NODE_URLS.key()); + List nodeList = OBJECT_MAPPER.readValue(nodeUrls, List.class); + if (!telnet(nodeList.get(0))) { + return false; + } + catalog.listDatabases(); + return true; + } catch (Exception e) { + throw new DataSourcePluginException( + "check StarRocks connectivity failed, " + e.getMessage(), e); + } + } + + @SuppressWarnings("checkstyle:MagicNumber") + private static boolean telnet(String nodeUrl) throws IOException { + Socket socket = new Socket(); + boolean isConnected; + try { + String[] hostAndPort = nodeUrl.split(":"); + socket.connect( + new InetSocketAddress(hostAndPort[0], Integer.parseInt(hostAndPort[1])), 1000); + isConnected = socket.isConnected(); + } catch (IOException e) { + LOGGER.error("telnet error", e); + throw e; + } finally { + try { + socket.close(); + } catch (IOException e) { + LOGGER.error("Release Socket Connection Error", e); + } + } + return isConnected; + } + + @Override + public List getTableFields( + @NonNull String pluginName, + @NonNull Map requestParams, + @NonNull String database, + @NonNull String table) { + StarRocksCatalog catalog = getCatalog(requestParams); + return catalog.getTable(TablePath.of(database, table)); + } + + @Override + public Map> getTableFields( + @NonNull String pluginName, + @NonNull Map requestParams, + @NonNull String database, + @NonNull List tables) { + StarRocksCatalog catalog = getCatalog(requestParams); + Map> tableFields = new HashMap<>(); + tables.forEach( + table -> tableFields.put(table, catalog.getTable(TablePath.of(database, table)))); + return tableFields; + } + + private 
StarRocksCatalog getCatalog(Map requestParams) { + try { + String username = requestParams.get(StarRocksOptionRule.USERNAME.key()); + String password = requestParams.get(StarRocksOptionRule.PASSWORD.key()); + String jdbc = requestParams.get(StarRocksOptionRule.BASE_URL.key()); + return new StarRocksCatalog("StarRocks", username, password, jdbc); + } catch (Exception e) { + throw new RuntimeException(e); + } + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksDataSourceConfig.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksDataSourceConfig.java new file mode 100644 index 000000000..3c4063a2c --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksDataSourceConfig.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.starrocks; + +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; +import org.apache.seatunnel.datasource.plugin.api.DatasourcePluginTypeEnum; + +public class StarRocksDataSourceConfig { + + public static final String PLUGIN_NAME = "StarRocks"; + + public static final DataSourcePluginInfo STARROCKS_DATASOURCE_PLUGIN_INFO = + DataSourcePluginInfo.builder() + .name(PLUGIN_NAME) + .icon(PLUGIN_NAME) + .version("1.0.0") + .type(DatasourcePluginTypeEnum.DATABASE.getCode()) + .build(); +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksDataSourceFactory.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksDataSourceFactory.java new file mode 100644 index 000000000..f4ecddec5 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksDataSourceFactory.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.datasource.plugin.starrocks; + +import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel; +import org.apache.seatunnel.datasource.plugin.api.DataSourceFactory; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; + +import com.google.auto.service.AutoService; + +import java.util.Collections; +import java.util.Set; + +@AutoService(DataSourceFactory.class) +public class StarRocksDataSourceFactory implements DataSourceFactory { + @Override + public String factoryIdentifier() { + return "StarRocks"; + } + + @Override + public Set supportedDataSources() { + return Collections.singleton(StarRocksDataSourceConfig.STARROCKS_DATASOURCE_PLUGIN_INFO); + } + + @Override + public DataSourceChannel createChannel() { + return new StarRocksDataSourceChannel(); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksOptionRule.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksOptionRule.java new file mode 100644 index 000000000..6267e1556 --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksOptionRule.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.datasource.plugin.starrocks; + +import org.apache.seatunnel.api.configuration.Option; +import org.apache.seatunnel.api.configuration.Options; +import org.apache.seatunnel.api.configuration.util.OptionRule; + +import java.util.List; + +public class StarRocksOptionRule { + + public static final Option> NODE_URLS = + Options.key("nodeUrls") + .listType() + .noDefaultValue() + .withDescription( + "StarRocks cluster address, the format is [\"fe_ip:fe_http_port\", ...]"); + + public static final Option USERNAME = + Options.key("username") + .stringType() + .noDefaultValue() + .withDescription("StarRocks user username"); + + public static final Option PASSWORD = + Options.key("password") + .stringType() + .noDefaultValue() + .withDescription("StarRocks user password"); + + public static final Option DATABASE = + Options.key("database") + .stringType() + .noDefaultValue() + .withDescription("The name of StarRocks database"); + + public static final Option TABLE = + Options.key("table") + .stringType() + .noDefaultValue() + .withDescription("The name of StarRocks table"); + + public static final Option BASE_URL = + Options.key("base-url") + .stringType() + .noDefaultValue() + .withDescription( + "URL has to be without database, like \"jdbc:mysql://localhost:5432/\" or" + + "\"jdbc:mysql://localhost:5432\" rather than \"jdbc:mysql://localhost:5432/db\""); + + public static OptionRule optionRule() { + return OptionRule.builder().required(NODE_URLS, USERNAME, PASSWORD, BASE_URL).build(); + } + + public static OptionRule 
metadataRule() { + return OptionRule.builder().required(DATABASE, TABLE).build(); + } +} diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/pom.xml b/seatunnel-datasource/seatunnel-datasource-plugins/pom.xml new file mode 100644 index 000000000..2336b7b0a --- /dev/null +++ b/seatunnel-datasource/seatunnel-datasource-plugins/pom.xml @@ -0,0 +1,67 @@ + + + + 4.0.0 + + org.apache.seatunnel + seatunnel-datasource + ${revision} + + + seatunnel-datasource-plugins + pom + + + datasource-plugins-api + datasource-all + datasource-kafka + datasource-elasticsearch + datasource-jdbc-hive + datasource-s3-redshift + datasource-starrocks + datasource-jdbc-clickhouse + datasource-jdbc-mysql + datasource-jdbc-oracle + datasource-jdbc-postgresql + datasource-jdbc-redshift + datasource-jdbc-sqlserver + datasource-jdbc-starrocks + datasource-mysql-cdc + datasource-s3 + datasource-sqlserver-cdc + datasource-jdbc-tidb + + + + + + org.apache.maven.plugins + maven-dependency-plugin + + ${e2e.dependency.skip} + true + + + + + org.apache.maven.plugins + maven-shade-plugin + + + + + diff --git a/seatunnel-server/pom.xml b/seatunnel-server/pom.xml index 79ef3a310..0a623719c 100644 --- a/seatunnel-server/pom.xml +++ b/seatunnel-server/pom.xml @@ -13,133 +13,22 @@ See the License for the specific language governing permissions and limitations under the License. 
--> - + 4.0.0 - seatunnel-web org.apache.seatunnel - 1.0.0-SNAPSHOT + seatunnel-web + ${revision} - 4.0.0 seatunnel-server pom seatunnel-app + seatunnel-dynamicform seatunnel-spi seatunnel-scheduler seatunnel-server-common - - - 2.6.8 - 5.3.20 - 3.5.3.1 - 1.2.9 - 2.6.1 - 1.5.10 - 6.2.2.Final - 1.3.2 - 1.14.3 - 0.10.7 - 9.1.6 - - - - - - - org.springframework.boot - spring-boot-starter-web - ${spring-boot.version} - - - org.springframework.boot - spring-boot-starter-jetty - ${spring-boot.version} - - - org.springframework.boot - spring-boot-starter-aop - ${spring-boot.version} - - - org.springframework.boot - spring-boot-starter-jdbc - ${spring-boot.version} - - - com.alibaba - druid-spring-boot-starter - ${druid-spring-boot-starter.version} - - - - - com.baomidou - mybatis-plus-boot-starter - ${mybatis-plus-boot-starter.version} - - - org.springframework.boot - spring-boot-starter-jdbc - - - spring-boot-autoconfigure - org.springframework.boot - - - - - org.hibernate.validator - hibernate-validator - ${hibernate.validator.version} - - - - - io.springfox - springfox-swagger2 - ${springfox-swagger.version} - - - io.springfox - springfox-swagger-ui - ${springfox-swagger.version} - - - io.swagger - swagger-annotations - ${swagger-annotations.version} - - - - - io.jsonwebtoken - jjwt-api - ${jwt.version} - - - io.jsonwebtoken - jjwt-impl - ${jwt.version} - runtime - - - io.jsonwebtoken - jjwt-jackson - ${jwt.version} - runtime - - - - - org.jsoup - jsoup - ${jsoup.version} - - - - - \ No newline at end of file + diff --git a/seatunnel-server/seatunnel-app/pom.xml b/seatunnel-server/seatunnel-app/pom.xml index aef911857..0d330ec31 100644 --- a/seatunnel-server/seatunnel-app/pom.xml +++ b/seatunnel-server/seatunnel-app/pom.xml @@ -13,28 +13,347 @@ See the License for the specific language governing permissions and limitations under the License. 
--> - + 4.0.0 - seatunnel-server org.apache.seatunnel - 1.0.0-SNAPSHOT + seatunnel-server + ${revision} - 4.0.0 seatunnel-app - - 1.8 - seatunnel-ui - - + + + + io.swagger.core.v3 + swagger-annotations + 2.2.14 + + + + org.awaitility + awaitility + 4.2.0 + + org.apache.seatunnel seatunnel-common + + org.apache.seatunnel + seatunnel-api + + + org.apache.seatunnel + seatunnel-dynamicform + ${project.version} + + + org.apache.seatunnel + seatunnel-plugin-discovery + + + + org.apache.seatunnel + seatunnel-server-common + ${project.version} + + + + + org.apache.seatunnel + seatunnel-datasource-client + ${project.version} + + + org.apache.seatunnel + datasource-plugins-api + ${project.version} + + + + + cn.hutool + hutool-all + 5.8.16 + + + + + org.apache.seatunnel + seatunnel-transforms-v2 + + + + + + org.apache.seatunnel + connector-common + test + + + + org.apache.seatunnel + seatunnel-transforms-v2 + + + + org.apache.seatunnel + connector-console + test + + + + org.apache.seatunnel + connector-fake + test + + + + org.apache.seatunnel + connector-kafka + test + + + org.apache.seatunnel + connector-http-base + test + + + org.apache.seatunnel + connector-http-feishu + test + + + org.apache.seatunnel + connector-http-wechat + test + + + org.apache.seatunnel + connector-http-myhours + test + + + org.apache.seatunnel + connector-http-lemlist + test + + + org.apache.seatunnel + connector-http-klaviyo + test + + + org.apache.seatunnel + connector-http-onesignal + test + + + org.apache.seatunnel + connector-http-notion + test + + + org.apache.seatunnel + connector-jdbc + test + + + org.apache.seatunnel + connector-cdc-mysql + test + + + org.apache.seatunnel + connector-cdc-sqlserver + test + + + org.apache.seatunnel + connector-socket + test + + + org.apache.seatunnel + connector-clickhouse + test + + + org.apache.seatunnel + connector-pulsar + test + + + org.apache.seatunnel + connector-hive + test + + + org.apache.seatunnel + connector-file-hadoop + test + + + 
org.apache.seatunnel + connector-file-local + test + + + org.apache.seatunnel + connector-file-oss + test + + + org.apache.seatunnel + connector-file-ftp + test + + + org.apache.seatunnel + connector-file-sftp + test + + + org.apache.seatunnel + connector-hudi + test + + + org.apache.seatunnel + connector-dingtalk + test + + + org.apache.seatunnel + connector-kudu + test + + + org.apache.seatunnel + connector-email + test + + + org.apache.seatunnel + connector-elasticsearch + test + + + org.apache.seatunnel + connector-iotdb + test + + + org.apache.seatunnel + connector-neo4j + test + + + org.apache.seatunnel + connector-redis + test + + + org.apache.seatunnel + connector-google-sheets + test + + + org.apache.seatunnel + connector-datahub + test + + + org.apache.seatunnel + connector-sentry + test + + + org.apache.seatunnel + connector-mongodb + test + + + org.apache.seatunnel + connector-iceberg + test + + + org.apache.seatunnel + connector-influxdb + test + + + org.apache.seatunnel + connector-cassandra + test + + + org.apache.seatunnel + connector-file-s3 + test + + + org.apache.seatunnel + connector-amazondynamodb + test + + + org.apache.seatunnel + connector-starrocks + test + + + org.apache.seatunnel + connector-tablestore + test + + + org.apache.seatunnel + connector-slack + test + + + org.apache.seatunnel + connector-http-gitlab + test + + + org.apache.seatunnel + connector-http-jira + test + + + org.apache.seatunnel + connector-rabbitmq + test + + + org.apache.seatunnel + connector-openmldb + test + + + org.apache.seatunnel + connector-doris + test + + + org.apache.seatunnel + connector-maxcompute + test + + + org.apache.seatunnel + connector-cdc-sqlserver + test + + org.springframework.boot @@ -45,8 +364,8 @@ spring-boot-starter-tomcat - log4j-to-slf4j org.apache.logging.log4j + log4j-to-slf4j @@ -76,22 +395,31 @@ mybatis-plus-boot-starter + + com.google.code.gson + gson + + io.springfox springfox-swagger2 ${springfox-swagger.version} - spring-aop 
org.springframework + spring-aop - spring-beans org.springframework + spring-beans - spring-context org.springframework + spring-context + + + com.fasterxml.jackson.core + jackson-annotations @@ -99,13 +427,11 @@ io.springfox springfox-swagger-ui - ${springfox-swagger.version} io.swagger swagger-annotations - ${swagger-annotations.version} @@ -113,22 +439,17 @@ hibernate-validator - classmate com.fasterxml + classmate + org.apache.commons commons-lang3 - - - - mysql - mysql-connector-java - ${mysql.version} - provided + 3.12.0 @@ -143,8 +464,8 @@ ${project.version} - slf4j-log4j12 org.slf4j + slf4j-log4j12 @@ -153,18 +474,11 @@ org.apache.seatunnel seatunnel-scheduler-dolphinscheduler ${project.version} - - - slf4j-log4j12 - org.slf4j - - org.springframework.boot spring-boot-starter-test - ${spring-boot.version} test @@ -185,11 +499,80 @@ spring-boot-starter-aop + + commons-io + commons-io + + com.cronutils cron-utils - ${cron-utils.version} + + + com.google.auto.service + auto-service-annotations + + + com.google.auto.service + auto-service + + + org.apache.seatunnel + datasource-s3 + provided + + + + org.apache.seatunnel + datasource-kafka + 1.0.0-SNAPSHOT + provided + + + + org.apache.seatunnel + datasource-jdbc-mysql + 1.0.0-SNAPSHOT + provided + + + org.apache.seatunnel + datasource-mysql-cdc + 1.0.0-SNAPSHOT + provided + + + org.apache.seatunnel + datasource-sqlserver-cdc + 1.0.0-SNAPSHOT + provided + + + + org.apache.seatunnel + datasource-jdbc-sqlserver + 1.0.0-SNAPSHOT + provided + + + + com.google.code.findbugs + jsr305 + + + + org.apache.seatunnel + seatunnel-engine-client + + + org.awaitility + awaitility + compile + + + org.junit.jupiter + junit-jupiter-api @@ -197,75 +580,28 @@ org.apache.maven.plugins - maven-jar-plugin - - - *.yml - *.yaml - *.xml - - - - - - com.github.eirslett - frontend-maven-plugin - 1.11.3 - - ${project.basedir}/../../${frontend.project.name} - - - - install node and npm - - install-node-and-npm - - - v16.16.0 - 8.19.2 - - - - 
install - - npm - - generate-resources - - install --ignore-scripts - - - - build - - npm - - - run build:prod - - - - - - - maven-assembly-plugin + maven-resources-plugin + 3.2.0 - seatunnel-web + copy-resources - single + copy-resources - package + process-classes - seatunnel-web - - src/main/assembly/seatunnel-web.xml - - false + ${project.build.outputDirectory}/META-INF/services + + + ${project.basedir}/src/main/resources/META-INF/services + true + + - \ No newline at end of file + + diff --git a/seatunnel-server/seatunnel-app/src/main/assembly/seatunnel-web.xml b/seatunnel-server/seatunnel-app/src/main/assembly/seatunnel-web.xml deleted file mode 100644 index 0d1f4935a..000000000 --- a/seatunnel-server/seatunnel-app/src/main/assembly/seatunnel-web.xml +++ /dev/null @@ -1,58 +0,0 @@ - - - - seatunnel-web - - dir - zip - - false - seatunnel-web - - - ${basedir}/src/main/resources - - *.yml - *.yaml - *.xml - - conf - - - ${basedir}/src/main/bin - - *.sh - - bin - - - ${basedir}/../../seatunnel-ui/dist - - * - - dist - - - - - libs - - - diff --git a/seatunnel-server/seatunnel-app/src/main/bin/seatunnel-backend-daemon.sh b/seatunnel-server/seatunnel-app/src/main/bin/seatunnel-backend-daemon.sh index 285f39cd1..005b2faac 100644 --- a/seatunnel-server/seatunnel-app/src/main/bin/seatunnel-backend-daemon.sh +++ b/seatunnel-server/seatunnel-app/src/main/bin/seatunnel-backend-daemon.sh @@ -37,7 +37,7 @@ start() { echo "$WORKDIR" $JAVA_HOME/bin/java $JAVA_OPTS \ - -cp "$WORKDIR/../conf":"$WORKDIR/../libs/*" \ + -cp "$WORKDIR/../conf":"$WORKDIR/../libs/*":"$WORKDIR/../datasource/*" \ $SPRING_OPTS \ org.apache.seatunnel.app.SeatunnelApplication echo "seatunnel started" diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/SeatunnelApplication.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/SeatunnelApplication.java index 4947195fe..791d12b41 100644 --- 
a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/SeatunnelApplication.java +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/SeatunnelApplication.java @@ -25,12 +25,13 @@ import org.springframework.scheduling.annotation.EnableScheduling; import org.springframework.transaction.annotation.EnableTransactionManagement; -@SpringBootApplication(scanBasePackages = {"org.apache.seatunnel.app", "org.apache.seatunnel.scheduler"}) +@SpringBootApplication( + scanBasePackages = {"org.apache.seatunnel.app", "org.apache.seatunnel.scheduler"}) @EnableTransactionManagement @EnableConfigurationProperties @EnableScheduling @EnableAsync(proxyTargetClass = true) -@MapperScan({"org.apache.seatunnel.app.dal"}) +@MapperScan({"org.apache.seatunnel.app.dal.mapper"}) public class SeatunnelApplication { public static void main(String[] args) { SpringApplication.run(SeatunnelApplication.class, args); diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/adapter/SeatunnelWebAdapter.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/adapter/SeatunnelWebAdapter.java index 9f1d4ab0f..a20f5a8f0 100644 --- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/adapter/SeatunnelWebAdapter.java +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/adapter/SeatunnelWebAdapter.java @@ -23,40 +23,82 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.web.method.support.HandlerMethodArgumentResolver; +import org.springframework.web.servlet.LocaleResolver; import org.springframework.web.servlet.config.annotation.InterceptorRegistry; +import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry; +import org.springframework.web.servlet.config.annotation.ViewControllerRegistry; import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; +import 
org.springframework.web.servlet.i18n.CookieLocaleResolver; import javax.annotation.Resource; import java.util.List; +import java.util.Locale; @Configuration public class SeatunnelWebAdapter implements WebMvcConfigurer { + + public static final String LOCALE_LANGUAGE_COOKIE = "language"; + public static final String LOGIN_INTERCEPTOR_PATH_PATTERN = "/**/*"; + public static final String LOGIN_PATH_PATTERN = "/seatunnel/api/v1/user/login**"; + public static final String REGISTER_PATH_PATTERN = "/users/register"; + @Bean public AuthenticationInterceptor authenticationInterceptor() { return new AuthenticationInterceptor(); } - @Resource - private UserIdMethodArgumentResolver currentUserMethodArgumentResolver; + @Resource private UserIdMethodArgumentResolver currentUserMethodArgumentResolver; + + /** + * Cookie + * + * @return local resolver + */ + @Bean(name = "localeResolver") + public LocaleResolver localeResolver() { + CookieLocaleResolver localeResolver = new CookieLocaleResolver(); + localeResolver.setCookieName(LOCALE_LANGUAGE_COOKIE); + // set default locale + localeResolver.setDefaultLocale(Locale.US); + // set language tag compliant + localeResolver.setLanguageTagCompliant(false); + return localeResolver; + } @Override public void addInterceptors(InterceptorRegistry registry) { - registry.addInterceptor(authenticationInterceptor()).order(1).addPathPatterns("/**") - // exclude swagger api path + registry.addInterceptor(authenticationInterceptor()) + .order(1) + .addPathPatterns(LOGIN_INTERCEPTOR_PATH_PATTERN) .excludePathPatterns( + LOGIN_PATH_PATTERN, + REGISTER_PATH_PATTERN, "/swagger-resources/**", "/webjars/**", "/v2/**", - "/swagger-ui.html**" - ) - // exclude login - .excludePathPatterns("/api/v1/user/login**") - ; + "*.html", + "/ui/**", + "/error", + "/swagger-ui.html**"); } @Override public void addArgumentResolvers(List argumentResolvers) { argumentResolvers.add(currentUserMethodArgumentResolver); } + + @Override + public void 
addResourceHandlers(ResourceHandlerRegistry registry) { + registry.addResourceHandler("/static/**").addResourceLocations("classpath:/static/"); + registry.addResourceHandler("/webjars/**") + .addResourceLocations("classpath:/META-INF/resources/webjars/"); + registry.addResourceHandler("/ui/**").addResourceLocations("file:ui/"); + } + + @Override + public void addViewControllers(ViewControllerRegistry registry) { + registry.addViewController("/").setViewName("redirect:/ui/"); + registry.addViewController("/ui/").setViewName("forward:/ui/index.html"); + } } diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/LogoutAspect.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/LogoutAspect.java index 3d481c2f5..48c7b6769 100644 --- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/LogoutAspect.java +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/LogoutAspect.java @@ -17,11 +17,8 @@ package org.apache.seatunnel.app.aspect; -import static org.apache.seatunnel.server.common.Constants.USER_ID; - import org.apache.seatunnel.app.dal.dao.IUserDao; -import lombok.extern.slf4j.Slf4j; import org.aspectj.lang.JoinPoint; import org.aspectj.lang.annotation.Aspect; import org.aspectj.lang.annotation.Before; @@ -31,26 +28,28 @@ import org.springframework.web.context.request.RequestContextHolder; import org.springframework.web.context.request.ServletRequestAttributes; +import lombok.extern.slf4j.Slf4j; + import javax.annotation.Resource; import javax.servlet.http.HttpServletRequest; +import static org.apache.seatunnel.server.common.Constants.USER_ID; + @Slf4j @Aspect @Component @Order(2) public class LogoutAspect { - @Resource - private IUserDao userDaoImpl; + @Resource private IUserDao userDaoImpl; @Pointcut("execution(public * org.apache.seatunnel.app.controller.UserController.logout(..))") - public void logoutPointCut() { - - } + public void 
logoutPointCut() {} @Before("logoutPointCut()") public void check(JoinPoint pjp) { - ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes(); + ServletRequestAttributes attributes = + (ServletRequestAttributes) RequestContextHolder.getRequestAttributes(); HttpServletRequest request = attributes.getRequest(); final Integer userId = (Integer) request.getAttribute(USER_ID); userDaoImpl.disableToken(userId); diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/UserId.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/UserId.java index 628e103ca..8d812be39 100644 --- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/UserId.java +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/UserId.java @@ -24,5 +24,4 @@ @Target({ElementType.PARAMETER}) @Retention(RetentionPolicy.RUNTIME) -public @interface UserId { -} +public @interface UserId {} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/bean/connector/ConnectorCache.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/bean/connector/ConnectorCache.java new file mode 100644 index 000000000..df0fc5aae --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/bean/connector/ConnectorCache.java @@ -0,0 +1,160 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.app.bean.connector; + +import org.apache.seatunnel.api.configuration.util.OptionRule; +import org.apache.seatunnel.app.domain.response.connector.ConnectorFeature; +import org.apache.seatunnel.app.domain.response.connector.ConnectorInfo; +import org.apache.seatunnel.app.dynamicforms.FormStructure; +import org.apache.seatunnel.app.thirdparty.framework.PluginDiscoveryUtil; +import org.apache.seatunnel.common.config.Common; +import org.apache.seatunnel.common.config.DeployMode; +import org.apache.seatunnel.common.constants.PluginType; +import org.apache.seatunnel.plugin.discovery.PluginIdentifier; +import org.apache.seatunnel.server.common.SeatunnelErrorEnum; +import org.apache.seatunnel.server.common.SeatunnelException; + +import org.springframework.stereotype.Component; + +import lombok.NonNull; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.function.Function; +import java.util.stream.Collectors; + +@Component +public class ConnectorCache { + + private final ConcurrentMap> downloadConnectorCache = + new ConcurrentHashMap<>(); + + private final ConcurrentMap> allConnectorCache = + new ConcurrentHashMap<>(); + + private final ConcurrentMap allConnectorOptionRule = + new ConcurrentHashMap<>(); + + private List transformCache = new 
CopyOnWriteArrayList<>(); + + private ConcurrentMap sourceFormStructureCache = + new ConcurrentHashMap<>(); + + private ConcurrentMap sinkFormStructureCache = new ConcurrentHashMap<>(); + + private ConcurrentMap transformFormStructureCache = + new ConcurrentHashMap<>(); + + private Map featureMap = new HashMap<>(); + + public ConnectorCache() throws IOException { + refresh(); + } + + public List getAllConnectors(PluginType pluginType) { + return allConnectorCache.get(pluginType); + } + + public List getTransform() { + return transformCache; + } + + public List getDownLoadConnector(PluginType pluginType) { + return downloadConnectorCache.get(pluginType); + } + + public List getNotDownLoadConnector(PluginType pluginType) { + Map allConnectors = + allConnectorCache.get(pluginType).stream() + .collect( + Collectors.toMap( + ConnectorInfo::getPluginIdentifier, Function.identity())); + downloadConnectorCache + .get(pluginType) + .forEach(d -> allConnectors.remove(d.getPluginIdentifier())); + return new ArrayList<>(allConnectors.values()); + } + + public ConnectorFeature getConnectorFeature(PluginIdentifier connectorInfo) { + return featureMap.get(connectorInfo); + } + + public synchronized void refresh() throws IOException { + Common.setDeployMode(DeployMode.CLIENT); + Map> allConnectors = + PluginDiscoveryUtil.getAllConnectors(); + allConnectorOptionRule.clear(); + allConnectors.forEach((key, value) -> allConnectorOptionRule.putAll(value)); + + downloadConnectorCache.put( + PluginType.SOURCE, + PluginDiscoveryUtil.getDownloadedConnectors(allConnectors, PluginType.SOURCE)); + downloadConnectorCache.put( + PluginType.SINK, + PluginDiscoveryUtil.getDownloadedConnectors(allConnectors, PluginType.SINK)); + allConnectorCache.put( + PluginType.SOURCE, + PluginDiscoveryUtil.getAllConnectorsFromPluginMapping(PluginType.SOURCE)); + allConnectorCache.put( + PluginType.SINK, + PluginDiscoveryUtil.getAllConnectorsFromPluginMapping(PluginType.SINK)); + transformCache = 
PluginDiscoveryUtil.getTransforms(allConnectors); + + sourceFormStructureCache = + PluginDiscoveryUtil.getDownloadedConnectorFormStructures( + allConnectors, PluginType.SOURCE); + sinkFormStructureCache = + PluginDiscoveryUtil.getDownloadedConnectorFormStructures( + allConnectors, PluginType.SINK); + transformFormStructureCache = PluginDiscoveryUtil.getTransformFormStructures(allConnectors); + syncSourceFeature(); + } + + private void syncSourceFeature() throws IOException { + featureMap = PluginDiscoveryUtil.getConnectorFeatures(PluginType.SOURCE); + } + + public FormStructure getFormStructure( + @NonNull String pluginType, @NonNull String connectorName) { + if (PluginType.SOURCE.getType().equals(pluginType)) { + return sourceFormStructureCache.get(connectorName); + } + + if (PluginType.TRANSFORM.getType().equals(pluginType)) { + return transformFormStructureCache.get(connectorName); + } + + if (PluginType.SINK.getType().equals(pluginType)) { + return sinkFormStructureCache.get(connectorName); + } + + throw new SeatunnelException(SeatunnelErrorEnum.UNSUPPORTED_CONNECTOR_TYPE, pluginType); + } + + public OptionRule getOptionRule(@NonNull String pluginType, @NonNull String connectorName) { + return allConnectorOptionRule.get( + PluginIdentifier.of("seatunnel", pluginType, connectorName)); + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/bean/engine/EngineDataType.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/bean/engine/EngineDataType.java new file mode 100644 index 000000000..214f81391 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/bean/engine/EngineDataType.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.seatunnel.app.bean.engine; + +import org.apache.seatunnel.api.table.type.ArrayType; +import org.apache.seatunnel.api.table.type.BasicType; +import org.apache.seatunnel.api.table.type.DecimalType; +import org.apache.seatunnel.api.table.type.LocalTimeType; +import org.apache.seatunnel.api.table.type.PrimitiveByteArrayType; +import org.apache.seatunnel.api.table.type.SeaTunnelDataType; + +import lombok.AllArgsConstructor; +import lombok.Data; + +public class EngineDataType { + + public static DataType T_STRING = new DataType("string", BasicType.STRING_TYPE); + public static DataType T_BOOLEAN = new DataType("boolean", BasicType.BOOLEAN_TYPE); + public static DataType T_BYTE = new DataType("tinyint", BasicType.BYTE_TYPE); + public static DataType T_SHORT = new DataType("smallint", BasicType.SHORT_TYPE); + public static DataType T_INT = new DataType("int", BasicType.INT_TYPE); + public static DataType T_LONG = new DataType("bigint", BasicType.LONG_TYPE); + public static DataType T_FLOAT = new DataType("float", BasicType.FLOAT_TYPE); + public static DataType T_DOUBLE = new DataType("double", BasicType.DOUBLE_TYPE); + public static DataType T_VOID = new DataType("null", BasicType.VOID_TYPE); + + public static DataType T_DECIMAL = new DataType("decimal(38, 18)", new DecimalType(38, 18)); + + public static DataType T_LOCAL_DATE = new DataType("date", 
LocalTimeType.LOCAL_DATE_TYPE); + public static DataType T_LOCAL_TIME = new DataType("time", LocalTimeType.LOCAL_TIME_TYPE); + public static DataType T_LOCAL_DATE_TIME = + new DataType("timestamp", LocalTimeType.LOCAL_DATE_TIME_TYPE); + + public static DataType T_PRIMITIVE_BYTE_ARRAY = + new DataType("bytes", PrimitiveByteArrayType.INSTANCE); + + public static DataType T_STRING_ARRAY = + new DataType("array", ArrayType.STRING_ARRAY_TYPE); + public static DataType T_BOOLEAN_ARRAY = + new DataType("array", ArrayType.BOOLEAN_ARRAY_TYPE); + public static DataType T_BYTE_ARRAY = new DataType("array", ArrayType.BYTE_ARRAY_TYPE); + public static DataType T_SHORT_ARRAY = + new DataType("array", ArrayType.SHORT_ARRAY_TYPE); + public static DataType T_INT_ARRAY = new DataType("array", ArrayType.INT_ARRAY_TYPE); + public static DataType T_LONG_ARRAY = new DataType("array", ArrayType.LONG_ARRAY_TYPE); + public static DataType T_FLOAT_ARRAY = new DataType("array", ArrayType.FLOAT_ARRAY_TYPE); + public static DataType T_DOUBLE_ARRAY = + new DataType("array", ArrayType.DOUBLE_ARRAY_TYPE); + + @Data + @AllArgsConstructor + public static class DataType { + String name; + SeaTunnelDataType RawType; + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/bean/env/JobEnvCache.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/bean/env/JobEnvCache.java new file mode 100644 index 000000000..c488c0e0f --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/bean/env/JobEnvCache.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.seatunnel.app.bean.env; + +import org.apache.seatunnel.api.configuration.util.OptionRule; +import org.apache.seatunnel.api.env.EnvOptionRule; +import org.apache.seatunnel.app.dynamicforms.AbstractFormOption; +import org.apache.seatunnel.app.dynamicforms.FormStructure; +import org.apache.seatunnel.app.thirdparty.framework.SeaTunnelOptionRuleWrapper; + +import org.springframework.stereotype.Component; + +import lombok.Data; +import lombok.Getter; + +import java.util.List; +import java.util.stream.Collectors; + +@Component +@Data +public class JobEnvCache { + + @Getter private final FormStructure envFormStructure; + + public JobEnvCache() { + OptionRule envOptionRules = EnvOptionRule.getEnvOptionRules(); + envFormStructure = + SeaTunnelOptionRuleWrapper.wrapper( + envOptionRules.getOptionalOptions(), + envOptionRules.getRequiredOptions(), + "Env"); + List collect = + envFormStructure.getForms().stream() + .filter(form -> !"parallelism".equalsIgnoreCase(form.getField())) + .collect(Collectors.toList()); + envFormStructure.setForms(collect); + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ConditionType.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ConditionType.java new file mode 100644 index 000000000..63bbe413a --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ConditionType.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license 
agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.app.common; + +import com.baomidou.mybatisplus.annotation.EnumValue; + +import java.util.HashMap; +import java.util.Map; + +/** condition type */ +public enum ConditionType { + + /** 0 none 1 judge 2 delay */ + NONE(0, "none"), + JUDGE(1, "judge"), + DELAY(2, "delay"); + + ConditionType(int code, String desc) { + this.code = code; + this.desc = desc; + } + + @EnumValue private final int code; + private final String desc; + + public int getCode() { + return code; + } + + public String getDesc() { + return desc; + } + + private static final Map CONDITION_TYPE_MAP = new HashMap<>(); + + static { + for (ConditionType conditionType : ConditionType.values()) { + CONDITION_TYPE_MAP.put(conditionType.desc, conditionType); + } + } + + public static ConditionType of(String desc) { + if (CONDITION_TYPE_MAP.containsKey(desc)) { + return CONDITION_TYPE_MAP.get(desc); + } + throw new IllegalArgumentException("invalid type : " + desc); + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/Constants.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/Constants.java new file mode 100644 index 000000000..b32c48ed4 --- /dev/null +++ 
b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/Constants.java @@ -0,0 +1,658 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.app.common; + +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.SystemUtils; + +import java.time.Duration; +import java.util.regex.Pattern; + +public final class Constants { + + private Constants() { + throw new UnsupportedOperationException("Construct Constants"); + } + + /** common properties path */ + public static final String COMMON_PROPERTIES_PATH = "/common.properties"; + + public static final String FORMAT_SS = "%s%s"; + public static final String FORMAT_S_S = "%s/%s"; + public static final String FORMAT_S_S_COLON = "%s:%s"; + public static final String AWS_ACCESS_KEY_ID = "aws.access.key.id"; + public static final String AWS_SECRET_ACCESS_KEY = "aws.secret.access.key"; + public static final String AWS_REGION = "aws.region"; + public static final String FOLDER_SEPARATOR = "/"; + + public static final String FOLDER_DOLLOR = "$"; + + public static final String RESOURCE_TYPE_FILE = "resources"; + + public static final String RESOURCE_TYPE_UDF = "udfs"; + + public static final String STORAGE_S3 
= "S3"; + + public static final String STORAGE_HDFS = "HDFS"; + + public static final String EMPTY_STRING = ""; + + /** fs.defaultFS */ + public static final String FS_DEFAULT_FS = "fs.defaultFS"; + + /** hadoop configuration */ + public static final String HADOOP_RM_STATE_ACTIVE = "ACTIVE"; + + public static final String HADOOP_RESOURCE_MANAGER_HTTPADDRESS_PORT = + "resource.manager.httpaddress.port"; + + /** yarn.resourcemanager.ha.rm.ids */ + public static final String YARN_RESOURCEMANAGER_HA_RM_IDS = "yarn.resourcemanager.ha.rm.ids"; + + /** yarn.application.status.address */ + public static final String YARN_APPLICATION_STATUS_ADDRESS = "yarn.application.status.address"; + + /** yarn.job.history.status.address */ + public static final String YARN_JOB_HISTORY_STATUS_ADDRESS = "yarn.job.history.status.address"; + + /** hdfs configuration hdfs.root.user */ + public static final String HDFS_ROOT_USER = "hdfs.root.user"; + + /** hdfs/s3 configuration resource.upload.path */ + public static final String RESOURCE_UPLOAD_PATH = "resource.upload.path"; + + /** data basedir path */ + public static final String DATA_BASEDIR_PATH = "data.basedir.path"; + + /** dolphinscheduler.env.path */ + public static final String DOLPHINSCHEDULER_ENV_PATH = "dolphinscheduler.env.path"; + + /** environment properties default path */ + public static final String ENV_PATH = "dolphinscheduler_env.sh"; + + /** resource.view.suffixs */ + public static final String RESOURCE_VIEW_SUFFIXES = "resource.view.suffixs"; + + public static final String RESOURCE_VIEW_SUFFIXES_DEFAULT_VALUE = + "txt,log,sh,bat,conf,cfg,py,java,sql,xml,hql,properties,json,yml,yaml,ini,js"; + + /** development.state */ + public static final String DEVELOPMENT_STATE = "development.state"; + + /** sudo enable */ + public static final String SUDO_ENABLE = "sudo.enable"; + + public static final String SET_TENANT_OWNER_ENABLE = "setTenantOwner.enable"; + + /** string true */ + public static final String STRING_TRUE = "true"; 
+ + /** resource storage type */ + public static final String RESOURCE_STORAGE_TYPE = "resource.storage.type"; + + public static final String AWS_END_POINT = "aws.endpoint"; + /** comma , */ + public static final String COMMA = ","; + + /** COLON : */ + public static final String COLON = ":"; + + /** period . */ + public static final String PERIOD = "."; + + /** QUESTION ? */ + public static final String QUESTION = "?"; + + /** SPACE " " */ + public static final String SPACE = " "; + + /** SINGLE_SLASH / */ + public static final String SINGLE_SLASH = "/"; + + /** DOUBLE_SLASH // */ + public static final String DOUBLE_SLASH = "//"; + + /** EQUAL SIGN */ + public static final String EQUAL_SIGN = "="; + + /** AT SIGN */ + public static final String AT_SIGN = "@"; + + /** date format of yyyy-MM-dd HH:mm:ss */ + public static final String YYYY_MM_DD_HH_MM_SS = "yyyy-MM-dd HH:mm:ss"; + + /** date format of yyyyMMdd */ + public static final String YYYYMMDD = "yyyyMMdd"; + + /** date format of yyyyMMddHHmmss */ + public static final String YYYYMMDDHHMMSS = "yyyyMMddHHmmss"; + + /** date format of yyyyMMddHHmmssSSS */ + public static final String YYYYMMDDHHMMSSSSS = "yyyyMMddHHmmssSSS"; + /** http connect time out */ + public static final int HTTP_CONNECT_TIMEOUT = 60 * 1000; + + /** http connect request time out */ + public static final int HTTP_CONNECTION_REQUEST_TIMEOUT = 60 * 1000; + + /** httpclient soceket time out */ + public static final int SOCKET_TIMEOUT = 60 * 1000; + + /** registry session timeout */ + public static final int REGISTRY_SESSION_TIMEOUT = 10 * 1000; + + /** http header */ + public static final String HTTP_HEADER_UNKNOWN = "unKnown"; + + /** http X-Forwarded-For */ + public static final String HTTP_X_FORWARDED_FOR = "X-Forwarded-For"; + + /** http X-Real-IP */ + public static final String HTTP_X_REAL_IP = "X-Real-IP"; + + /** UTF-8 */ + public static final String UTF_8 = "UTF-8"; + + /** user name regex */ + public static final Pattern 
REGEX_USER_NAME = Pattern.compile("^[a-zA-Z0-9._-]{3,39}$"); + + /** read permission */ + public static final int READ_PERMISSION = 2; + + /** write permission */ + public static final int WRITE_PERMISSION = 2 * 2; + + /** execute permission */ + public static final int EXECUTE_PERMISSION = 1; + + /** default admin permission */ + public static final int DEFAULT_ADMIN_PERMISSION = 7; + + /** default hash map size */ + public static final int DEFAULT_HASH_MAP_SIZE = 16; + + /** all permissions */ + public static final int ALL_PERMISSIONS = + READ_PERMISSION | WRITE_PERMISSION | EXECUTE_PERMISSION; + + /** max task timeout */ + public static final int MAX_TASK_TIMEOUT = 24 * 3600; + + /** worker host weight */ + public static final int DEFAULT_WORKER_HOST_WEIGHT = 100; + + /** time unit secong to minutes */ + public static final int SEC_2_MINUTES_TIME_UNIT = 60; + + /** + * * + * + *

rpc port + */ + public static final String RPC_PORT = "rpc.port"; + + /** forbid running task */ + public static final String FLOWNODE_RUN_FLAG_FORBIDDEN = "FORBIDDEN"; + + /** normal running task */ + public static final String FLOWNODE_RUN_FLAG_NORMAL = "NORMAL"; + + public static final String COMMON_TASK_TYPE = "common"; + + public static final String DEFAULT = "default"; + public static final String PASSWORD = "password"; + public static final String XXXXXX = "******"; + public static final String NULL = "NULL"; + public static final String THREAD_NAME_MASTER_SERVER = "Master-Server"; + public static final String THREAD_NAME_WORKER_SERVER = "Worker-Server"; + public static final String THREAD_NAME_ALERT_SERVER = "Alert-Server"; + + /** command parameter keys */ + public static final String CMD_PARAM_RECOVER_PROCESS_ID_STRING = "ProcessInstanceId"; + + public static final String CMD_PARAM_RECOVERY_START_NODE_STRING = "StartNodeIdList"; + + public static final String CMD_PARAM_RECOVERY_WAITING_THREAD = "WaitingThreadInstanceId"; + + public static final String CMD_PARAM_SUB_PROCESS = "processInstanceId"; + + public static final String CMD_PARAM_EMPTY_SUB_PROCESS = "0"; + + public static final String CMD_PARAM_SUB_PROCESS_PARENT_INSTANCE_ID = "parentProcessInstanceId"; + + public static final String CMD_PARAM_SUB_PROCESS_DEFINE_CODE = "processDefinitionCode"; + + public static final String CMD_PARAM_START_NODES = "StartNodeList"; + + public static final String CMD_PARAM_CLEAN_STATE_TASK_INSTANCE_IDS = + "CleanStateTaskInstanceIds"; + + public static final String CMD_PARAM_DEPENDENT_TASK_TRACK_DATA = "dependentTaskTrackData"; + + public static final String CMD_PARAM_SUBPROCESS_TASK_TRACK_DATA = "subProcessTaskTrackData"; + + public static final String CMD_PARAM_RECOVERY_PAUSED_ISOLATED_TASK_IDS = + "RecoveryPausedIsolationTaskInstanceIds"; + public static final String CMD_PARAM_RECOVERY_KILLED_ISOLATED_TASK_IDS = + "RecoveryKilledIsolationTaskInstanceIds"; + + 
public static final String CMD_PARAM_RECOVERY_PAUSED_BY_CORONATION_TASK_IDS = + "RecoveryPausedByCoronationTaskInstanceIds"; + + public static final String CMD_PARAM_START_PARAMS = "StartParams"; + + public static final String CMD_PARAM_FATHER_PARAMS = "fatherParams"; + + /** complement data start date */ + public static final String CMDPARAM_COMPLEMENT_DATA_START_DATE = "complementStartDate"; + + /** complement data end date */ + public static final String CMDPARAM_COMPLEMENT_DATA_END_DATE = "complementEndDate"; + + /** complement data Schedule date */ + public static final String CMDPARAM_COMPLEMENT_DATA_SCHEDULE_DATE_LIST = + "complementScheduleDateList"; + + public static final String CMDPARAM_COMPLEMENT_DATA_CALENDAR_TYPE = "complementCalendarType"; + + public static final String CMDPARAM_COMPLEMENT_DATA_CALENDAR_CODE = "complementCalendarCode"; + + /** complement date default cron string */ + public static final String DEFAULT_CRON_STRING = "0 0 0 * * ? *"; + + /** sleep 1000ms */ + public static final long SLEEP_TIME_MILLIS = 1_000L; + + /** short sleep 100ms */ + public static final long SLEEP_TIME_MILLIS_SHORT = 100L; + + public static final Duration SERVER_CLOSE_WAIT_TIME = Duration.ofSeconds(3); + + /** one second mils */ + public static final long SECOND_TIME_MILLIS = 1_000L; + + /** master task instance cache-database refresh interval */ + public static final long CACHE_REFRESH_TIME_MILLIS = 20 * 1_000L; + + /** heartbeat for zk info length */ + public static final int HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH = 13; + + /** jar */ + public static final String JAR = "jar"; + + /** hadoop */ + public static final String HADOOP = "hadoop"; + + /** -D = */ + public static final String D = "-D"; + + /** exit code success */ + public static final int EXIT_CODE_SUCCESS = 0; + + /** exit code failure */ + public static final int EXIT_CODE_FAILURE = -1; + + /** process or task definition failure */ + public static final int DEFINITION_FAILURE = -1; + + public static 
final int OPPOSITE_VALUE = -1; + + /** process or task definition first version */ + public static final int VERSION_FIRST = 1; + + /** date format of yyyyMMdd */ + public static final String PARAMETER_FORMAT_DATE = "yyyyMMdd"; + + /** date format of yyyyMMddHHmmss */ + public static final String PARAMETER_FORMAT_TIME = "yyyyMMddHHmmss"; + + /** system date(yyyyMMddHHmmss) */ + public static final String PARAMETER_DATETIME = "system.datetime"; + + /** system date(yyyymmdd) today */ + public static final String PARAMETER_CURRENT_DATE = "system.biz.curdate"; + + /** system date(yyyymmdd) yesterday */ + public static final String PARAMETER_BUSINESS_DATE = "system.biz.date"; + + public static final String TASK_TYPE_CONDITIONS = "CONDITIONS"; + + public static final String TASK_TYPE_SWITCH = "SWITCH"; + + public static final String TASK_TYPE_SUB_PROCESS = "SUB_PROCESS"; + + public static final String TASK_TYPE_DEPENDENT = "DEPENDENT"; + + public static final String TASK_TYPE_BLOCKING = "BLOCKING"; + + public static final String TASK_TYPE_STREAM = "STREAM"; + + /** ACCEPTED */ + public static final String ACCEPTED = "ACCEPTED"; + + /** SUCCEEDED */ + public static final String SUCCEEDED = "SUCCEEDED"; + /** ENDED */ + public static final String ENDED = "ENDED"; + /** NEW */ + public static final String NEW = "NEW"; + /** NEW_SAVING */ + public static final String NEW_SAVING = "NEW_SAVING"; + /** SUBMITTED */ + public static final String SUBMITTED = "SUBMITTED"; + /** FAILED */ + public static final String FAILED = "FAILED"; + /** KILLED */ + public static final String KILLED = "KILLED"; + /** RUNNING */ + public static final String RUNNING = "RUNNING"; + /** underline "_" */ + public static final String UNDERLINE = "_"; + /** application regex */ + public static final String APPLICATION_REGEX = "application_\\d+_\\d+"; + + public static final String PID = SystemUtils.IS_OS_WINDOWS ? 
"handle" : "pid"; + /** month_begin */ + public static final String MONTH_BEGIN = "month_begin"; + /** add_months */ + public static final String ADD_MONTHS = "add_months"; + /** month_end */ + public static final String MONTH_END = "month_end"; + /** week_begin */ + public static final String WEEK_BEGIN = "week_begin"; + /** week_end */ + public static final String WEEK_END = "week_end"; + /** timestamp */ + public static final String TIMESTAMP = "timestamp"; + + public static final char SUBTRACT_CHAR = '-'; + public static final char ADD_CHAR = '+'; + public static final char MULTIPLY_CHAR = '*'; + public static final char DIVISION_CHAR = '/'; + public static final char LEFT_BRACE_CHAR = '('; + public static final char RIGHT_BRACE_CHAR = ')'; + public static final String ADD_STRING = "+"; + public static final String STAR = "*"; + public static final String DIVISION_STRING = "/"; + public static final String LEFT_BRACE_STRING = "("; + public static final char P = 'P'; + public static final char N = 'N'; + public static final String SUBTRACT_STRING = "-"; + public static final String GLOBAL_PARAMS = "globalParams"; + public static final String LOCAL_PARAMS = "localParams"; + public static final String SUBPROCESS_INSTANCE_ID = "subProcessInstanceId"; + public static final String PROCESS_INSTANCE_STATE = "processInstanceState"; + public static final String PARENT_WORKFLOW_INSTANCE = "parentWorkflowInstance"; + public static final String CONDITION_RESULT = "conditionResult"; + public static final String SWITCH_RESULT = "switchResult"; + public static final String WAIT_START_TIMEOUT = "waitStartTimeout"; + public static final String DEPENDENCE = "dependence"; + public static final String TASK_LIST = "taskList"; + public static final String QUEUE = "queue"; + public static final String QUEUE_NAME = "queueName"; + public static final int LOG_QUERY_SKIP_LINE_NUMBER = 0; + public static final int LOG_QUERY_LIMIT = 4096; + public static final String OTHER_PARAMS = 
"otherParams"; + public static final String DEPEND_STRATEGY = "dependStrategy"; + public static final String BLOCKING_CONDITION = "blockingCondition"; + public static final String ALERT_WHEN_BLOCKING = "alertWhenBlocking"; + + /** master/worker server use for zk */ + public static final String MASTER_TYPE = "master"; + + public static final String WORKER_TYPE = "worker"; + public static final String DELETE_OP = "delete"; + public static final String ADD_OP = "add"; + public static final String ALIAS = "alias"; + public static final String CONTENT = "content"; + + public static final String PATH = "path"; + public static final String DEPENDENT_SPLIT = ":||"; + public static final long DEPENDENT_ALL_TASK_CODE = 0; + + /** preview schedule execute count */ + public static final int PREVIEW_SCHEDULE_EXECUTE_COUNT = 5; + + /** kerberos */ + public static final String KERBEROS = "kerberos"; + + /** kerberos expire time */ + public static final String KERBEROS_EXPIRE_TIME = "kerberos.expire.time"; + + /** java.security.krb5.conf */ + public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf"; + + /** java.security.krb5.conf.path */ + public static final String JAVA_SECURITY_KRB5_CONF_PATH = "java.security.krb5.conf.path"; + + /** hadoop.security.authentication */ + public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication"; + + /** hadoop.security.authentication */ + public static final String HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE = + "hadoop.security.authentication.startup.state"; + + /** com.amazonaws.services.s3.enableV4 */ + public static final String AWS_S3_V4 = "com.amazonaws.services.s3.enableV4"; + + /** loginUserFromKeytab user */ + public static final String LOGIN_USER_KEY_TAB_USERNAME = "login.user.keytab.username"; + + /** loginUserFromKeytab path */ + public static final String LOGIN_USER_KEY_TAB_PATH = "login.user.keytab.path"; + + public static final String WORKFLOW_INSTANCE_ID_MDC_KEY = 
"workflowInstanceId"; + public static final String TASK_INSTANCE_ID_MDC_KEY = "taskInstanceId"; + + /** task log info format */ + public static final String TASK_LOG_INFO_FORMAT = "TaskLogInfo-%s"; + + /** status */ + public static final String STATUS = "status"; + + /** message */ + public static final String MSG = "msg"; + + /** data total */ + public static final String COUNT = "count"; + + /** page size */ + public static final String PAGE_SIZE = "pageSize"; + + /** current page no */ + public static final String PAGE_NUMBER = "pageNo"; + + /** */ + public static final String DATA_LIST = "data"; + + public static final String TOTAL_LIST = "totalList"; + + public static final String CURRENT_PAGE = "currentPage"; + + public static final String TOTAL_PAGE = "totalPage"; + + public static final String TOTAL = "total"; + + /** workflow */ + public static final String WORKFLOW_LIST = "workFlowList"; + + public static final String WORKFLOW_RELATION_LIST = "workFlowRelationList"; + + /** session user */ + public static final String SESSION_USER = "session.user"; + + public static final String SESSION_ID = "sessionId"; + + /** Security authentication types (supported types: PASSWORD,LDAP) */ + public static final String SECURITY_CONFIG_TYPE = "securityConfigType"; + + /** locale */ + public static final String LOCALE_LANGUAGE = "language"; + + /** database type */ + public static final String MYSQL = "MYSQL"; + + public static final String HIVE = "HIVE"; + + public static final String ADDRESS = "address"; + public static final String DATABASE = "database"; + public static final String OTHER = "other"; + public static final String USER = "user"; + public static final String JDBC_URL = "jdbcUrl"; + + /** session timeout */ + public static final int SESSION_TIME_OUT = 7200; + + public static final int MAX_FILE_SIZE = 1024 * 1024 * 1024; + public static final String UDF = "UDF"; + public static final String CLASS = "class"; + + /** dataSource sensitive param */ + public 
static final String DATASOURCE_PASSWORD_REGEX = + "(?<=((?i)password((\":\")|(=')))).*?(?=((\")|(')))"; + + /** default worker group */ + public static final String DEFAULT_WORKER_GROUP = "default"; + /** authorize writable perm */ + public static final int AUTHORIZE_WRITABLE_PERM = 7; + /** authorize readable perm */ + public static final int AUTHORIZE_READABLE_PERM = 4; + + public static final int NORMAL_NODE_STATUS = 0; + public static final int ABNORMAL_NODE_STATUS = 1; + public static final int BUSY_NODE_STATUE = 2; + + public static final String START_TIME = "start time"; + public static final String END_TIME = "end time"; + public static final String START_END_DATE = "startDate,endDate"; + + /** system line separator */ + public static final String SYSTEM_LINE_SEPARATOR = System.getProperty("line.separator"); + + /** datasource encryption salt */ + public static final String DATASOURCE_ENCRYPTION_SALT_DEFAULT = "!@#$%^&*"; + + public static final String DATASOURCE_ENCRYPTION_ENABLE = "datasource.encryption.enable"; + public static final String DATASOURCE_ENCRYPTION_SALT = "datasource.encryption.salt"; + + /** network interface preferred */ + public static final String DOLPHIN_SCHEDULER_NETWORK_INTERFACE_PREFERRED = + "dolphin.scheduler.network.interface.preferred"; + + /** network IP gets priority, default inner outer */ + public static final String DOLPHIN_SCHEDULER_NETWORK_PRIORITY_STRATEGY = + "dolphin.scheduler.network.priority.strategy"; + + /** exec shell scripts */ + public static final String SH = "sh"; + + /** pstree, get pud and sub pid */ + public static final String PSTREE = "pstree"; + + public static final boolean KUBERNETES_MODE = + !StringUtils.isEmpty(System.getenv("KUBERNETES_SERVICE_HOST")) + && !StringUtils.isEmpty(System.getenv("KUBERNETES_SERVICE_PORT")); + + /** dry run flag */ + public static final int DRY_RUN_FLAG_NO = 0; + + public static final int DRY_RUN_FLAG_YES = 1; + + /** data.quality.error.output.path */ + public static final 
String DATA_QUALITY_ERROR_OUTPUT_PATH = "data-quality.error.output.path"; + + public static final String CACHE_KEY_VALUE_ALL = "'all'"; + + /** use for k8s */ + public static final String NAMESPACE = "namespace"; + + public static final String K8S = "k8s"; + public static final String LIMITS_CPU = "limitsCpu"; + public static final String LIMITS_MEMORY = "limitsMemory"; + + public static final String K8S_LOCAL_TEST_CLUSTER = "ds_null_k8s"; + + /** schedule timezone */ + public static final String SCHEDULE_TIMEZONE = "schedule_timezone"; + + public static final int RESOURCE_FULL_NAME_MAX_LENGTH = 128; + + /** tenant */ + public static final int TENANT_FULL_NAME_MAX_LENGTH = 30; + + /** schedule time the amount of date data is too large, affecting the memory, so set 100 */ + public static final int SCHEDULE_TIME_MAX_LENGTH = 100; + + /** password max and min LENGTH */ + public static final int USER_PASSWORD_MAX_LENGTH = 20; + + public static final int USER_PASSWORD_MIN_LENGTH = 2; + + public static final String FUNCTION_START_WITH = "$"; + + public static final Integer DEFAULT_QUEUE_ID = 1; + + public static final Integer DEFAULT_MAX_RETRY_COUNT = 100; + + public static final String TASK_PRIORITY_QUEUE = "TASK_PRIORITY_QUEUE"; + + public static final String TASK_DISPATCH_FAILED_QUEUE = "TASK_DISPATCH_FAILED_QUEUE"; + + public static final String TASK_PARSE_RUNNING_LOG_QUEUE = "TASK_PARSE_RUNNING_LOG_QUEUE"; + + public static final int DESC_LENGTH_GO_ONLINE = 255; + + public static final int NAME_LENGTH_GO_ONLINE = 100; + + /** Task Types */ + public static final String TYPE_UNIVERSAL = "Universal"; + + public static final String TYPE_DATA_INTEGRATION = "DataIntegration"; + public static final String TYPE_CLOUD = "Cloud"; + public static final String TYPE_LOGIC = "Logic"; + public static final String TYPE_DATA_QUALITY = "DataQuality"; + public static final String TYPE_OTHER = "Other"; + public static final String TYPE_MACHINE_LEARNING = "MachineLearning"; + + /** file 
upload verify */ + public static final String FILE_TYPE_RESTRICTED_LIST = "file.type.restricted.list"; + + public static final String FILE_NAME_RESTRICTED_CONTENT = "file.name.restricted.content"; + + /** need parse running log task types */ + public static final String NEED_PARSE_RUNNING_LOG_TASK_TYPES = + "need.parse.running.log.task.types"; + + public static final String GIT_RESOURCE_BASE_DIR = "git.resource.base.dir"; + + public static final String GIT_RESOURCE_FILTER = "git.resource.filter"; + + public static final String FILE_TYPE_SUPPORT_LIST = "file.type.support.list"; + + public static final String SYSTEM_INTERNAL_PERMISSION_FILTER = + "system.internal.permission.filter"; + + public static final String WS_SUPPORT_DATASOURCES = "ws.support.datasources"; + + public static final String SEATUNNEL_TRANSFORMS_JAR_PATH = "seatunnel.transforms.jar.path"; + + /** default alert group id */ + public static final int DEFAULT_ALERT_GROUP_ID = 1; + + public static final String TASK_ID = "taskId"; +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/DataType.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/DataType.java new file mode 100644 index 000000000..c398ac274 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/DataType.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.app.common; + +/** data types in user define parameter */ +public enum DataType { + /** + * 0 string 1 integer 2 long 3 float 4 double 5 date, "YYYY-MM-DD" 6 time, "HH:MM:SS" 7 time + * stamp 8 Boolean 9 list + */ + VARCHAR, + INTEGER, + LONG, + FLOAT, + DOUBLE, + DATE, + TIME, + TIMESTAMP, + BOOLEAN, + LIST +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/DbType.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/DbType.java new file mode 100644 index 000000000..b48dc2275 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/DbType.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.app.common; + +import com.baomidou.mybatisplus.annotation.EnumValue; + +import java.util.Arrays; +import java.util.Map; +import java.util.NoSuchElementException; + +import static java.util.stream.Collectors.toMap; + +// todo: implement with SourceType...... +public enum DbType { + MYSQL(0, "mysql"), + POSTGRESQL(1, "postgresql"), + HIVE(2, "hive"), + SPARK(3, "spark"), + CLICKHOUSE(4, "clickhouse"), + ORACLE(5, "oracle"), + SQLSERVER(6, "sqlserver"), + DB2(7, "db2"), + PRESTO(8, "presto"), + H2(9, "h2"), + REDSHIFT(10, "redshift"), + DAMENG(11, "dameng"), + STARROCKS(12, "starrocks"), + // todo: this is not a db type.... + SSH(13, "ssh"), + PROXY(14, "proxy"), + ; + + @EnumValue private final int code; + private final String descp; + + DbType(int code, String descp) { + this.code = code; + this.descp = descp; + } + + public int getCode() { + return code; + } + + public String getDescp() { + return descp; + } + + private static final Map DB_TYPE_MAP = + Arrays.stream(DbType.values()).collect(toMap(DbType::getCode, val -> val)); + + public static DbType of(int type) { + if (DB_TYPE_MAP.containsKey(type)) { + return DB_TYPE_MAP.get(type); + } + return null; + } + + public static DbType ofName(String name) { + return Arrays.stream(DbType.values()) + .filter(e -> e.name().equals(name)) + .findFirst() + .orElseThrow(() -> new NoSuchElementException("no such db type")); + } + + public boolean isHive() { + return this == DbType.HIVE; + } + + /** + * support execute multiple segmented statements at a time + * + * @return + */ + public boolean isSupportMultipleStatement() { + return this == DbType.SPARK; + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/DependResult.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/DependResult.java new file mode 100644 index 000000000..f4e46d54b --- /dev/null +++ 
b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/DependResult.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.app.common; + +/** depend result */ +public enum DependResult { + + /** 0 success 1 waiting 2 failed 3 non execution */ + SUCCESS, + WAITING, + FAILED, + NON_EXEC +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/DependentRelation.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/DependentRelation.java new file mode 100644 index 000000000..c06142e06 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/DependentRelation.java @@ -0,0 +1,24 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.app.common; + +/** dependent relation: and or */ +public enum DependentRelation { + AND, + OR; +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/Direct.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/Direct.java new file mode 100644 index 000000000..9fdea5c16 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/Direct.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.app.common; + +/** parameter of stored procedure */ +public enum Direct { + /** 0 in; 1 out; */ + IN, + OUT +} diff --git a/seatunnel-ui/src/store/locale/types.ts b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/EngineType.java similarity index 87% rename from seatunnel-ui/src/store/locale/types.ts rename to seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/EngineType.java index 03625ade5..096d22bb5 100644 --- a/seatunnel-ui/src/store/locale/types.ts +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/EngineType.java @@ -15,10 +15,10 @@ * limitations under the License. */ -type Locales = 'zh_CN' | 'en_US' +package org.apache.seatunnel.app.common; -interface LocalesStore { - locales: Locales +public enum EngineType { + Spark, + Flink, + SeaTunnel } - -export { LocalesStore, Locales } diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ExecutionStatus.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ExecutionStatus.java new file mode 100644 index 000000000..62d8e0939 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ExecutionStatus.java @@ -0,0 +1,250 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.app.common; + +import com.baomidou.mybatisplus.annotation.EnumValue; + +import java.util.HashMap; + +/** running status for workflow and task nodes */ +public enum ExecutionStatus { + + /** + * status: 0 submit success 1 running 2 ready pause 3 pause 4 ready stop 5 stop 6 failure 7 + * success 8 need fault tolerance 9 kill 10 waiting thread 11 waiting depend node complete 12 + * delay execution 13 forced success 14 serial wait 15 ready block 16 block 17 dispatch + */ + SUBMITTED_SUCCESS(0, "submit success"), + RUNNING_EXECUTION(1, "running"), + READY_PAUSE(2, "ready pause"), + PAUSE(3, "pause"), + READY_STOP(4, "ready stop"), + STOP(5, "stop"), + FAILURE(6, "failure"), + SUCCESS(7, "success"), + NEED_FAULT_TOLERANCE(8, "need fault tolerance"), + KILL(9, "kill"), + WAITING_THREAD(10, "waiting thread"), + WAITING_DEPEND(11, "waiting depend node complete"), + DELAY_EXECUTION(12, "delay execution"), + FORCED_SUCCESS(13, "forced success"), + SERIAL_WAIT(14, "serial wait"), + READY_BLOCK(15, "ready block"), + BLOCK(16, "block"), + DISPATCH(17, "dispatch"), + PAUSE_BY_ISOLATION(18, "paused by isolation"), + KILL_BY_ISOLATION(19, "killed by isolation"), + + PAUSE_BY_CORONATION(20, "paused by coronation"), + FORBIDDEN_BY_CORONATION(21, "forbidden by coronation"), + ; + + ExecutionStatus(int code, String descp) { + this.code = code; + this.descp = descp; + } + + @EnumValue private final int code; + private final String descp; + + private static HashMap EXECUTION_STATUS_MAP = new HashMap<>(); + + private static final int[] 
NEED_FAILOVER_STATES = + new int[] { + ExecutionStatus.SUBMITTED_SUCCESS.ordinal(), + ExecutionStatus.DISPATCH.ordinal(), + ExecutionStatus.RUNNING_EXECUTION.ordinal(), + ExecutionStatus.DELAY_EXECUTION.ordinal(), + ExecutionStatus.READY_PAUSE.ordinal(), + ExecutionStatus.READY_STOP.ordinal() + }; + + static { + for (ExecutionStatus executionStatus : ExecutionStatus.values()) { + EXECUTION_STATUS_MAP.put(executionStatus.code, executionStatus); + } + } + + /** + * status is success + * + * @return status + */ + public boolean typeIsSuccess() { + return this == SUCCESS || this == FORCED_SUCCESS || this == FORBIDDEN_BY_CORONATION; + } + + /** + * status is failure + * + * @return status + */ + public boolean typeIsFailure() { + return this == FAILURE || this == NEED_FAULT_TOLERANCE; + } + + /** + * status is finished + * + * @return status + */ + public boolean typeIsFinished() { + return typeIsSuccess() + || typeIsFailure() + || typeIsCancel() + || typeIsPause() + || typeIsPauseByIsolation() + || typeIsStop() + || typeIsBlock() + || typeIsPauseByCoronation() + || typeIsForbiddenByCoronation(); + } + + public boolean typeIsReady() { + return this == READY_PAUSE || this == READY_STOP || this == READY_BLOCK; + } + + /** + * status is waiting thread + * + * @return status + */ + public boolean typeIsWaitingThread() { + return this == WAITING_THREAD; + } + + /** + * status is pause + * + * @return status + */ + public boolean typeIsPause() { + return this == PAUSE; + } + + public boolean typeIsPauseByIsolation() { + return this == PAUSE_BY_ISOLATION; + } + + public boolean typeIsPauseByCoronation() { + return this == PAUSE_BY_CORONATION; + } + + public boolean typeIsForbiddenByCoronation() { + return this == FORBIDDEN_BY_CORONATION; + } + + public boolean typeIsKilledByIsolation() { + return this == KILL_BY_ISOLATION; + } + + public boolean typeIsIsolated() { + return this == PAUSE_BY_ISOLATION || this == KILL_BY_ISOLATION; + } + + /** + * status is pause + * + * @return 
status + */ + public boolean typeIsStop() { + return this == STOP; + } + + /** + * status is running + * + * @return status + */ + public boolean typeIsRunning() { + return this == RUNNING_EXECUTION || this == WAITING_DEPEND || this == DELAY_EXECUTION; + } + + /** + * status is block + * + * @return status + */ + public boolean typeIsBlock() { + return this == BLOCK; + } + + /** + * status is cancel + * + * @return status + */ + public boolean typeIsCancel() { + return this == KILL || this == STOP || this == KILL_BY_ISOLATION; + } + + public int getCode() { + return code; + } + + public String getDescp() { + return descp; + } + + public static ExecutionStatus of(int status) { + if (EXECUTION_STATUS_MAP.containsKey(status)) { + return EXECUTION_STATUS_MAP.get(status); + } + throw new IllegalArgumentException("invalid status : " + status); + } + + public static boolean isNeedFailoverWorkflowInstanceState(ExecutionStatus executionStatus) { + return ExecutionStatus.SUBMITTED_SUCCESS == executionStatus + || ExecutionStatus.DISPATCH == executionStatus + || ExecutionStatus.RUNNING_EXECUTION == executionStatus + || ExecutionStatus.DELAY_EXECUTION == executionStatus + || ExecutionStatus.READY_PAUSE == executionStatus + || ExecutionStatus.READY_STOP == executionStatus; + } + + public static int[] getNeedFailoverWorkflowInstanceState() { + return NEED_FAILOVER_STATES; + } + + public static int[] getRunningProcessState() { + return new int[] { + ExecutionStatus.RUNNING_EXECUTION.getCode(), + ExecutionStatus.SUBMITTED_SUCCESS.getCode(), + ExecutionStatus.DISPATCH.getCode(), + ExecutionStatus.SERIAL_WAIT.getCode() + }; + } + + public static int[] getNotTerminatedStates() { + return new int[] { + ExecutionStatus.SUBMITTED_SUCCESS.getCode(), + ExecutionStatus.DISPATCH.getCode(), + ExecutionStatus.RUNNING_EXECUTION.getCode(), + ExecutionStatus.DELAY_EXECUTION.getCode(), + ExecutionStatus.READY_PAUSE.getCode(), + ExecutionStatus.READY_STOP.getCode(), + 
ExecutionStatus.NEED_FAULT_TOLERANCE.getCode(), + ExecutionStatus.WAITING_THREAD.getCode(), + ExecutionStatus.WAITING_DEPEND.getCode() + }; + } + + public boolean canStop() { + return typeIsRunning() || this == READY_PAUSE; + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/Flag.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/Flag.java new file mode 100644 index 000000000..3d52d4615 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/Flag.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.app.common; + +import com.baomidou.mybatisplus.annotation.EnumValue; + +/** have_script have_file can_retry have_arr_variables have_map_variables have_alert */ +public enum Flag { + + /** 0 no 1 yes */ + NO(0, "no"), + YES(1, "yes"); + + Flag(int code, String descp) { + this.code = code; + this.descp = descp; + } + + @EnumValue private final int code; + private final String descp; + + public int getCode() { + return code; + } + + public String getDescp() { + return descp; + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/Priority.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/Priority.java new file mode 100644 index 000000000..20633dcd4 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/Priority.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.app.common; + +import com.baomidou.mybatisplus.annotation.EnumValue; + +/** define process and task priority */ +public enum Priority { + + /** 0 highest priority 1 higher priority 2 medium priority 3 lower priority 4 lowest priority */ + HIGHEST(0, "highest"), + HIGH(1, "high"), + MEDIUM(2, "medium"), + LOW(3, "low"), + LOWEST(4, "lowest"); + + Priority(int code, String descp) { + this.code = code; + this.descp = descp; + } + + @EnumValue private final int code; + private final String descp; + + public int getCode() { + return code; + } + + public String getDescp() { + return descp; + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ReleaseState.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ReleaseState.java new file mode 100644 index 000000000..11a59c977 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ReleaseState.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.app.common; + +import com.baomidou.mybatisplus.annotation.EnumValue; + +/** process definition release state */ +public enum ReleaseState { + +    /** 0 offline 1 online */ +    OFFLINE(0, "offline"), +    ONLINE(1, "online"); + +    ReleaseState(int code, String descp) { +        this.code = code; +        this.descp = descp; +    } + +    @EnumValue private final int code; +    private final String descp; + +    public static ReleaseState getEnum(int value) { +        for (ReleaseState e : ReleaseState.values()) { +            if (e.ordinal() == value) { +                return e; +            } +        } +        // For values out of enum scope +        return null; +    } + +    public int getCode() { +        return code; +    } + +    public String getDescp() { +        return descp; +    } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ResUploadType.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ResUploadType.java new file mode 100644 index 000000000..5a7e570cb --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ResUploadType.java @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.app.common; + +/** database types */ +public enum ResUploadType { +    /** 0 hdfs 1 s3 2 none */ +    HDFS, +    S3, +    NONE +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ResourceType.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ResourceType.java new file mode 100644 index 000000000..c220d8ce8 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ResourceType.java @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.app.common; + +public enum ResourceType { + DATASOURCE, + UDF; +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/Result.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/Result.java index e08323ffc..5b03b2a4b 100644 --- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/Result.java +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/Result.java @@ -29,7 +29,7 @@ public class Result { private T data; - private Result() { + public Result() { this.data = null; } diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/RoleTypeEnum.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/RoleTypeEnum.java index cd2566abc..836bfe7e4 100644 --- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/RoleTypeEnum.java +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/RoleTypeEnum.java @@ -23,7 +23,7 @@ public enum RoleTypeEnum { ; private final int code; - private final String description; + private final String description; RoleTypeEnum(int code, String description) { this.code = code; diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptParamStatusEnum.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptParamStatusEnum.java index 56195dd8d..22f286be2 100644 --- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptParamStatusEnum.java +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptParamStatusEnum.java @@ -23,7 +23,7 @@ public enum ScriptParamStatusEnum { ; private final int code; - private final String description; + private final String description; ScriptParamStatusEnum(int code, String description) { this.code = code; diff --git 
a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptStatusEnum.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptStatusEnum.java index 0a4144e04..154fd2a5b 100644 --- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptStatusEnum.java +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptStatusEnum.java @@ -24,7 +24,7 @@ public enum ScriptStatusEnum { ; private final int code; - private final String description; + private final String description; ScriptStatusEnum(int code, String description) { this.code = code; diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptTypeEnum.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptTypeEnum.java index 89d627c60..353f2b2c3 100644 --- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptTypeEnum.java +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptTypeEnum.java @@ -23,7 +23,7 @@ public enum ScriptTypeEnum { ; private final int code; - private final String description; + private final String description; ScriptTypeEnum(int code, String description) { this.code = code; diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/SeaTunnelConnectorI18n.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/SeaTunnelConnectorI18n.java new file mode 100644 index 000000000..aa3144bec --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/SeaTunnelConnectorI18n.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.app.common; + +import org.apache.seatunnel.shade.com.typesafe.config.Config; +import org.apache.seatunnel.shade.com.typesafe.config.ConfigFactory; + +import org.apache.commons.io.IOUtils; + +import java.nio.charset.StandardCharsets; + +public class SeaTunnelConnectorI18n { + public static Config CONNECTOR_I18N_CONFIG_EN; + public static Config CONNECTOR_I18N_CONFIG_ZH; + + static { + try { + CONNECTOR_I18N_CONFIG_EN = + ConfigFactory.parseString( + IOUtils.toString( + SeaTunnelConnectorI18n.class.getResourceAsStream( + "/i18n_en.config"), + StandardCharsets.UTF_8)); + CONNECTOR_I18N_CONFIG_ZH = + ConfigFactory.parseString( + IOUtils.toString( + SeaTunnelConnectorI18n.class.getResourceAsStream( + "/i18n_zh.config"), + StandardCharsets.UTF_8)); + } catch (Exception e) { + throw new RuntimeException(e); + } + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/Status.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/Status.java new file mode 100644 index 000000000..39ec64d30 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/Status.java @@ -0,0 +1,854 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.app.common; + +import org.springframework.context.i18n.LocaleContextHolder; + +import java.util.Locale; +import java.util.Optional; + +/** status enum // todo #4855 One category one interval */ +public enum Status { + SUCCESS(0, "success", "成功"), + FAILED(1, "failed", "失败"), + INTERNAL_SERVER_ERROR_ARGS(10000, "Internal Server Error: {0}", "服务端异常: {0}"), + + REQUEST_PARAMS_NOT_VALID_ERROR(10001, "request parameter {0} is not valid", "请求参数[{0}]无效"), + TASK_TIMEOUT_PARAMS_ERROR(10002, "task timeout parameter is not valid", "任务超时参数无效"), + USER_NAME_EXIST(10003, "user name already exists", "用户名已存在"), + USER_NAME_NULL(10004, "user name is null", "用户名不能为空"), + HDFS_OPERATION_ERROR(10006, "hdfs operation error", "hdfs操作错误"), + TASK_INSTANCE_NOT_FOUND(10008, "task instance not found", "任务实例不存在"), + OS_TENANT_CODE_EXIST(10009, "os tenant code {0} already exists", "操作系统租户[{0}]已存在"), + USER_NOT_EXIST(10010, "user {0} not exists", "用户[{0}]不存在"), + ALERT_GROUP_NOT_EXIST(10011, "alarm group not found", "告警组不存在"), + ALERT_GROUP_EXIST(10012, "alarm group already exists", "告警组名称已存在"), + USER_NAME_PASSWD_ERROR(10013, "user name or password error", "用户名或密码错误"), + LOGIN_SESSION_FAILED(10014, "create session failed!", "创建session失败"), + DATASOURCE_EXIST(10015, "data 
source name already exists", "数据源名称已存在"), + DATASOURCE_CONNECT_FAILED(10016, "data source connection failed", "建立数据源连接失败"), + TENANT_NOT_EXIST(10017, "tenant not exists", "租户不存在"), + PROJECT_NOT_FOUND(10018, "project {0} not found ", "项目[{0}]不存在"), + PROJECT_ALREADY_EXISTS(10019, "project {0} already exists", "项目名称[{0}]已存在"), + TASK_INSTANCE_NOT_EXISTS(10020, "task instance {0} does not exist", "任务实例[{0}]不存在"), + TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE( + 10021, "task instance {0} is not sub process instance", "任务实例[{0}]不是子流程实例"), + SCHEDULE_CRON_NOT_EXISTS(10022, "scheduler crontab {0} does not exist", "调度配置定时表达式[{0}]不存在"), + SCHEDULE_CRON_ONLINE_FORBID_UPDATE( + 10023, "online status does not allow update operations", "调度配置上线状态不允许修改"), + SCHEDULE_CRON_CHECK_FAILED( + 10024, "scheduler crontab expression validation failure: {0}", "调度配置定时表达式验证失败: {0}"), + MASTER_NOT_EXISTS(10025, "master does not exist", "无可用master节点"), + SCHEDULE_STATUS_UNKNOWN(10026, "unknown status: {0}", "未知状态: {0}"), + CREATE_ALERT_GROUP_ERROR(10027, "create alert group error", "创建告警组错误"), + QUERY_ALL_ALERTGROUP_ERROR(10028, "query all alertgroup error", "查询告警组错误"), + LIST_PAGING_ALERT_GROUP_ERROR(10029, "list paging alert group error", "分页查询告警组错误"), + UPDATE_ALERT_GROUP_ERROR(10030, "update alert group error", "更新告警组错误"), + DELETE_ALERT_GROUP_ERROR(10031, "delete alert group error", "删除告警组错误"), + ALERT_GROUP_GRANT_USER_ERROR(10032, "alert group grant user error", "告警组授权用户错误"), + CREATE_DATASOURCE_ERROR(10033, "create datasource error", "创建数据源错误"), + UPDATE_DATASOURCE_ERROR(10034, "update datasource error", "更新数据源错误"), + QUERY_DATASOURCE_ERROR(10035, "query datasource error", "查询数据源错误"), + CONNECT_DATASOURCE_FAILURE(10036, "connect datasource failure", "建立数据源连接失败"), + CONNECTION_TEST_FAILURE(10037, "connection test failure", "测试数据源连接失败"), + DELETE_DATA_SOURCE_FAILURE(10038, "delete data source failure", "删除数据源失败"), + VERIFY_DATASOURCE_NAME_FAILURE(10039, "verify datasource name failure", 
"验证数据源名称失败"), + UNAUTHORIZED_DATASOURCE(10040, "unauthorized datasource", "未经授权的数据源"), + AUTHORIZED_DATA_SOURCE(10041, "authorized data source", "授权数据源失败"), + LOGIN_SUCCESS(10042, "login success", "登录成功"), + USER_LOGIN_FAILURE(10043, "user login failure", "用户登录失败"), + LIST_WORKERS_ERROR(10044, "list workers error", "查询worker列表错误"), + LIST_MASTERS_ERROR(10045, "list masters error", "查询master列表错误"), + UPDATE_PROJECT_ERROR(10046, "update project error", "更新项目信息错误"), + QUERY_PROJECT_DETAILS_BY_CODE_ERROR(10047, "query project details by code error", "查询项目详细信息错误"), + CREATE_PROJECT_ERROR(10048, "create project error", "创建项目错误"), + LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR( + 10049, "login user query project list paging error", "分页查询项目列表错误"), + DELETE_PROJECT_ERROR(10050, "delete project error", "删除项目错误"), + QUERY_UNAUTHORIZED_PROJECT_ERROR(10051, "query unauthorized project error", "查询未授权项目错误"), + QUERY_AUTHORIZED_PROJECT(10052, "query authorized project", "查询授权项目错误"), + QUERY_QUEUE_LIST_ERROR(10053, "query queue list error", "查询队列列表错误"), + CREATE_RESOURCE_ERROR(10054, "create resource error", "创建资源错误"), + UPDATE_RESOURCE_ERROR(10055, "update resource error", "更新资源错误"), + QUERY_RESOURCES_LIST_ERROR(10056, "query resources list error", "查询资源列表错误"), + QUERY_RESOURCES_LIST_PAGING(10057, "query resources list paging", "分页查询资源列表错误"), + DELETE_RESOURCE_ERROR(10058, "delete resource error", "删除资源错误"), + VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR( + 10059, "verify resource by name and type error", "资源名称或类型验证错误"), + VIEW_RESOURCE_FILE_ON_LINE_ERROR(10060, "view resource file online error", "查看资源文件错误"), + CREATE_RESOURCE_FILE_ON_LINE_ERROR(10061, "create resource file online error", "创建资源文件错误"), + RESOURCE_FILE_CATNOT_BE_EMPTY(10062, "resource file is empty", "资源文件内容不能为空"), + EDIT_RESOURCE_FILE_ON_LINE_ERROR(10063, "edit resource file online error", "更新资源文件错误"), + DOWNLOAD_RESOURCE_FILE_ERROR(10064, "download resource file error", "下载资源文件错误"), + CREATE_UDF_FUNCTION_ERROR(10065, 
"create udf function error", "创建UDF函数错误"), + VIEW_UDF_FUNCTION_ERROR(10066, "view udf function error", "查询UDF函数错误"), + UPDATE_UDF_FUNCTION_ERROR(10067, "update udf function error", "更新UDF函数错误"), + QUERY_UDF_FUNCTION_LIST_PAGING_ERROR( + 10068, "query udf function list paging error", "分页查询UDF函数列表错误"), + QUERY_DATASOURCE_BY_TYPE_ERROR(10069, "query datasource by type error", "查询数据源信息错误"), + VERIFY_UDF_FUNCTION_NAME_ERROR(10070, "verify udf function name error", "UDF函数名称验证错误"), + DELETE_UDF_FUNCTION_ERROR(10071, "delete udf function error", "删除UDF函数错误"), + AUTHORIZED_FILE_RESOURCE_ERROR(10072, "authorized file resource error", "授权资源文件错误"), + AUTHORIZE_RESOURCE_TREE(10073, "authorize resource tree display error", "授权资源目录树错误"), + UNAUTHORIZED_UDF_FUNCTION_ERROR(10074, "unauthorized udf function error", "查询未授权UDF函数错误"), + AUTHORIZED_UDF_FUNCTION_ERROR(10075, "authorized udf function error", "授权UDF函数错误"), + CREATE_SCHEDULE_ERROR(10076, "create schedule error", "创建调度配置错误"), + UPDATE_SCHEDULE_ERROR(10077, "update schedule error", "更新调度配置错误"), + PUBLISH_SCHEDULE_ONLINE_ERROR(10078, "publish schedule online error", "上线调度配置错误"), + OFFLINE_SCHEDULE_ERROR(10079, "offline schedule error", "下线调度配置错误"), + QUERY_SCHEDULE_LIST_PAGING_ERROR(10080, "query schedule list paging error", "分页查询调度配置列表错误"), + QUERY_SCHEDULE_LIST_ERROR(10081, "query schedule list error", "查询调度配置列表错误"), + QUERY_TASK_LIST_PAGING_ERROR(10082, "query task list paging error", "分页查询任务列表错误"), + QUERY_TASK_RECORD_LIST_PAGING_ERROR(10083, "query task record list paging error", "分页查询任务记录错误"), + CREATE_TENANT_ERROR(10084, "create tenant error", "创建租户错误"), + QUERY_TENANT_LIST_PAGING_ERROR(10085, "query tenant list paging error", "分页查询租户列表错误"), + QUERY_TENANT_LIST_ERROR(10086, "query tenant list error", "查询租户列表错误"), + UPDATE_TENANT_ERROR(10087, "update tenant error", "更新租户错误"), + DELETE_TENANT_BY_ID_ERROR(10088, "delete tenant by id error", "删除租户错误"), + VERIFY_OS_TENANT_CODE_ERROR(10089, "verify os tenant code error", 
"操作系统租户验证错误"), + CREATE_USER_ERROR(10090, "create user error", "创建用户错误"), + QUERY_USER_LIST_PAGING_ERROR(10091, "query user list paging error", "分页查询用户列表错误"), + UPDATE_USER_ERROR(10092, "update user error", "更新用户错误"), + DELETE_USER_BY_ID_ERROR(10093, "delete user by id error", "删除用户错误"), + GRANT_PROJECT_ERROR(10094, "grant project error", "授权项目错误"), + GRANT_RESOURCE_ERROR(10095, "grant resource error", "授权资源错误"), + GRANT_UDF_FUNCTION_ERROR(10096, "grant udf function error", "授权UDF函数错误"), + GRANT_DATASOURCE_ERROR(10097, "grant datasource error", "授权数据源错误"), + GET_USER_INFO_ERROR(10098, "get user info error", "获取用户信息错误"), + USER_LIST_ERROR(10099, "user list error", "查询用户列表错误"), + VERIFY_USERNAME_ERROR(10100, "verify username error", "用户名验证错误"), + UNAUTHORIZED_USER_ERROR(10101, "unauthorized user error", "查询未授权用户错误"), + AUTHORIZED_USER_ERROR(10102, "authorized user error", "查询授权用户错误"), + QUERY_TASK_INSTANCE_LOG_ERROR(10103, "view task instance log error", "查询任务实例日志错误"), + DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR( + 10104, "download task instance log file error", "下载任务日志文件错误"), + CREATE_PROCESS_DEFINITION_ERROR(10105, "create process definition error", "创建工作流错误"), + VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR( + 10106, "verify process definition name unique error", "工作流定义名称验证错误"), + UPDATE_PROCESS_DEFINITION_ERROR(10107, "update process definition error", "更新工作流定义错误"), + RELEASE_PROCESS_DEFINITION_ERROR(10108, "release process definition error", "上线工作流错误"), + QUERY_DETAIL_OF_PROCESS_DEFINITION_ERROR( + 10109, "query detail of process definition error", "查询工作流详细信息错误"), + QUERY_PROCESS_DEFINITION_LIST(10110, "query process definition list", "查询工作流列表错误"), + ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR( + 10111, "encapsulation treeview structure error", "查询工作流树形图数据错误"), + GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR( + 10112, "get tasks list by process definition id error", "查询工作流定义节点信息错误"), + QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR( + 10113, "query process instance list paging 
error", "分页查询工作流实例列表错误"), + QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR( + 10114, "query task list by process instance id error", "查询任务实例列表错误"), + UPDATE_PROCESS_INSTANCE_ERROR(10115, "update process instance error", "更新工作流实例错误"), + QUERY_PROCESS_INSTANCE_BY_ID_ERROR(10116, "query process instance by id error", "查询工作流实例错误"), + DELETE_PROCESS_INSTANCE_BY_ID_ERROR(10117, "delete process instance by id error", "删除工作流实例错误"), + QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR( + 10118, "query sub process instance detail info by task id error", "查询子流程任务实例错误"), + QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR( + 10119, + "query parent process instance detail info by sub process instance id error", + "查询子流程该工作流实例错误"), + QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR( + 10120, "query process instance all variables error", "查询工作流自定义变量信息错误"), + ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR( + 10121, "encapsulation process instance gantt structure error", "查询工作流实例甘特图数据错误"), + QUERY_PROCESS_DEFINITION_LIST_PAGING_ERROR( + 10122, "query process definition list paging error", "分页查询工作流定义列表错误"), + SIGN_OUT_ERROR(10123, "sign out error", "退出错误"), + OS_TENANT_CODE_HAS_ALREADY_EXISTS(10124, "os tenant code has already exists", "操作系统租户已存在"), + IP_IS_EMPTY(10125, "ip is empty", "IP地址不能为空"), + SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE( + 10126, "schedule release is already {0}", "调度配置上线错误[{0}]"), + CREATE_QUEUE_ERROR(10127, "create queue error", "创建队列错误"), + QUEUE_NOT_EXIST(10128, "queue {0} not exists", "队列ID[{0}]不存在"), + QUEUE_VALUE_EXIST(10129, "queue value {0} already exists", "队列值[{0}]已存在"), + QUEUE_NAME_EXIST(10130, "queue name {0} already exists", "队列名称[{0}]已存在"), + UPDATE_QUEUE_ERROR(10131, "update queue error", "更新队列信息错误"), + NEED_NOT_UPDATE_QUEUE(10132, "no content changes, no updates are required", "数据未变更,不需要更新队列信息"), + VERIFY_QUEUE_ERROR(10133, "verify queue error", "验证队列信息错误"), + NAME_NULL(10134, "name must be not null", "名称不能为空"), + 
NAME_EXIST(10135, "name {0} already exists", "名称[{0}]已存在"), + SAVE_ERROR(10136, "save error", "保存错误"), + DELETE_PROJECT_ERROR_DEFINES_NOT_NULL( + 10137, "please delete the process definitions in project first!", "请先删除全部工作流定义"), + BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR( + 10117, "batch delete process instance by ids {0} error", "批量删除工作流实例错误: {0}"), + PREVIEW_SCHEDULE_ERROR(10139, "preview schedule error", "预览调度配置错误"), + PARSE_TO_CRON_EXPRESSION_ERROR(10140, "parse cron to cron expression error", "解析调度表达式错误"), + SCHEDULE_START_TIME_END_TIME_SAME( + 10141, "The start time must not be the same as the end", "开始时间不能和结束时间一样"), + DELETE_TENANT_BY_ID_FAIL( + 10142, + "delete tenant by id fail, for there are {0} process instances in executing using it", + "删除租户失败,有[{0}]个运行中的工作流实例正在使用"), + DELETE_TENANT_BY_ID_FAIL_DEFINES( + 10143, + "delete tenant by id fail, for there are {0} process definitions using it", + "删除租户失败,有[{0}]个工作流定义正在使用"), + DELETE_TENANT_BY_ID_FAIL_USERS( + 10144, + "delete tenant by id fail, for there are {0} users using it", + "删除租户失败,有[{0}]个用户正在使用"), + DELETE_WORKER_GROUP_BY_ID_FAIL( + 10145, + "delete worker group by id fail, for there are {0} process instances in executing using it", + "删除Worker分组失败,有[{0}]个运行中的工作流实例正在使用"), + QUERY_WORKER_GROUP_FAIL(10146, "query worker group fail ", "查询worker分组失败"), + DELETE_WORKER_GROUP_FAIL(10147, "delete worker group fail ", "删除worker分组失败"), + USER_DISABLED(10148, "The current user is disabled", "当前用户已停用"), + COPY_PROCESS_DEFINITION_ERROR( + 10149, "copy process definition from {0} to {1} error : {2}", "从{0}复制工作流到{1}错误 : {2}"), + MOVE_PROCESS_DEFINITION_ERROR( + 10150, "move process definition from {0} to {1} error : {2}", "从{0}移动工作流到{1}错误 : {2}"), + SWITCH_PROCESS_DEFINITION_VERSION_ERROR( + 10151, "Switch process definition version error", "切换工作流版本出错"), + SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_ERROR( + 10152, + "Switch process definition version error: not exists process definition, 
[process definition id {0}]", + "切换工作流版本出错:工作流不存在,[工作流id {0}]"), + SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_VERSION_ERROR( + 10153, + "Switch process definition version error: not exists process definition version, [process definition id {0}] [version number {1}]", + "切换工作流版本出错:工作流版本信息不存在,[工作流id {0}] [版本号 {1}]"), + QUERY_PROCESS_DEFINITION_VERSIONS_ERROR( + 10154, "query process definition versions error", "查询工作流历史版本信息出错"), + DELETE_PROCESS_DEFINITION_VERSION_ERROR( + 10156, "delete process definition version error", "删除工作流历史版本出错"), + + QUERY_USER_CREATED_PROJECT_ERROR( + 10157, "query user created project error error", "查询用户创建的项目错误"), + PROCESS_DEFINITION_CODES_IS_EMPTY(10158, "process definition codes is empty", "工作流CODES不能为空"), + BATCH_COPY_PROCESS_DEFINITION_ERROR(10159, "batch copy process definition error", "复制工作流错误"), + BATCH_MOVE_PROCESS_DEFINITION_ERROR(10160, "batch move process definition error", "移动工作流错误"), + QUERY_WORKFLOW_LINEAGE_ERROR(10161, "query workflow lineage error", "查询血缘失败"), + + QUERY_WORKFLOW_LEVEL_IN_LINEAGE_ERROR( + 1016101, "query workflow level in lineage error", "查询血缘中的工作流层级失败"), + + QUERY_WORKFLOW_LINEAGE_ERROR_NOT_EXIST_PROCESS_INSTANCE( + 1016102, + "query workflow lineage error, not exist process instance [{0}]", + "查询血缘失败,不存在工作流实例[{0}]"), + QUERY_AUTHORIZED_AND_USER_CREATED_PROJECT_ERROR( + 10162, "query authorized and user created project error error", "查询授权的和用户创建的项目错误"), + DELETE_PROCESS_DEFINITION_EXECUTING_FAIL( + 10163, + "delete process definition [{0}] fail, for there are [{1}] process instances in executing using it", + "删除工作流定义[{0}]失败,有[{1}]个运行中的工作流实例正在使用"), + CHECK_OS_TENANT_CODE_ERROR( + 10164, + "Tenant code invalid, should follow linux's users naming conventions", + "非法的租户名,需要遵守 Linux 用户命名规范"), + FORCE_TASK_SUCCESS_ERROR(10165, "force task success error", "强制成功任务实例错误"), + TASK_INSTANCE_STATE_OPERATION_ERROR( + 10166, + "the status of task instance {0} is {1},Cannot perform force success 
operation", + "任务实例[{0}]的状态是[{1}],无法执行强制成功操作"), + DATASOURCE_TYPE_NOT_EXIST(10167, "data source type not exist", "数据源类型不存在"), + PROCESS_DEFINITION_NAME_EXIST( + 10168, "process definition name {0} already exists", "工作流定义名称[{0}]已存在"), + DATASOURCE_DB_TYPE_ILLEGAL(10169, "datasource type illegal", "数据源类型参数不合法"), + DATASOURCE_PORT_ILLEGAL(10170, "datasource port illegal", "数据源端口参数不合法"), + DATASOURCE_OTHER_PARAMS_ILLEGAL(10171, "datasource other params illegal", "数据源其他参数不合法"), + DATASOURCE_NAME_ILLEGAL(10172, "datasource name illegal", "数据源名称不合法"), + DATASOURCE_HOST_ILLEGAL(10173, "datasource host illegal", "数据源HOST不合法"), + DELETE_WORKER_GROUP_NOT_EXIST(10174, "delete worker group not exist ", "删除worker分组不存在"), + CREATE_WORKER_GROUP_FORBIDDEN_IN_DOCKER( + 10175, "create worker group forbidden in docker ", "创建worker分组在docker中禁止"), + DELETE_WORKER_GROUP_FORBIDDEN_IN_DOCKER( + 10176, "delete worker group forbidden in docker ", "删除worker分组在docker中禁止"), + WORKER_ADDRESS_INVALID(10177, "worker address {0} invalid", "worker地址[{0}]无效"), + QUERY_WORKER_ADDRESS_LIST_FAIL(10178, "query worker address list fail ", "查询worker地址列表失败"), + TRANSFORM_PROJECT_OWNERSHIP( + 10179, "Please transform project ownership [{0}]", "请先转移项目所有权[{0}]"), + QUERY_ALERT_GROUP_ERROR(10180, "query alert group error", "查询告警组错误"), + CURRENT_LOGIN_USER_TENANT_NOT_EXIST( + 10181, "the tenant of the currently login user is not specified", "未指定当前登录用户的租户"), + REVOKE_PROJECT_ERROR(10182, "revoke project error", "撤销项目授权错误"), + QUERY_AUTHORIZED_USER(10183, "query authorized user error", "查询拥有项目权限的用户错误"), + + DATASOURCE_CONNECT_REJECT_KEYWORD( + 10184, + "data source connection does not allow the [{0}] keyword", + "数据源连接参数不允许使用[{0}]关键字"), + PROJECT_NOT_EXIST(10190, "This project was not found. 
Please refresh page.", "该项目不存在,请刷新页面"), + TASK_INSTANCE_HOST_IS_NULL(10191, "task instance host is null", "任务实例host为空"), + QUERY_EXECUTING_WORKFLOW_ERROR(10192, "query executing workflow error", "查询运行的工作流实例错误"), + DELETE_PROCESS_DEFINITION_USE_BY_OTHER_FAIL( + 10193, + "delete process definition [{0}] fail, cause used by other tasks: {1}", + "删除工作流[{0}]失败,被其他任务引用:{1}"), + DELETE_TASK_USE_BY_OTHER_FAIL( + 10194, + "delete task {0} fail, cause used by other tasks: {1}", + "删除任务 {0} 失败,被其他任务引用:{1}"), + TASK_WITH_DEPENDENT_ERROR(10195, "task used in other tasks", "删除被其他任务引用"), + DELETE_PROCESS_DEFINITION_SCHEDULE_CRON_ERROR( + 10196, "delete workflow [{0}] schedule error", "删除工作流 [{0}] 调度配置错误"), + DELETE_PROCESS_DEFINITION_SUCCESS(10197, "delete workflow [{0}] success", "删除工作流 [{0}] 成功"), + LIST_TASK_TYPE_ERROR(10200, "list task type error", "查询任务类型列表错误"), + DELETE_TASK_TYPE_ERROR(10200, "delete task type error", "删除任务类型错误"), + ADD_TASK_TYPE_ERROR(10200, "add task type error", "添加任务类型错误"), + + LIST_ALERT_SERVERS_ERROR(10198, "list AlertServer error", "查询AlertServer列表错误"), + + LIST_API_SERVERS_ERROR(10199, "list ApiServer error", "查询ApiServer列表错误"), + + LISTING_EXECUTING_WORKFLOWS_BY_MASTER_ERROR( + 10200, "listing executing workflows by master address error", "查询Master中运行的工作流实例信息错误"), + LISTING_DISPATCHING_TASK_INSTANCES_BY_MASTER_ERROR( + 10201, + "listing dispatching TaskInstance by master address error", + "查询Master中正在分发的任务实例信息错误"), + LISTING_EXECUTING_TASK_EXECUTION_CONTEXT_BY_WORKER_ERROR( + 10202, + "listing executing TaskExecutionContext by worker address error", + "查询Worker中正在执行的任务上下文信息错误"), + QUERY_TASK_EXECUTION_CONTEXT_DETAIL_ERROR( + 10203, "query task execution context detail error", "查询Worker中的task上下文详情错误"), + CLEAN_TASK_INSTANCE_ERROR(10204, "clean task instance state error", "清除任务实例状态错误"), + CLEAN_TASK_INSTANCE_ERROR_WORKFLOW_INSTANCE_IS_RUNNING( + 10205, + "clean task instance state error, the related workflow instance is running", + 
"清除任务实例状态错误,对应的工作流实例在运行中"), + + CLEAN_TASK_INSTANCE_ERROR_COMMAND_ALREADY_EXIST( + 10206, + "clean task instance state error, the related workflow instance is already exist a state clean operation, please try later", + "清除任务实例状态错误,对应的工作流实例已经有一个清除状态的操作,请稍后再试"), + QUERY_EXECUTING_WORKFLOWS_DETAIL_BY_ID( + 10207, "query executing workflows detail by id error", "查询Master中运行的工作流实例详情错误"), + + CLEAN_TASK_INSTANCE_ERROR_CURRENT_TASK_INSTANCE_IS_IN_ISOLATION_CHAIN( + 10208, + "clean task instance state error, the current task instance is in coronation chain, please cancel the isolation first", + "清除任务实例状态错误,当前的任务实例在隔离链,请先解除隔离"), + + CLEAN_TASK_INSTANCE_ERROR_CURRENT_TASK_INSTANCE_IS_IN_PAUSE( + 10209, + "clean task instance state error, the current task instance is in coronation chain or isolation pause, no need to clean, it will be restored automatically when the condition is met", + "清除任务实例状态错误,当前的任务实例在加冕或隔离暂停中,无需清除重跑,条件允许后将自动恢复"), + + PROJECT_CODE_NOT_EXIST( + 10210, "This project {0} was not found. 
Please refresh page.", "项目{0}不存在,请刷新页面"), + CLEAN_TASK_INSTANCE_ERROR_UPSTREAM_WORKFLOW_INSTANCE_IS_RUNNING( + 10211, + "clean task instance state error, the related upstream workflow instance [{0}]is running", + "清除任务实例状态错误,对应工作流实例为子工作流,但其引用该工作流的上游工作流[{0}]正在运行中"), + + DEPENDENT_RUN_ERROR(10212, "dependent run error", "依赖链运行操作失败"), + + CREATE_BATCH_RESOURCE_NOTES(10204, "create batch resource error", "创建资源错误"), + QUERY_PROJECT_DETAILS_BY_NAME_ERROR(10205, "query project details by name error", "查询项目详细信息错误"), + UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found", "UDF函数不存在"), + UDF_FUNCTION_EXISTS(20002, "UDF function already exists", "UDF函数已存在"), + RESOURCE_NOT_EXIST(20004, "resource not exist", "资源不存在"), + RESOURCE_EXIST(20005, "resource already exists", "资源已存在"), + RESOURCE_SUFFIX_NOT_SUPPORT_VIEW( + 20006, "resource suffix do not support online viewing", "资源文件后缀不支持查看"), + RESOURCE_SIZE_EXCEED_LIMIT(20007, "upload resource file size exceeds limit", "上传资源文件大小超过限制"), + RESOURCE_SUFFIX_FORBID_CHANGE( + 20008, "resource suffix not allowed to be modified", "资源文件后缀不支持修改"), + UDF_RESOURCE_SUFFIX_NOT_JAR( + 20009, "UDF resource suffix name must be jar", "UDF资源文件后缀名只支持[jar]"), + HDFS_COPY_FAIL(20010, "hdfs copy {0} -> {1} fail", "hdfs复制失败:[{0}] -> [{1}]"), + RESOURCE_FILE_EXIST( + 20011, + "resource file {0} already exists in hdfs,please delete it or change name!", + "资源文件[{0}]在hdfs中已存在,请删除或修改资源名"), + RESOURCE_FILE_NOT_EXIST(20012, "resource file {0} not exists !", "资源文件[{0}]不存在"), + UDF_RESOURCE_IS_BOUND( + 20013, "udf resource file is bound by UDF functions:{0}", "udf函数绑定了资源文件[{0}]"), + RESOURCE_IS_USED(20014, "resource file is used by process definition", "资源文件被上线的流程定义使用了"), + PARENT_RESOURCE_NOT_EXIST(20015, "parent resource not exist", "父资源文件不存在"), + RESOURCE_NOT_EXIST_OR_NO_PERMISSION( + 20016, + "resource not exist or no permission,please view the task node and remove error resource", + "请检查任务节点并移除无权限或者已删除的资源"), + RESOURCE_IS_AUTHORIZED( + 20017, + "resource is 
authorized to user {0},suffix not allowed to be modified", + "资源文件已授权其他用户[{0}],后缀不允许修改"), + RESOURCE_HAS_FOLDER( + 20018, "There are files or folders in the current directory:{0}", "当前目录下有文件或文件夹[{0}]"), + + BATCH_RESOURCE_NAME_REPEAT(20019, "duplicate file names in this batch", "此批处理中存在重复的文件名"), + RESOURCE_FILE_IS_EMPTY(20020, "resource file [{0}] is empty", "资源文件 [{0}] 内容不能为空"), + + RESOURCE_OWNER_OR_TENANT_CHANGE_ERROR( + 20021, + "resource[{0}] owner is deleted or the tenant is unbound, the operation is not allowed", + "资源[{0}]所属用户被删除或者其租户被解除绑定, 无法进行该操作"), + + RESOURCE_CREATE_ERROR(20022, "resource[{0}] create error", "资源[{0}]创建失败"), + + UPLOAD_FOLDER_WARNING( + 20023, + "upload folder warning, create resource failed[{0}], delete resource failed[{1}], other resources upload success", + "文件夹上传警告,以下资源创建失败[{0}], 以下资源覆盖删除失败[{1}], 其余均已上传成功"), + + UPLOAD_FOLDER_CREATE_WARNING( + 20024, + "upload folder warning, create resource failed[{0}], other resources upload success", + "文件夹上传警告,以下资源创建失败[{0}], 其余均已上传成功"), + + UPLOAD_FOLDER_DELETE_WARNING( + 20025, + "upload folder warning, delete resource failed[{0}], other resources upload success", + "文件夹上传警告, 以下资源覆盖删除失败[{0}], 其余均已上传成功"), + + RESOURCE_LIST_IS_USED( + 20026, "resource file is used by process definition, [{0}]", "资源文件被上线的流程定义使用了, [{0}]"), + + RESOURCE_IS_USED_BY_PROCESS( + 20027, "resource file is used by process definition", "资源文件被流程定义使用了"), + + USER_NO_OPERATION_PERM(30001, "user has no operation privilege", "当前用户没有操作权限"), + USER_NO_OPERATION_PROJECT_PERM( + 30002, + "User [{0}] does not have this operation permission under the [{1}] project", + "用户[{0}]在[{1}]项目下无此操作权限"), + + USER_CAN_NOT_OPERATION( + 30003, "User [{0}] does not have permission for Project [{1}]", "用户[{0}]在项目[{1}]下无此权限"), + + PROCESS_INSTANCE_NOT_EXIST(50001, "process instance {0} does not exist", "工作流实例[{0}]不存在"), + PROCESS_INSTANCE_EXIST(50002, "process instance {0} already exists", "工作流实例[{0}]已存在"), + PROCESS_DEFINE_NOT_EXIST(50003, "process 
definition [{0}] does not exist", "工作流定义[{0}]不存在"), + PROCESS_DEFINE_NOT_RELEASE( + 50004, + "process definition {0} process version {1} not on line", + "工作流定义[{0}] 工作流版本[{1}]不是上线状态"), + SUB_PROCESS_DEFINE_NOT_RELEASE( + 50004, "exist sub process definition not on line", "存在子工作流定义不是上线状态"), + PROCESS_INSTANCE_ALREADY_CHANGED( + 50005, "the status of process instance {0} is already {1}", "工作流实例[{0}]的状态已经是[{1}]"), + PROCESS_INSTANCE_STATE_OPERATION_ERROR( + 50006, + "the status of process instance {0} is {1},Cannot perform {2} operation", + "工作流实例[{0}]的状态是[{1}],无法执行[{2}]操作"), + SUB_PROCESS_INSTANCE_NOT_EXIST( + 50007, "the task belong to process instance does not exist", "子工作流实例不存在"), + PROCESS_DEFINE_NOT_ALLOWED_EDIT( + 50008, "process definition {0} does not allow edit", "工作流定义[{0}]不允许修改"), + PROCESS_INSTANCE_EXECUTING_COMMAND( + 50009, + "process instance {0} is executing the command, please wait ...", + "工作流实例[{0}]正在执行命令,请稍等..."), + PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE( + 50010, "process instance {0} is not sub process instance", "工作流实例[{0}]不是子工作流实例"), + TASK_INSTANCE_STATE_COUNT_ERROR(50011, "task instance state count error", "查询各状态任务实例数错误"), + COUNT_PROCESS_INSTANCE_STATE_ERROR(50012, "count process instance state error", "查询各状态流程实例数错误"), + COUNT_PROCESS_DEFINITION_USER_ERROR( + 50013, "count process definition user error", "查询各用户流程定义数错误"), + START_PROCESS_INSTANCE_ERROR(50014, "start process instance error", "运行工作流实例错误"), + BATCH_START_PROCESS_INSTANCE_ERROR( + 50014, "batch start process instance error: {0}", "批量运行工作流实例错误: {0}"), + PROCESS_INSTANCE_ERROR(50014, "process instance delete error: {0}", "工作流实例删除[{0}]错误"), + EXECUTE_PROCESS_INSTANCE_ERROR(50015, "execute process instance error", "操作工作流实例错误"), + CHECK_PROCESS_DEFINITION_ERROR(50016, "check process definition error", "工作流定义错误"), + QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR( + 50017, "query recipients and copyers by process definition error", "查询收件人和抄送人错误"), + 
DATA_IS_NOT_VALID(50017, "data {0} not valid", "数据[{0}]无效"), + DATA_IS_NULL(50018, "data {0} is null", "数据[{0}]不能为空"), + PROCESS_NODE_HAS_CYCLE(50019, "process node has cycle", "流程节点间存在循环依赖"), + PROCESS_NODE_S_PARAMETER_INVALID(50020, "process node {0} parameter invalid", "流程节点[{0}]参数无效"), + PROCESS_DEFINE_STATE_ONLINE( + 50021, "process definition [{0}] is already on line", "工作流定义[{0}]已上线"), + DELETE_PROCESS_DEFINE_BY_CODE_ERROR( + 50022, "delete process definition by code error", "删除工作流定义错误"), + SCHEDULE_CRON_STATE_ONLINE( + 50023, "the status of schedule {0} is already on line", "调度配置[{0}]已上线"), + DELETE_SCHEDULE_CRON_BY_ID_ERROR(50024, "delete schedule by id error", "删除调度配置错误"), + BATCH_DELETE_PROCESS_DEFINE_ERROR( + 50025, "batch delete process definition error", "批量删除工作流定义错误"), + BATCH_DELETE_PROCESS_DEFINE_BY_CODES_ERROR( + 50026, "batch delete process definition by codes error {0}", "批量删除工作流定义错误 [{0}]"), + DELETE_PROCESS_DEFINE_BY_CODES_ERROR( + 50026, "delete process definition by codes {0} error", "删除工作流定义[{0}]错误"), + TENANT_NOT_SUITABLE( + 50027, + "there is not any tenant suitable, please choose a tenant available.", + "没有合适的租户,请选择可用的租户"), + EXPORT_PROCESS_DEFINE_BY_ID_ERROR(50028, "export process definition by id error", "导出工作流定义错误"), + BATCH_EXPORT_PROCESS_DEFINE_BY_IDS_ERROR( + 50028, "batch export process definition by ids error", "批量导出工作流定义错误"), + IMPORT_PROCESS_DEFINE_ERROR(50029, "import process definition error", "导入工作流定义错误"), + TASK_DEFINE_NOT_EXIST(50030, "task definition [{0}] does not exist", "任务定义[{0}]不存在"), + CREATE_PROCESS_TASK_RELATION_ERROR(50032, "create process task relation error", "创建工作流任务关系错误"), + PROCESS_TASK_RELATION_NOT_EXIST( + 50033, "process task relation [{0}] does not exist", "工作流任务关系[{0}]不存在"), + PROCESS_TASK_RELATION_EXIST( + 50034, + "process task relation is already exist, processCode:[{0}]", + "工作流任务关系已存在, processCode:[{0}]"), + PROCESS_DAG_IS_EMPTY(50035, "process dag is empty", "工作流dag是空"), + 
CHECK_PROCESS_TASK_RELATION_ERROR(50036, "check process task relation error", "工作流任务关系参数错误"), + CREATE_TASK_DEFINITION_ERROR(50037, "create task definition error", "创建任务错误"), + UPDATE_TASK_DEFINITION_ERROR(50038, "update task definition error", "更新任务定义错误"), + QUERY_TASK_DEFINITION_VERSIONS_ERROR( + 50039, "query task definition versions error", "查询任务历史版本信息出错"), + SWITCH_TASK_DEFINITION_VERSION_ERROR(50040, "Switch task definition version error", "切换任务版本出错"), + DELETE_TASK_DEFINITION_VERSION_ERROR( + 50041, "delete task definition version error", "删除任务历史版本出错"), + DELETE_TASK_DEFINE_BY_CODE_ERROR(50042, "delete task definition by code error", "删除任务定义错误"), + QUERY_DETAIL_OF_TASK_DEFINITION_ERROR( + 50043, "query detail of task definition error", "查询任务详细信息错误"), + QUERY_TASK_DEFINITION_LIST_PAGING_ERROR( + 50044, "query task definition list paging error", "分页查询任务定义列表错误"), + TASK_DEFINITION_NAME_EXISTED( + 50045, "task definition name [{0}] already exists", "任务定义名称[{0}]已经存在"), + RELEASE_TASK_DEFINITION_ERROR(50046, "release task definition error", "上线任务错误"), + MOVE_PROCESS_TASK_RELATION_ERROR(50047, "move process task relation error", "移动任务到其他工作流错误"), + DELETE_TASK_PROCESS_RELATION_ERROR(50048, "delete process task relation error", "删除工作流任务关系错误"), + QUERY_TASK_PROCESS_RELATION_ERROR(50049, "query process task relation error", "查询工作流任务关系错误"), + TASK_DEFINE_STATE_ONLINE(50050, "task definition [{0}] is already on line", "任务定义[{0}]已上线"), + TASK_HAS_DOWNSTREAM(50051, "Task exists downstream [{0}] dependence", "任务存在下游[{0}]依赖"), + TASK_HAS_UPSTREAM(50052, "Task [{0}] exists upstream dependence", "任务[{0}]存在上游依赖"), + MAIN_TABLE_USING_VERSION(50053, "the version that the master table is using", "主表正在使用该版本"), + PROJECT_PROCESS_NOT_MATCH(50054, "the project and the process is not match", "项目和工作流不匹配"), + DELETE_EDGE_ERROR(50055, "delete edge error", "删除工作流任务连接线错误"), + NOT_SUPPORT_UPDATE_TASK_DEFINITION( + 50056, "task state does not support modification", "当前任务不支持修改"), + 
NOT_SUPPORT_COPY_TASK_TYPE(50057, "task type [{0}] does not support copy", "不支持复制的任务类型[{0}]"), + BATCH_EXECUTE_PROCESS_INSTANCE_ERROR( + 50058, "change process instance status error: {0}", "修改工作实例状态错误: {0}"), + PROCESS_DEFINE_RELEASE( + 50059, + "process definition {0} process version {1} on line", + "工作流定义[{0}] 工作流版本[{1}]是上线状态"), + TASK_HAVE_EMPTY_LOCAL_PARAM(50060, "task {0} have empty local parameter", "任务[{0}]存在为空的本地参数"), + HDFS_NOT_STARTUP(60001, "hdfs not startup", "hdfs未启用"), + STORAGE_NOT_STARTUP(60002, "storage not startup", "存储未启用"), + S3_CANNOT_RENAME(60003, "directory cannot be renamed", "S3无法重命名文件夹"), + + /** for monitor */ + QUERY_DATABASE_STATE_ERROR(70001, "query database state error", "查询数据库状态错误"), + + CREATE_ACCESS_TOKEN_ERROR(70010, "create access token error", "创建访问token错误"), + GENERATE_TOKEN_ERROR(70011, "generate token error", "生成token错误"), + QUERY_ACCESSTOKEN_LIST_PAGING_ERROR( + 70012, "query access token list paging error", "分页查询访问token列表错误"), + UPDATE_ACCESS_TOKEN_ERROR(70013, "update access token error", "更新访问token错误"), + DELETE_ACCESS_TOKEN_ERROR(70014, "delete access token error", "删除访问token错误"), + ACCESS_TOKEN_NOT_EXIST(70015, "access token not exist", "访问token不存在"), + QUERY_ACCESSTOKEN_BY_USER_ERROR(70016, "query access token by user error", "查询访问指定用户的token错误"), + + COMMAND_STATE_COUNT_ERROR(80001, "task instance state count error", "查询各状态任务实例数错误"), + NEGTIVE_SIZE_NUMBER_ERROR(80002, "query size number error", "查询size错误"), + START_TIME_BIGGER_THAN_END_TIME_ERROR( + 80003, "start time bigger than end time error", "开始时间在结束时间之后错误"), + QUEUE_COUNT_ERROR(90001, "queue count error", "查询队列数据错误"), + + KERBEROS_STARTUP_STATE(100001, "get kerberos startup state error", "获取kerberos启动状态错误"), + + // audit log + QUERY_AUDIT_LOG_LIST_PAGING(10057, "query resources list paging", "分页查询资源列表错误"), + + // plugin + PLUGIN_NOT_A_UI_COMPONENT( + 110001, "query plugin error, this plugin has no UI component", "查询插件错误,此插件无UI组件"), + QUERY_PLUGINS_RESULT_IS_NULL( 
+ 110002, + "query alarm plugins result is empty, please check the startup status of the alarm component and confirm that the relevant alarm plugin is successfully registered", + "查询告警插件为空, 请检查告警组件启动状态并确认相关告警插件已注册成功"), + QUERY_PLUGINS_ERROR(110003, "query plugins error", "查询插件错误"), + QUERY_PLUGIN_DETAIL_RESULT_IS_NULL(110004, "query plugin detail result is null", "查询插件详情结果为空"), + + UPDATE_ALERT_PLUGIN_INSTANCE_ERROR( + 110005, "update alert plugin instance error", "更新告警组和告警组插件实例错误"), + DELETE_ALERT_PLUGIN_INSTANCE_ERROR( + 110006, "delete alert plugin instance error", "删除告警组和告警组插件实例错误"), + GET_ALERT_PLUGIN_INSTANCE_ERROR(110007, "get alert plugin instance error", "获取告警组和告警组插件实例错误"), + CREATE_ALERT_PLUGIN_INSTANCE_ERROR( + 110008, "create alert plugin instance error", "创建告警组和告警组插件实例错误"), + QUERY_ALL_ALERT_PLUGIN_INSTANCE_ERROR( + 110009, "query all alert plugin instance error", "查询所有告警实例失败"), + PLUGIN_INSTANCE_ALREADY_EXIT(110010, "plugin instance already exit", "该告警插件实例已存在"), + LIST_PAGING_ALERT_PLUGIN_INSTANCE_ERROR( + 110011, "query plugin instance page error", "分页查询告警实例失败"), + DELETE_ALERT_PLUGIN_INSTANCE_ERROR_HAS_ALERT_GROUP_ASSOCIATED( + 110012, + "failed to delete the alert instance, there is an alarm group associated with this alert instance", + "删除告警实例失败,存在与此告警实例关联的警报组"), + PROCESS_DEFINITION_VERSION_IS_USED( + 110013, "this process definition version is used", "此工作流定义版本被使用"), + + CREATE_ENVIRONMENT_ERROR(120001, "create environment error", "创建环境失败"), + ENVIRONMENT_NAME_EXISTS(120002, "this environment name [{0}] already exists", "环境名称[{0}]已经存在"), + ENVIRONMENT_NAME_IS_NULL(120003, "this environment name shouldn't be empty.", "环境名称不能为空"), + ENVIRONMENT_CONFIG_IS_NULL(120004, "this environment config shouldn't be empty.", "环境配置信息不能为空"), + UPDATE_ENVIRONMENT_ERROR(120005, "update environment [{0}] info error", "更新环境[{0}]信息失败"), + DELETE_ENVIRONMENT_ERROR(120006, "delete environment error", "删除环境信息失败"), + DELETE_ENVIRONMENT_RELATED_TASK_EXISTS( + 120007, + 
"this environment has been used in tasks,so you can't delete it.", + "该环境已经被任务使用,所以不能删除该环境信息"), + QUERY_ENVIRONMENT_BY_NAME_ERROR(1200008, "not found environment [{0}] ", "查询环境名称[{0}]信息不存在"), + QUERY_ENVIRONMENT_BY_CODE_ERROR(1200009, "not found environment [{0}] ", "查询环境编码[{0}]不存在"), + QUERY_ENVIRONMENT_ERROR(1200010, "login user query environment error", "分页查询环境列表错误"), + VERIFY_ENVIRONMENT_ERROR(1200011, "verify environment error", "验证环境信息错误"), + GET_RULE_FORM_CREATE_JSON_ERROR( + 1200012, "get rule form create json error", "获取规则 FROM-CREATE-JSON 错误"), + QUERY_RULE_LIST_PAGING_ERROR(1200013, "query rule list paging error", "获取规则分页列表错误"), + QUERY_RULE_LIST_ERROR(1200014, "query rule list error", "获取规则列表错误"), + QUERY_RULE_INPUT_ENTRY_LIST_ERROR(1200015, "query rule list error", "获取规则列表错误"), + QUERY_EXECUTE_RESULT_LIST_PAGING_ERROR( + 1200016, "query execute result list paging error", "获取数据质量任务结果分页错误"), + GET_DATASOURCE_OPTIONS_ERROR(1200017, "get datasource options error", "获取数据源Options错误"), + GET_DATASOURCE_TABLES_ERROR(1200018, "get datasource tables error", "获取数据源表列表错误"), + GET_DATASOURCE_TABLE_COLUMNS_ERROR(1200019, "get datasource table columns error", "获取数据源表列名错误"), + + CREATE_CLUSTER_ERROR(120020, "create cluster error", "创建集群失败"), + CLUSTER_NAME_EXISTS(120021, "this cluster name [{0}] already exists", "集群名称[{0}]已经存在"), + CLUSTER_NAME_IS_NULL(120022, "this cluster name shouldn't be empty.", "集群名称不能为空"), + CLUSTER_CONFIG_IS_NULL(120023, "this cluster config shouldn't be empty.", "集群配置信息不能为空"), + UPDATE_CLUSTER_ERROR(120024, "update cluster [{0}] info error", "更新集群[{0}]信息失败"), + DELETE_CLUSTER_ERROR(120025, "delete cluster error", "删除集群信息失败"), + DELETE_CLUSTER_RELATED_TASK_EXISTS( + 120026, + "this cluster has been used in tasks,so you can't delete it.", + "该集群已经被任务使用,所以不能删除该集群信息"), + QUERY_CLUSTER_BY_NAME_ERROR(1200027, "not found cluster [{0}] ", "查询集群名称[{0}]信息不存在"), + QUERY_CLUSTER_BY_CODE_ERROR(1200028, "not found cluster [{0}] ", "查询集群编码[{0}]不存在"), + 
QUERY_CLUSTER_ERROR(1200029, "login user query cluster error", "分页查询集群列表错误"), + VERIFY_CLUSTER_ERROR(1200030, "verify cluster error", "验证集群信息错误"), + CLUSTER_PROCESS_DEFINITIONS_IS_INVALID( + 1200031, "cluster worker groups is invalid format", "集群关联的工作组参数解析错误"), + UPDATE_CLUSTER_PROCESS_DEFINITION_RELATION_ERROR( + 1200032, + "You can't modify the process definition, because the process definition [{0}] and this cluster [{1}] already be used in the task [{2}]", + "您不能修改集群选项,因为该工作流组 [{0}] 和 该集群 [{1}] 已经被用在任务 [{2}] 中"), + CLUSTER_NOT_EXISTS(120033, "this cluster can not found in db.", "集群配置数据库里查询不到为空"), + DELETE_CLUSTER_RELATED_NAMESPACE_EXISTS( + 120034, + "this cluster has been used in namespace,so you can't delete it.", + "该集群已经被命名空间使用,所以不能删除该集群信息"), + + TASK_GROUP_NAME_EXSIT( + 130001, "this task group name is repeated in a project", "该任务组名称在一个项目中已经使用"), + TASK_GROUP_SIZE_ERROR(130002, "task group size error", "任务组大小应该为大于1的整数"), + TASK_GROUP_STATUS_ERROR(130003, "task group status error", "任务组已经被关闭"), + TASK_GROUP_FULL(130004, "task group is full", "任务组已经满了"), + TASK_GROUP_USED_SIZE_ERROR( + 130005, "the used size number of task group is dirty", "任务组使用的容量发生了变化"), + TASK_GROUP_QUEUE_RELEASE_ERROR(130006, "failed to release task group queue", "任务组资源释放时出现了错误"), + TASK_GROUP_QUEUE_AWAKE_ERROR(130007, "awake waiting task failed", "任务组使唤醒等待任务时发生了错误"), + CREATE_TASK_GROUP_ERROR(130008, "create task group error", "创建任务组错误"), + UPDATE_TASK_GROUP_ERROR(130009, "update task group list error", "更新任务组错误"), + QUERY_TASK_GROUP_LIST_ERROR(130010, "query task group list error", "查询任务组列表错误"), + CLOSE_TASK_GROUP_ERROR(130011, "close task group error", "关闭任务组错误"), + START_TASK_GROUP_ERROR(130012, "start task group error", "启动任务组错误"), + QUERY_TASK_GROUP_QUEUE_LIST_ERROR(130013, "query task group queue list error", "查询任务组队列列表错误"), + TASK_GROUP_CACHE_START_FAILED(130014, "cache start failed", "任务组相关的缓存启动失败"), + ENVIRONMENT_WORKER_GROUPS_IS_INVALID( + 130015, "environment worker groups 
is invalid format", "环境关联的工作组参数解析错误"), + UPDATE_ENVIRONMENT_WORKER_GROUP_RELATION_ERROR( + 130016, + "You can't modify the worker group, because the worker group [{0}] and this environment [{1}] already be used in the task [{2}]", + "您不能修改工作组选项,因为该工作组 [{0}] 和 该环境 [{1}] 已经被用在任务 [{2}] 中"), + TASK_GROUP_QUEUE_ALREADY_START(130017, "task group queue already start", "节点已经获取任务组资源"), + TASK_GROUP_STATUS_CLOSED(130018, "The task group has been closed.", "任务组已经被关闭"), + TASK_GROUP_STATUS_OPENED(130019, "The task group has been opened.", "任务组已经被开启"), + NOT_ALLOW_TO_DISABLE_OWN_ACCOUNT(130020, "Not allow to disable your own account", "不能停用自己的账号"), + NOT_ALLOW_TO_DELETE_DEFAULT_ALARM_GROUP( + 130030, "Not allow to delete the default alarm group ", "不能删除默认告警组"), + TIME_ZONE_ILLEGAL(130031, "time zone [{0}] is illegal", "时区参数 [{0}] 不合法"), + + TASK_GROUP_USED_SIZE_NOT_EMPTY( + 130032, "the used size number of task group is not empty", "任务组已使用容量不为空"), + + QUERY_K8S_NAMESPACE_LIST_PAGING_ERROR( + 1300001, "login user query k8s namespace list paging error", "分页查询k8s名称空间列表错误"), + K8S_NAMESPACE_EXIST(1300002, "k8s namespace {0} already exists", "k8s命名空间[{0}]已存在"), + CREATE_K8S_NAMESPACE_ERROR(1300003, "create k8s namespace error", "创建k8s命名空间错误"), + UPDATE_K8S_NAMESPACE_ERROR(1300004, "update k8s namespace error", "更新k8s命名空间信息错误"), + K8S_NAMESPACE_NOT_EXIST(1300005, "k8s namespace {0} not exists", "命名空间ID[{0}]不存在"), + K8S_CLIENT_OPS_ERROR(1300006, "k8s error with exception {0}", "k8s操作报错[{0}]"), + VERIFY_K8S_NAMESPACE_ERROR(1300007, "verify k8s and namespace error", "验证k8s命名空间信息错误"), + DELETE_K8S_NAMESPACE_BY_ID_ERROR(1300008, "delete k8s namespace by id error", "删除命名空间错误"), + VERIFY_PARAMETER_NAME_FAILED(1300009, "The file name [{0}] verify failed", "文件命名[{0}]校验失败"), + STORE_OPERATE_CREATE_ERROR(1300010, "create the resource failed", "存储操作失败"), + GRANT_K8S_NAMESPACE_ERROR(1300011, "grant namespace error", "授权资源错误"), + QUERY_UNAUTHORIZED_NAMESPACE_ERROR( + 1300012, "query unauthorized 
namespace error", "查询未授权命名空间错误"), + QUERY_AUTHORIZED_NAMESPACE_ERROR(1300013, "query authorized namespace error", "查询授权命名空间错误"), + QUERY_CAN_USE_K8S_CLUSTER_ERROR( + 1300014, "login user query can used k8s cluster list error", "查询可用k8s集群错误"), + RESOURCE_FULL_NAME_TOO_LONG_ERROR(1300015, "resource's fullname is too long error", "资源文件名过长"), + TENANT_FULL_NAME_TOO_LONG_ERROR(1300016, "tenant's fullname is too long error", "租户名过长"), + USER_PASSWORD_LENGTH_ERROR(1300017, "user's password length error", "用户密码长度错误"), + QUERY_CAN_USE_K8S_NAMESPACE_ERROR( + 1300018, "login user query can used namespace list error", "查询可用命名空间错误"), + FILE_NAME_CONTAIN_RESTRICTIONS( + 1300019, "The file name [{0}] contain restrictions", "文件命名[{0}]包含限制内容"), + FILE_TYPE_IS_RESTRICTIONS(1300020, "The file [{0}] type is restrictions", "文件[{0}]类型为限制类型"), + + NO_CURRENT_OPERATING_PERMISSION( + 1400001, "The current user does not have this permission.", "当前用户无此权限"), + FUNCTION_DISABLED(1400002, "The current feature is disabled.", "当前功能已被禁用"), + SCHEDULE_TIME_NUMBER(1400003, "The number of complement dates exceed 100.", "补数日期个数超过100"), + DESCRIPTION_TOO_LONG_ERROR(1400004, "description is too long error", "描述过长"), + COMPLEMENT_COMMAND_SCHEDULE_TIME_EMPTY( + 1400005, "complement command schedule time can not be empty", "补数数据日期不可为空"), + START_TIME_CAN_NOT_AFTER_END_TIME(1400006, "start time can not after end time", "开始时间不可晚于结束时间"), + CONFIG_FILTER_EMPTY( + 1400007, + "complement time is empty after filtering according to the configuration", + "当前补数时间根据配置过滤后为空"), + PROJECT_NAME_TOO_LONG_ERROR(1400008, "project name is too long error", "项目名称过长"), + + NO_CURRENT_OPERATING_PERMISSION_FOR_RESOURCE( + 1400009, "[{0}] is exist, current user no [{0}] permission", "[{0}]已存在,当前用户无[{0}]权限"), + + ISOLATION_TASK_NOT_EXIST(1500000, "Isolation task not exist", "隔离任务不存在"), + + ISOLATION_TASK_USER_NO_PERMISSION( + 1500001, + "Current login user does not have isolation view permission", + "当前用户不具备当前项目隔离列表查看权限"), + 
ISOLATION_TASK_SUBMIT_ERROR(1500100, "Submit isolation task error", "提交隔离任务异常"), + ISOLATION_TASK_SUBMIT_ERROR_WORKFLOW_INSTANCE_NOT_SUPPORT( + 1500101, + "Submit isolation task error, relate workflow instance [{0}] is not support", + "提交隔离任务异常, 关联的工作流实例:[{0}]暂不支持该操作"), + ISOLATION_TASK_SUBMIT_ERROR_TASK_NOT_EXIST( + 1500103, + "Submit isolation task error, task: [{0}] is not exist", + "提交隔离任务异常, 任务不存在:[{0}]"), + ISOLATION_TASK_SUBMIT_ERROR_EXIST_SUB_ISOLATION_TASK( + 1500104, + "Submit isolation task error, workflow instance: [{0}] exist an sub isolation task", + "提交隔离任务异常, 工作流实例已经存在:[{0}]子隔离任务"), + ISOLATION_TASK_SUBMIT_ERROR_SEND_REQUEST_TO_MASTER_ERROR( + 1500105, + "Submit isolation task error, send request to master error", + "提交隔离任务异常,发送请求给Master异常"), + + ISOLATION_TASK_SUBMIT_USER_NO_PERMISSION( + 1500106, + "Current login user does not have isolation submit permission for project {0}", + "当前用户不具备{0}项目隔离的提交权限"), + + ISOLATION_TASK_ONLINE_ERROR(1500200, "Online isolation task error", "上线隔离任务异常"), + ISOLATION_TASK_ONLINE_ERROR_ALREADY_ONLINE( + 1500201, + "Online isolation task error, the isolation task is already online", + "上线隔离任务异常,该任务已处于隔离中"), + ISOLATION_TASK_ONLINE_ERROR_SEND_REQUEST_TO_MASTER_ERROR( + 1500202, + "Online isolation task error, send request to master error", + "上线隔离任务异常,发送隔离请求给Master异常"), + ISOLATION_TASK_CANCEL_ERROR(1500300, "Cancel isolation task error", "取消隔离任务异常"), + ISOLATION_TASK_CANCEL_ERROR_SEND_REQUEST_TO_MASTER_ERROR( + 1500301, + "Cancel isolation task error, send request to master error", + "取消隔离任务异常,发送隔离请求给Master异常"), + ISOLATION_TASK_CANCEL_ERROR_THE_ISOLATION_ALREADY_CANCEL( + 1500302, + "Cancel isolation task error, this task isolation is already been cancel", + "取消隔离任务异常,该隔离已经下线"), + ISOLATION_TASK_CANCEL_USER_NO_PERMISSION( + 1500304, + "Current login user does not have isolation cancel permission", + "当前用户不具备当前项目隔离取消权限"), + ISOLATION_TASK_LISTING_ERROR(1500400, "Listing isolation task error", "查询隔离任务列表异常"), + 
ISOLATION_TASK_DELETE_ERROR(1500500, "Delete isolation task error", "删除隔离任务异常"), + ISOLATION_TASK_DELETE_ERROR_IS_NOT_OFFLINE( + 1500501, + "Delete isolation task error, the task status is not offline", + "删除隔离任务异常,该隔离任务尚未下线"), + + CORONATION_TASK_PARSE_ERROR(1600000, "Coronation task parse error", "解析加冕任务异常"), + CORONATION_TASK_NOT_EXIST(1600001, "Coronation task not exist", "加冕任务不存在"), + CORONATION_TASK_PARSE_ERROR_TASK_NODE_NAME_IS_NOT_VALIDATED( + 1600002, + "Coronation task parse error, taskNode name is not validated", + "解析加冕任务异常, 任务名不正确"), + + CORONATION_TASK_PARSE_ERROR_TASK_NODE_ALREADY_EXIST( + 1600003, + "Coronation task parse error, workflowInstance: [{0}] taskNode:[{1}] is already in coronation", + "解析加冕任务异常, 工作流实例: [{0}] 任务: [{1}] 已经被加冕"), + + CORONATION_TASK_USER_NO_PERMISSION( + 1600004, + "Current login user does not have coronation view permission", + "当前用户不具备当前项目加冕列表查看权限"), + + CORONATION_TASK_SUBMIT_ERROR(1600100, "Coronation task submit error", "提交加冕任务异常"), + CORONATION_TASK_SUBMIT_USER_NO_PERMISSION( + 1600101, + "Current login user does not have coronation submit permission", + "当前用户不具备当前项目加冕提交权限"), + CORONATION_TASK_ONLINE_ERROR(1600200, "Coronation task online error", "上线加冕任务异常"), + CORONATION_TASK_CANCEL_ERROR(1600300, "Coronation task cancel error", "取消加冕任务异常"), + CORONATION_TASK_CANCEL_USER_NO_PERMISSION( + 1500301, + "Current login user does not have coronation cancel permission", + "当前用户不具备当前项目加冕取消权限"), + CORONATION_TASK_LISTING_ERROR(1600400, "Coronation task listing error", "查询加冕任务异常"), + + OPERATION_ALREADY_EXIST(1800000, "operation already exist", "操作已存在"), + + DATA_SOURCE_HAD_USED( + 1600000, + "data source already used (projectName - workflowName - taskName):{0}", + "数据源正在被使用(项目名 - 工作流名 - 任务名):{0}"), + + COMPLEMENT_CALENDAR_ERROR(1700000, "complement calendar error", "补数日历错误"), + + COMPLEMENT_MISSING_CALENDAR_PARAM( + 1700001, "complement missing calendar param [{0}]", "补数缺少日历参数 [{0}]"), + + 
COMPLEMENT_QUERY_DATE_LIST_ERROR(1700002, "complement query date list error", "补数查询日期列表错误"), + ; + + private final int code; + private final String enMsg; + private final String zhMsg; + + Status(int code, String enMsg, String zhMsg) { + this.code = code; + this.enMsg = enMsg; + this.zhMsg = zhMsg; + } + + public int getCode() { + return this.code; + } + + public String getMsg() { + if (Locale.SIMPLIFIED_CHINESE + .getLanguage() + .equals(LocaleContextHolder.getLocale().getLanguage())) { + return this.zhMsg; + } else { + return this.enMsg; + } + } + + /** Retrieve Status enum entity by status code. */ + public static Optional findStatusBy(int code) { + for (Status status : Status.values()) { + if (code == status.getCode()) { + return Optional.of(status); + } + } + return Optional.empty(); + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/TaskTimeoutStrategy.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/TaskTimeoutStrategy.java new file mode 100644 index 000000000..f304a420e --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/TaskTimeoutStrategy.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.app.common; + +import com.baomidou.mybatisplus.annotation.EnumValue; + +/** task timeout strategy */ +public enum TaskTimeoutStrategy { + + /** 0 warn 1 failed 2 warn+failed */ + WARN(0, "warn"), + FAILED(1, "failed"), + WARNFAILED(2, "warnfailed"); + + TaskTimeoutStrategy(int code, String descp) { + this.code = code; + this.descp = descp; + } + + @EnumValue private final int code; + private final String descp; + + public int getCode() { + return code; + } + + public String getDescp() { + return descp; + } + + public static TaskTimeoutStrategy of(int status) { + for (TaskTimeoutStrategy es : values()) { + if (es.getCode() == status) { + return es; + } + } + throw new IllegalArgumentException("invalid status : " + status); + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/TimeoutFlag.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/TimeoutFlag.java new file mode 100644 index 000000000..5c0e10f93 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/TimeoutFlag.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.app.common; + +import com.baomidou.mybatisplus.annotation.EnumValue; + +/** timeout flag */ +public enum TimeoutFlag { + + /** 0 close 1 open */ + CLOSE(0, "close"), + OPEN(1, "open"); + + TimeoutFlag(int code, String desc) { + this.code = code; + this.desc = desc; + } + + @EnumValue private final int code; + private final String desc; + + public int getCode() { + return code; + } + + public String getDesc() { + return desc; + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UdfType.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UdfType.java new file mode 100644 index 000000000..29cd9daaa --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UdfType.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.app.common; + +import com.baomidou.mybatisplus.annotation.EnumValue; + +/** UDF type */ +public enum UdfType { + + /** 0 hive; 1 spark */ + HIVE(0, "hive"), + SPARK(1, "spark"); + + UdfType(int code, String descp) { + this.code = code; + this.descp = descp; + } + + @EnumValue private final int code; + private final String descp; + + public int getCode() { + return code; + } + + public String getDescp() { + return descp; + } + + public static UdfType of(int type) { + for (UdfType ut : values()) { + if (ut.getCode() == type) { + return ut; + } + } + throw new IllegalArgumentException("invalid type : " + type); + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UserStatusEnum.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UserStatusEnum.java index c62b7e1cf..e8d533d17 100644 --- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UserStatusEnum.java +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UserStatusEnum.java @@ -22,7 +22,7 @@ public enum UserStatusEnum { DISABLE(1, "disable"), ; private final int code; - private final String description; + private final String description; UserStatusEnum(int code, String description) { this.code = code; diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UserTypeEnum.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UserTypeEnum.java index b583e6f48..6b8f3dfb2 100644 --- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UserTypeEnum.java +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UserTypeEnum.java @@ -23,7 +23,7 @@ public enum UserTypeEnum { ; private final int code; - private final String description; + private final String description; UserTypeEnum(int code, String description) { this.code = code; diff --git 
a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/config/ConnectorDataSourceMapperConfig.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/config/ConnectorDataSourceMapperConfig.java new file mode 100644 index 000000000..fe9c5ca63 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/config/ConnectorDataSourceMapperConfig.java @@ -0,0 +1,138 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.seatunnel.app.config; + +import org.apache.seatunnel.app.domain.request.connector.BusinessMode; +import org.apache.seatunnel.app.domain.request.connector.SceneMode; +import org.apache.seatunnel.app.domain.response.connector.ConnectorInfo; +import org.apache.seatunnel.app.domain.response.connector.DataSourceInfo; +import org.apache.seatunnel.common.constants.PluginType; + +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.PropertySource; + +import lombok.Data; + +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.stream.Collectors; + +@Data +@Configuration +@PropertySource( + value = "classpath:connector-datasource-mapper.yaml", + factory = YamlSourceFactory.class) +@ConfigurationProperties(prefix = "connector-datasource-mapper") +public class ConnectorDataSourceMapperConfig { + private Map connectorDatasourceMappers; + private Map sourceDatasourceFeatures; + private Map sinkDatasourceFeatures; + + @Data + public static class ConnectorMapper { + private List dataSources; + } + + @Data + public static class DatasourceFeatures { + private List businessMode; + private List sceneMode; + } + + public Optional> supportedBusinessMode( + String datasourceName, PluginType pluginType) { + if (pluginType.equals(PluginType.SOURCE)) { + return Optional.ofNullable(sourceDatasourceFeatures.get(datasourceName)) + .map( + cm -> + cm.getBusinessMode().stream() + .map(BusinessMode::valueOf) + .collect(Collectors.toList())); + } + if (pluginType.equals(PluginType.SINK)) { + return Optional.ofNullable(sinkDatasourceFeatures.get(datasourceName)) + .map( + cm -> + cm.getBusinessMode().stream() + .map(BusinessMode::valueOf) + .collect(Collectors.toList())); + } + throw new UnsupportedOperationException( + "pluginType : " + pluginType + " not support BusinessMode"); 
+ } + + public Optional> supportedSceneMode( + String datasourceName, PluginType pluginType) { + if (pluginType.equals(PluginType.SOURCE)) { + return Optional.ofNullable(sourceDatasourceFeatures.get(datasourceName)) + .map( + cm -> + cm.getSceneMode().stream() + .map(SceneMode::valueOf) + .collect(Collectors.toList())); + } + if (pluginType.equals(PluginType.SINK)) { + return Optional.ofNullable(sinkDatasourceFeatures.get(datasourceName)) + .map( + cm -> + cm.getSceneMode().stream() + .map(SceneMode::valueOf) + .collect(Collectors.toList())); + } + throw new UnsupportedOperationException( + "pluginType : " + pluginType + " not support SceneMode"); + } + + public Optional findConnectorForDatasourceName(String datasourceName) { + return connectorDatasourceMappers.entrySet().stream() + .map( + en -> { + return en.getValue().getDataSources().stream() + .anyMatch(n -> n.equalsIgnoreCase(datasourceName)) + ? en.getKey() + : null; + }) + .filter(Objects::nonNull) + .findFirst(); + } + + public List findDatasourceNameForConnectors(List connectors) { + + Map connectorMap = + connectors.stream() + .collect( + Collectors.toMap( + connectorInfo -> + connectorInfo.getPluginIdentifier().getPluginName(), + connectorInfo -> connectorInfo)); + + return connectorDatasourceMappers.entrySet().stream() + .filter(en -> connectorMap.containsKey(en.getKey())) + .flatMap( + en -> + en.getValue().getDataSources().stream() + .map( + name -> + new DataSourceInfo( + connectorMap.get(en.getKey()), + name))) + .collect(Collectors.toList()); + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/config/Swagger2.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/config/Swagger2.java index 3eb93a856..8e6df8543 100644 --- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/config/Swagger2.java +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/config/Swagger2.java @@ -19,6 +19,7 @@ import 
org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; + import springfox.documentation.builders.ApiInfoBuilder; import springfox.documentation.builders.PathSelectors; import springfox.documentation.builders.RequestHandlerSelectors; @@ -29,7 +30,7 @@ @Configuration @EnableSwagger2 -public class Swagger2{ +public class Swagger2 { @Bean public Docket createRestApi() { @@ -49,5 +50,4 @@ private ApiInfo apiInfo() { .termsOfServiceUrl("https://seatunnel.apache.org/") .build(); } - } diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/config/YamlSourceFactory.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/config/YamlSourceFactory.java new file mode 100644 index 000000000..f84b40609 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/config/YamlSourceFactory.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.app.config; + +import org.springframework.boot.env.YamlPropertySourceLoader; +import org.springframework.core.env.PropertiesPropertySource; +import org.springframework.core.env.PropertySource; +import org.springframework.core.io.Resource; +import org.springframework.core.io.support.DefaultPropertySourceFactory; +import org.springframework.core.io.support.EncodedResource; + +import java.io.IOException; +import java.util.List; +import java.util.Properties; + +public class YamlSourceFactory extends DefaultPropertySourceFactory { + @Override + public PropertySource createPropertySource(String name, EncodedResource resource) + throws IOException { + if (resource == null) { + return super.createPropertySource(name, resource); + } + Resource resourceResource = resource.getResource(); + if (!resourceResource.exists()) { + return new PropertiesPropertySource(null, new Properties()); + } else if (resourceResource.getFilename().endsWith(".yml") + || resourceResource.getFilename().endsWith(".yaml")) { + List> sources = + new YamlPropertySourceLoader() + .load(resourceResource.getFilename(), resourceResource); + return sources.get(0); + } + return super.createPropertySource(name, resource); + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/AuthController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/AuthController.java index fa83416fd..091190b04 100644 --- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/AuthController.java +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/AuthController.java @@ -20,23 +20,23 @@ import org.apache.seatunnel.app.common.Result; import org.apache.seatunnel.app.service.IRoleService; -import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; -import io.swagger.annotations.ApiOperation; import 
org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; + import javax.annotation.Resource; import javax.validation.constraints.NotNull; -@RequestMapping("/api/v1/auth") +@RequestMapping("/seatunnel/api/v1/auth") @RestController public class AuthController { - @Resource - private IRoleService roleServiceImpl; + @Resource private IRoleService roleServiceImpl; @GetMapping("/userRole") @ApiOperation(value = "check relation between user and role", httpMethod = "GET") @@ -44,7 +44,9 @@ public class AuthController { @ApiImplicitParam(name = "username", value = "user name", dataType = "String"), @ApiImplicitParam(name = "roleName", value = "role name", dataType = "String"), }) - public Result userRole(@RequestParam("username") @NotNull String username, @RequestParam("roleName") @NotNull String roleName){ + public Result userRole( + @RequestParam("username") @NotNull String username, + @RequestParam("roleName") @NotNull String roleName) { final boolean b = roleServiceImpl.checkUserRole(username, roleName); return Result.success(b); } diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/BaseController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/BaseController.java new file mode 100644 index 000000000..c72713db7 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/BaseController.java @@ -0,0 +1,255 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.app.controller; + +import org.apache.seatunnel.app.common.Constants; +import org.apache.seatunnel.app.common.Result; +import org.apache.seatunnel.app.common.Status; + +import org.apache.commons.lang3.StringUtils; + +import javax.servlet.http.HttpServletRequest; + +import java.text.MessageFormat; +import java.util.HashMap; +import java.util.Map; + +import static org.apache.seatunnel.app.common.Constants.COMMA; +import static org.apache.seatunnel.app.common.Constants.HTTP_HEADER_UNKNOWN; +import static org.apache.seatunnel.app.common.Constants.HTTP_X_FORWARDED_FOR; +import static org.apache.seatunnel.app.common.Constants.HTTP_X_REAL_IP; + +public abstract class BaseController { + + /** + * check params + * + * @param pageNo page number + * @param pageSize page size + * @return check result code + */ + public Result checkPageParams(int pageNo, int pageSize) { + Result result = Result.success(); + Status resultEnum = Status.SUCCESS; + String msg = Status.SUCCESS.getMsg(); + if (pageNo <= 0) { + resultEnum = Status.REQUEST_PARAMS_NOT_VALID_ERROR; + msg = + MessageFormat.format( + Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), Constants.PAGE_NUMBER); + } else if (pageSize <= 0) { + resultEnum = Status.REQUEST_PARAMS_NOT_VALID_ERROR; + msg = + MessageFormat.format( + Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), Constants.PAGE_SIZE); + } + 
result.setCode(resultEnum.getCode()); + result.setMsg(msg); + return result; + } + + /** + * get ip address in the http request + * + * @param request http servlet request + * @return client ip address + */ + public static String getClientIpAddress(HttpServletRequest request) { + String clientIp = request.getHeader(HTTP_X_FORWARDED_FOR); + + if (StringUtils.isNotEmpty(clientIp) && !clientIp.equalsIgnoreCase(HTTP_HEADER_UNKNOWN)) { + int index = clientIp.indexOf(COMMA); + if (index != -1) { + return clientIp.substring(0, index); + } else { + return clientIp; + } + } + + clientIp = request.getHeader(HTTP_X_REAL_IP); + if (StringUtils.isNotEmpty(clientIp) && !clientIp.equalsIgnoreCase(HTTP_HEADER_UNKNOWN)) { + return clientIp; + } + + return request.getRemoteAddr(); + } + + /** + * return data list + * + * @param result result code + * @return result code + */ + public Result returnDataList(Map result) { + Status status = (Status) result.get(Constants.STATUS); + if (status == Status.SUCCESS) { + String msg = Status.SUCCESS.getMsg(); + Object datalist = result.get(Constants.DATA_LIST); + return success(msg, datalist); + } else { + Integer code = status.getCode(); + String msg = (String) result.get(Constants.MSG); + return error(code, msg); + } + } + + /** + * success + * + * @return success result code + */ + public Result success() { + Result result = Result.success(); + result.setCode(Status.SUCCESS.getCode()); + result.setMsg(Status.SUCCESS.getMsg()); + + return result; + } + + /** + * success does not need to return data + * + * @param msg success message + * @return success result code + */ + public Result success(String msg) { + Result result = Result.success(); + result.setCode(Status.SUCCESS.getCode()); + result.setMsg(msg); + + return result; + } + + /** + * return data no paging + * + * @param msg success message + * @param list data list + * @return success result code + */ + public Result success(String msg, Object list) { + return getResult(msg, list); + } 
+ + /** + * return data no paging + * + * @param list success + * @return success result code + */ + public Result success(Object list) { + return getResult(Status.SUCCESS.getMsg(), list); + } + + /** + * return the data use Map format, for example, passing the value of key, value, passing a value + * eg. "/user/add" then return user name: zhangsan + * + * @param msg message + * @param object success object data + * @return success result code + */ + public Result success(String msg, Map object) { + return getResult(msg, object); + } + + /** + * return data with paging + * + * @param totalList success object list + * @param currentPage current page + * @param total total + * @param totalPage total page + * @return success result code + */ + public Result success(Object totalList, Integer currentPage, Integer total, Integer totalPage) { + Result result = Result.success(); + result.setCode(Status.SUCCESS.getCode()); + result.setMsg(Status.SUCCESS.getMsg()); + + Map map = new HashMap<>(8); + map.put(Constants.TOTAL_LIST, totalList); + map.put(Constants.CURRENT_PAGE, currentPage); + map.put(Constants.TOTAL_PAGE, totalPage); + map.put(Constants.TOTAL, total); + result.setData(map); + return result; + } + + /** + * error handle + * + * @param code result code + * @param msg result message + * @return error result code + */ + public Result error(Integer code, String msg) { + Result result = Result.success(); + result.setCode(code); + result.setMsg(msg); + return result; + } + + /** + * put message to map + * + * @param result result + * @param status status + * @param statusParams object messages + */ + protected void putMsg(Map result, Status status, Object... 
statusParams) { + result.put(Constants.STATUS, status); + if (statusParams != null && statusParams.length > 0) { + result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams)); + } else { + result.put(Constants.MSG, status.getMsg()); + } + } + + /** + * put message to result object + * + * @param result result + * @param status status + * @param statusParams status parameters + */ + protected void putMsg(Result result, Status status, Object... statusParams) { + result.setCode(status.getCode()); + + if (statusParams != null && statusParams.length > 0) { + result.setMsg(MessageFormat.format(status.getMsg(), statusParams)); + } else { + result.setMsg(status.getMsg()); + } + } + + /** + * get result + * + * @param msg message + * @param list object list + * @return result code + */ + private Result getResult(String msg, Object list) { + Result result = Result.success(); + result.setCode(Status.SUCCESS.getCode()); + result.setMsg(msg); + result.setData(list); + return result; + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/ConnectorController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/ConnectorController.java new file mode 100644 index 000000000..c05d85451 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/ConnectorController.java @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.app.controller; + +import org.apache.seatunnel.app.common.Result; +import org.apache.seatunnel.app.domain.request.connector.ConnectorStatus; +import org.apache.seatunnel.app.domain.response.connector.ConnectorInfo; +import org.apache.seatunnel.app.service.IConnectorService; +import org.apache.seatunnel.common.utils.JsonUtils; + +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import io.swagger.annotations.ApiOperation; +import io.swagger.annotations.ApiParam; + +import javax.annotation.Resource; + +import java.io.IOException; +import java.util.List; + +@RequestMapping("/whaletunnel/api/v1/connector") +@RestController +public class ConnectorController { + + @Resource private IConnectorService connectorService; + + @GetMapping("/sources") + @ApiOperation(value = "list all source connector", httpMethod = "GET") + public Result> listSource( + @RequestParam(defaultValue = "ALL") ConnectorStatus status) { + return Result.success(connectorService.listSources(status)); + } + + @GetMapping("/transforms") + @ApiOperation(value = "list all transforms", httpMethod = "GET") + public Result> listAllTransform() { + return Result.success(connectorService.listTransforms()); + } + + @GetMapping("/sinks") + @ApiOperation(value = "list all sink connector", httpMethod = "GET") + public Result> listSink( + @RequestParam(defaultValue = 
"ALL") ConnectorStatus status) { + return Result.success(connectorService.listSinks(status)); + } + + @GetMapping("/sync") + @ApiOperation(value = "sync all connector from disk", httpMethod = "GET") + public Result> sync() throws IOException { + connectorService.sync(); + return Result.success(); + } + + @GetMapping("/form") + @ApiOperation(value = "get source connector form structure", httpMethod = "GET") + public Result getConnectorFormStructure( + @ApiParam(value = "connector type", required = true) @RequestParam String connectorType, + @ApiParam(value = "connector name", required = true) @RequestParam + String connectorName) { + return Result.success( + JsonUtils.toJsonString( + connectorService.getConnectorFormStructure(connectorType, connectorName))); + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/ConnectorDataSourceController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/ConnectorDataSourceController.java new file mode 100644 index 000000000..b59352cd3 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/ConnectorDataSourceController.java @@ -0,0 +1,108 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.app.controller; + +import org.apache.seatunnel.app.common.Result; +import org.apache.seatunnel.app.domain.request.connector.ConnectorStatus; +import org.apache.seatunnel.app.domain.request.connector.SceneMode; +import org.apache.seatunnel.app.domain.response.connector.ConnectorInfo; +import org.apache.seatunnel.app.domain.response.connector.DataSourceInstance; +import org.apache.seatunnel.app.service.IConnectorService; +import org.apache.seatunnel.common.constants.PluginType; +import org.apache.seatunnel.common.utils.JsonUtils; + +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import io.swagger.annotations.ApiOperation; +import io.swagger.annotations.ApiParam; + +import javax.annotation.Resource; + +import java.io.IOException; +import java.util.List; + +@RequestMapping("/seatunnel/api/v1/datasource") +@RestController +public class ConnectorDataSourceController { + + @Resource private IConnectorService connectorService; + + @GetMapping("/sources") + @ApiOperation(value = "Use jobID to list source DataSourceInstance", httpMethod = "GET") + public Result> listSource( + @ApiParam(value = "jobCode", required = true) @RequestParam Long jobId, + @ApiParam(value = "SceneMode", required = true) @RequestParam SceneMode sceneMode, + @ApiParam(value = "ConnectorStatus", required = true) @RequestParam + ConnectorStatus status) { + return Result.success( + connectorService.listSourceDataSourceInstances(jobId, sceneMode, status)); + } + + @GetMapping("/sinks") + @ApiOperation(value = "Use jobID to list sink DataSourceInstance", httpMethod = "GET") + public Result> listSink( + @ApiParam(value = "jobCode", required = true) @RequestParam 
Long jobId, + @ApiParam(value = "ConnectorStatus", required = true) @RequestParam + ConnectorStatus status) { + return Result.success(connectorService.listSinkDataSourcesInstances(jobId, status)); + } + + @GetMapping("/transforms") + @ApiOperation(value = "Use jobID to list transforms", httpMethod = "GET") + public Result> listAllTransform( + @ApiParam(value = "jobCode", required = true) @RequestParam Long jobId) { + return Result.success(connectorService.listTransformsForJob(jobId)); + } + + @GetMapping("/sync") + @ApiOperation(value = "sync all connector from disk", httpMethod = "GET") + public Result> sync() throws IOException { + connectorService.sync(); + return Result.success(); + } + + @GetMapping("/form") + @ApiOperation(value = "get datasource instance form structure", httpMethod = "GET") + public Result getDatasourceInstanceFormStructure( + @ApiParam(value = "jobCode", required = false) + @RequestParam(required = false, value = "jobCode") + Long jobId, + @ApiParam(value = "connector type", required = true) + @RequestParam(required = true, value = "connectorType") + String connectorType, + @ApiParam(value = "connector name", required = false) + @RequestParam(required = false, value = "connectorName") + String connectorName, + @ApiParam(value = "dataSource instanceId", required = false) + @RequestParam(required = false, value = "dataSourceInstanceId") + Long dataSourceInstanceId) { + if (PluginType.TRANSFORM.getType().equals(connectorType)) { + return Result.success( + JsonUtils.toJsonString( + connectorService.getTransformFormStructure( + connectorType, connectorName))); + } + return Result.success( + JsonUtils.toJsonString( + connectorService.getDatasourceFormStructure( + jobId, dataSourceInstanceId, connectorType))); + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/EngineController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/EngineController.java new file mode 
100644 index 000000000..0f081f2dd --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/EngineController.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.seatunnel.app.controller; + +import org.apache.seatunnel.app.bean.engine.EngineDataType; +import org.apache.seatunnel.app.common.Result; +import org.apache.seatunnel.app.domain.response.engine.Engine; +import org.apache.seatunnel.app.service.IEngineService; + +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +import io.swagger.annotations.ApiOperation; + +import javax.annotation.Resource; + +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +@RequestMapping("/seatunnel/api/v1/engine") +@RestController +public class EngineController { + + @Resource private IEngineService engineService; + + @GetMapping("/list") + @ApiOperation(value = "list all supported engines", httpMethod = "GET") + public Result> listSupportEngines() { + return Result.success(engineService.listSupportEngines()); + } + + 
@GetMapping("/type") + @ApiOperation(value = "list all supported Data Type", httpMethod = "GET") + public Result> listSupportDataTypes() { + return Result.success( + Arrays.stream(engineService.listSupportDataTypes()) + .map(EngineDataType.DataType::getName) + .collect(Collectors.toList())); + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/EnvController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/EnvController.java new file mode 100644 index 000000000..dfd8661fd --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/EnvController.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.seatunnel.app.controller; + +import org.apache.seatunnel.app.common.Result; +import org.apache.seatunnel.app.service.IJobEnvService; +import org.apache.seatunnel.common.utils.JsonUtils; + +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +import io.swagger.annotations.ApiOperation; + +import javax.annotation.Resource; + +@RequestMapping("/seatunnel/api/v1/job/env") +@RestController +public class EnvController { + + @Resource private IJobEnvService jobEnvService; + + @GetMapping("") + @ApiOperation(value = "get job env config parameters", httpMethod = "GET") + public Result getJobEnvFormStructure() { + return Result.success(JsonUtils.toJsonString(jobEnvService.getJobEnvFormStructure())); + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/JobConfigController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/JobConfigController.java new file mode 100644 index 000000000..054f49ce7 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/JobConfigController.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.seatunnel.app.controller; + +import org.apache.seatunnel.app.common.Result; +import org.apache.seatunnel.app.domain.request.job.JobConfig; +import org.apache.seatunnel.app.domain.response.job.JobConfigRes; +import org.apache.seatunnel.app.service.IJobConfigService; + +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PutMapping; +import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +import com.fasterxml.jackson.core.JsonProcessingException; +import io.swagger.annotations.ApiOperation; +import io.swagger.annotations.ApiParam; + +import javax.annotation.Resource; + +@RestController +@RequestMapping("/seatunnel/api/v1/job/config") +public class JobConfigController { + + @Resource private IJobConfigService jobConfigService; + + @PutMapping("/{jobVersionId}") + @ApiOperation(value = "update job config", httpMethod = "PUT") + Result updateJobConfig( + @ApiParam(value = "userId", required = true) @RequestAttribute("userId") Integer userId, + @ApiParam(value = "jobVersionId", required = true) @PathVariable long jobVersionId, + @ApiParam(value = "jobConfig", required = true) @RequestBody JobConfig jobConfig) + throws JsonProcessingException { + jobConfigService.updateJobConfig(userId, jobVersionId, jobConfig); + return Result.success(); + } + + @GetMapping("/{jobVersionId}") + @ApiOperation(value = "get job config", httpMethod = "GET") + Result getJobConfig( + @ApiParam(value = "jobVersionId", required = true) @PathVariable long jobVersionId) + throws JsonProcessingException { + return 
Result.success(jobConfigService.getJobConfig(jobVersionId)); + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/JobDefinitionController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/JobDefinitionController.java new file mode 100644 index 000000000..e95baa8e2 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/JobDefinitionController.java @@ -0,0 +1,86 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.app.controller; + +import org.apache.seatunnel.app.common.Result; +import org.apache.seatunnel.app.dal.entity.JobDefinition; +import org.apache.seatunnel.app.domain.request.job.JobReq; +import org.apache.seatunnel.app.domain.response.PageInfo; +import org.apache.seatunnel.app.domain.response.job.JobDefinitionRes; +import org.apache.seatunnel.app.service.IJobDefinitionService; +import org.apache.seatunnel.server.common.CodeGenerateUtils; + +import org.springframework.web.bind.annotation.DeleteMapping; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import io.swagger.annotations.ApiOperation; +import io.swagger.annotations.ApiParam; + +import javax.annotation.Resource; + +@RestController +@RequestMapping("/seatunnel/api/v1/job/definition") +public class JobDefinitionController { + + @Resource private IJobDefinitionService jobService; + + /** + * create job definition + * + * @return created job id + */ + @PostMapping + @ApiOperation(value = "create job definition", httpMethod = "POST") + Result createJobDefinition( + @ApiParam(value = "userId", required = true) @RequestAttribute("userId") Integer userId, + @RequestBody JobReq jobReq) + throws CodeGenerateUtils.CodeGenerateException { + return Result.success(jobService.createJob(userId, jobReq)); + } + + @GetMapping + @ApiOperation(value = "get job definition", httpMethod = "GET") + Result> getJobDefinition( + @ApiParam(value = "job name") @RequestParam(required = false) String searchName, + @ApiParam(value = "page num", required = 
true) @RequestParam Integer pageNo, + @ApiParam(value = "page size", required = true) @RequestParam Integer pageSize, + @ApiParam(value = "job mode") @RequestParam(required = false) String jobMode) { + return Result.success(jobService.getJob(searchName, pageNo, pageSize, jobMode)); + } + + @GetMapping("/{jobId}") + @ApiOperation(value = "get job definition", httpMethod = "GET") + Result getJobDefinition(@PathVariable long jobId) { + return Result.success(jobService.getJobDefinitionByJobId(jobId)); + } + + @DeleteMapping + @ApiOperation(value = "delete job definition", httpMethod = "DELETE") + Result deleteJobDefinition( + @ApiParam(value = "id", required = true) @RequestParam long id) { + jobService.deleteJob(id); + return Result.success(); + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/JobExecutorController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/JobExecutorController.java new file mode 100644 index 000000000..392c672b8 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/JobExecutorController.java @@ -0,0 +1,72 @@ +package org.apache.seatunnel.app.controller; + +import org.apache.seatunnel.app.common.Result; +import org.apache.seatunnel.app.domain.response.executor.JobExecutorRes; +import org.apache.seatunnel.app.service.IJobExecutorService; +import org.apache.seatunnel.app.service.IJobInstanceService; +import org.apache.seatunnel.server.common.SeatunnelErrorEnum; +import org.apache.seatunnel.server.common.SeatunnelException; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import 
io.swagger.annotations.ApiOperation; +import io.swagger.annotations.ApiParam; +import lombok.extern.slf4j.Slf4j; + +import javax.annotation.Resource; + +import java.io.IOException; + +/** @Description @ClassName JobExecutorController @Author zhang @Date 2023/6/30 15:09 */ +@Slf4j +@RequestMapping("/seatunnel/api/v1/job/executor") +@RestController +public class JobExecutorController { + + @Autowired IJobExecutorService jobExecutorService; + @Resource private IJobInstanceService jobInstanceService; + + @GetMapping("/execute") + @ApiOperation(value = "Execute synchronization tasks", httpMethod = "GET") + public Result jobExecutor( + @ApiParam(value = "userId", required = true) @RequestAttribute("userId") Integer userId, + @ApiParam(value = "jobDefineId", required = true) @RequestParam("jobDefineId") + Long jobDefineId) { + return jobExecutorService.jobExecute(userId, jobDefineId); + } + + @GetMapping("/resource") + @ApiOperation(value = "get the resource for job executor", httpMethod = "GET") + public Result resource( + @ApiParam(value = "userId", required = true) @RequestParam Integer userId, + @ApiParam(value = "Job define id", required = true) @RequestParam Long jobDefineId) + throws IOException { + try { + JobExecutorRes executeResource = + jobInstanceService.createExecuteResource(userId, jobDefineId); + return Result.success(executeResource); + } catch (Exception e) { + log.error("Get the resource for job executor error", e); + throw new SeatunnelException(SeatunnelErrorEnum.ILLEGAL_STATE, e.getMessage()); + } + } + + @GetMapping("/pause") + public Result jobPause( + @ApiParam(value = "userId", required = true) @RequestAttribute("userId") Integer userId, + @ApiParam(value = "jobInstanceId", required = true) @RequestParam Long jobInstanceId) { + return jobExecutorService.jobPause(userId, jobInstanceId); + } + + @GetMapping("/restore") + public Result jobRestore( + @ApiParam(value = "userId", required = true) @RequestAttribute("userId") Integer userId, + 
@ApiParam(value = "jobInstanceId", required = true) @RequestParam Long jobInstanceId) { + return jobExecutorService.jobStore(userId, jobInstanceId); + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/JobMetricsController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/JobMetricsController.java new file mode 100644 index 000000000..bd8bd31d2 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/JobMetricsController.java @@ -0,0 +1,61 @@ +package org.apache.seatunnel.app.controller; + +import org.apache.seatunnel.app.common.Result; +import org.apache.seatunnel.app.domain.response.metrics.JobDAG; +import org.apache.seatunnel.app.domain.response.metrics.JobPipelineDetailMetricsRes; +import org.apache.seatunnel.app.domain.response.metrics.JobPipelineSummaryMetricsRes; +import org.apache.seatunnel.app.service.IJobMetricsService; + +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import com.fasterxml.jackson.core.JsonProcessingException; +import io.swagger.annotations.ApiOperation; +import io.swagger.annotations.ApiParam; + +import javax.annotation.Resource; + +import java.io.IOException; +import java.util.List; + +/** @Description @ClassName JobMetricsController @Author zhang @Date 2023/7/6 18:30 */ +@RequestMapping("/seatunnel/api/v1/job/metrics") +@RestController +public class JobMetricsController { + + @Resource private IJobMetricsService jobMetricsService; + + @GetMapping("/detail") + @ApiOperation(value = "get the job pipeline detail metrics", httpMethod = "GET") + public Result> detail( + @ApiParam(value = "userId", required = true) @RequestAttribute Integer userId, 
+ @ApiParam(value = "jobInstanceId", required = true) @RequestParam Long jobInstanceId) + throws IOException { + + return Result.success( + jobMetricsService.getJobPipelineDetailMetricsRes(userId, jobInstanceId)); + } + + @GetMapping("/dag") + @ApiOperation(value = "get the job pipeline dag", httpMethod = "GET") + public Result getJobDAG( + @ApiParam(value = "userId", required = true) @RequestAttribute Integer userId, + @ApiParam(value = "jobInstanceId", required = true) @RequestParam Long jobInstanceId) + throws JsonProcessingException { + + return Result.success(jobMetricsService.getJobDAG(userId, jobInstanceId)); + } + + @GetMapping("/summary") + @ApiOperation(value = "get the job pipeline summary metrics", httpMethod = "GET") + public Result> summary( + @ApiParam(value = "userId", required = true) @RequestAttribute Integer userId, + @ApiParam(value = "jobInstanceId", required = true) @RequestParam Long jobInstanceId) + throws IOException { + return Result.success( + jobMetricsService.getJobPipelineSummaryMetrics(userId, jobInstanceId)); + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/JobTaskController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/JobTaskController.java new file mode 100644 index 000000000..0e2fd1e9f --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/JobTaskController.java @@ -0,0 +1,88 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.app.controller; + +import org.apache.seatunnel.app.common.Result; +import org.apache.seatunnel.app.domain.request.job.JobDAG; +import org.apache.seatunnel.app.domain.request.job.JobTaskInfo; +import org.apache.seatunnel.app.domain.request.job.PluginConfig; +import org.apache.seatunnel.app.domain.response.job.JobTaskCheckRes; +import org.apache.seatunnel.app.service.IJobTaskService; + +import org.springframework.web.bind.annotation.DeleteMapping; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import io.swagger.annotations.ApiOperation; +import io.swagger.annotations.ApiParam; + +import javax.annotation.Resource; + +@RestController +@RequestMapping("/seatunnel/api/v1/job/") +public class JobTaskController { + + @Resource private IJobTaskService jobTaskService; + + @PostMapping("/dag/{jobVersionId}") + @ApiOperation(value = "save job dag", httpMethod = "POST") + Result saveJobDAG( + @ApiParam(value = "job version id", required = true) @PathVariable long jobVersionId, + @ApiParam(value = "task info", required = true) @RequestBody JobDAG jobDAG) { + return Result.success(jobTaskService.saveJobDAG(jobVersionId, jobDAG)); + } + + 
@GetMapping("/{jobVersionId}") + @ApiOperation(value = "get job task and dag info", httpMethod = "GET") + Result getJob( + @ApiParam(value = "job version id", required = true) @PathVariable long jobVersionId) { + return Result.success(jobTaskService.getTaskConfig(jobVersionId)); + } + + @PostMapping("/task/{jobVersionId}") + @ApiOperation(value = "save or update single task", httpMethod = "POST") + Result saveSingleTask( + @ApiParam(value = "job version id", required = true) @PathVariable long jobVersionId, + @ApiParam(value = "task info", required = true) @RequestBody + PluginConfig pluginConfig) { + jobTaskService.saveSingleTask(jobVersionId, pluginConfig); + return Result.success(); + } + + @GetMapping("/task/{jobVersionId}") + @ApiOperation(value = "get single task", httpMethod = "GET") + Result getSingleTask( + @ApiParam(value = "job version id", required = true) @PathVariable long jobVersionId, + @ApiParam(value = "task plugin id", required = true) @RequestParam String pluginId) { + return Result.success(jobTaskService.getSingleTask(jobVersionId, pluginId)); + } + + @DeleteMapping("/task/{jobVersionId}") + @ApiOperation(value = "delete single task", httpMethod = "DELETE") + Result deleteSingleTask( + @ApiParam(value = "job version id", required = true) @PathVariable long jobVersionId, + @ApiParam(value = "task plugin id", required = true) @RequestParam String pluginId) { + jobTaskService.deleteSingleTask(jobVersionId, pluginId); + return Result.success(); + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/ScriptController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/ScriptController.java index ff094d436..9b43f4431 100644 --- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/ScriptController.java +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/ScriptController.java @@ -1,139 +1,155 @@ -/* - * 
Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.seatunnel.app.controller; - -import org.apache.seatunnel.app.aspect.UserId; -import org.apache.seatunnel.app.common.Result; -import org.apache.seatunnel.app.domain.request.script.CreateScriptReq; -import org.apache.seatunnel.app.domain.request.script.PublishScriptReq; -import org.apache.seatunnel.app.domain.request.script.ScriptListReq; -import org.apache.seatunnel.app.domain.request.script.UpdateScriptContentReq; -import org.apache.seatunnel.app.domain.request.script.UpdateScriptParamReq; -import org.apache.seatunnel.app.domain.response.PageInfo; -import org.apache.seatunnel.app.domain.response.script.CreateScriptRes; -import org.apache.seatunnel.app.domain.response.script.ScriptFullInfoRes; -import org.apache.seatunnel.app.domain.response.script.ScriptParamRes; -import org.apache.seatunnel.app.domain.response.script.ScriptSimpleInfoRes; -import org.apache.seatunnel.app.service.IScriptService; - -import io.swagger.annotations.ApiOperation; -import io.swagger.annotations.ApiParam; -import org.springframework.web.bind.annotation.DeleteMapping; -import org.springframework.web.bind.annotation.GetMapping; -import 
org.springframework.web.bind.annotation.PatchMapping; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.PostMapping; -import org.springframework.web.bind.annotation.PutMapping; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.RestController; -import springfox.documentation.annotations.ApiIgnore; - -import javax.annotation.Resource; -import javax.validation.constraints.NotNull; - -import java.util.List; - -@RequestMapping("/api/v1/script") -@RestController -public class ScriptController { - @Resource - private IScriptService iScriptService; - - @PostMapping - @ApiOperation(value = "add an script with content", httpMethod = "POST") - public Result createScript(@RequestBody @NotNull CreateScriptReq createScriptReq, - @ApiIgnore @UserId Integer operatorId) { - createScriptReq.setCreatorId(operatorId); - return Result.success(iScriptService.createScript(createScriptReq)); - } - - @PutMapping("/{scriptId}/content") - @ApiOperation(value = "update script", httpMethod = "PUT") - public Result updateScriptContent(@ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId") Integer scriptId, - @RequestBody @NotNull UpdateScriptContentReq req, - @ApiIgnore @UserId Integer operatorId) { - req.setScriptId(scriptId); - req.setMenderId(operatorId); - - iScriptService.updateScriptContent(req); - return Result.success(); - } - - @DeleteMapping("/{scriptId}") - @ApiOperation(value = "delete script", httpMethod = "DELETE") - public Result delete(@ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId") Integer scriptId) { - iScriptService.delete(scriptId); - return Result.success(); - } - - @GetMapping - @ApiOperation(value = "script list", httpMethod = "GET") - public Result> list(@ApiParam(value 
= "script name") @RequestParam(required = false) String name, - @ApiParam(value = "script status") @RequestParam(required = false) Byte status, - @ApiParam(value = "page num", required = true) @RequestParam Integer pageNo, - @ApiParam(value = "page size", required = true) @RequestParam Integer pageSize) { - - final ScriptListReq req = new ScriptListReq(); - req.setName(name); - req.setPageNo(pageNo); - req.setPageSize(pageSize); - - return Result.success(iScriptService.list(req)); - } - - @GetMapping("/{scriptId}/content") - @ApiOperation(value = "fetch script content", httpMethod = "GET") - public Result fetchScriptContent(@ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId") Integer scriptId) { - return Result.success(iScriptService.fetchScriptContent(scriptId)); - } - - @GetMapping("/{scriptId}") - public Result detail(@ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId") Integer scriptId) { - return Result.success(iScriptService.detail(scriptId)); - } - - @PutMapping("/{scriptId}/param") - @ApiOperation(value = "update script param", httpMethod = "PUT") - public Result updateScriptParam(@ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId") Integer scriptId, - @RequestBody @NotNull UpdateScriptParamReq updateScriptParamReq) { - updateScriptParamReq.setScriptId(scriptId); - iScriptService.updateScriptParam(updateScriptParamReq); - return Result.success(); - } - - @GetMapping("/{scriptId}/param") - @ApiOperation(value = "fetch script param", httpMethod = "GET") - public Result> fetchScriptParam(@ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId") Integer scriptId) { - return Result.success(iScriptService.fetchScriptParam(scriptId)); - } - - @PatchMapping("/{scriptId}/publish") - @ApiOperation(value = "publish script", httpMethod = "PATCH") - public Result publish(@ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId") Integer 
scriptId, - @ApiIgnore @UserId Integer operatorId) { - - final PublishScriptReq req = new PublishScriptReq(); - req.setScriptId(scriptId); - req.setOperatorId(operatorId); - - iScriptService.publishScript(req); - return Result.success(); - } -} +/// * +// * Licensed to the Apache Software Foundation (ASF) under one or more +// * contributor license agreements. See the NOTICE file distributed with +// * this work for additional information regarding copyright ownership. +// * The ASF licenses this file to You under the Apache License, Version 2.0 +// * (the "License"); you may not use this file except in compliance with +// * the License. You may obtain a copy of the License at +// * +// * http://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// * See the License for the specific language governing permissions and +// * limitations under the License. 
+// */ +// +// package org.apache.seatunnel.app.controller; +// +// import org.apache.seatunnel.app.aspect.UserId; +// import org.apache.seatunnel.app.common.Result; +// import org.apache.seatunnel.app.domain.request.script.CreateScriptReq; +// import org.apache.seatunnel.app.domain.request.script.PublishScriptReq; +// import org.apache.seatunnel.app.domain.request.script.ScriptListReq; +// import org.apache.seatunnel.app.domain.request.script.UpdateScriptContentReq; +// import org.apache.seatunnel.app.domain.request.script.UpdateScriptParamReq; +// import org.apache.seatunnel.app.domain.response.PageInfo; +// import org.apache.seatunnel.app.domain.response.script.CreateScriptRes; +// import org.apache.seatunnel.app.domain.response.script.ScriptFullInfoRes; +// import org.apache.seatunnel.app.domain.response.script.ScriptParamRes; +// import org.apache.seatunnel.app.domain.response.script.ScriptSimpleInfoRes; +// import org.apache.seatunnel.app.service.IScriptService; +// +// import org.springframework.web.bind.annotation.DeleteMapping; +// import org.springframework.web.bind.annotation.GetMapping; +// import org.springframework.web.bind.annotation.PatchMapping; +// import org.springframework.web.bind.annotation.PathVariable; +// import org.springframework.web.bind.annotation.PostMapping; +// import org.springframework.web.bind.annotation.PutMapping; +// import org.springframework.web.bind.annotation.RequestBody; +// import org.springframework.web.bind.annotation.RequestMapping; +// import org.springframework.web.bind.annotation.RequestParam; +// import org.springframework.web.bind.annotation.RestController; +// +// import io.swagger.annotations.ApiOperation; +// import io.swagger.annotations.ApiParam; +// import springfox.documentation.annotations.ApiIgnore; +// +// import javax.annotation.Resource; +// import javax.validation.constraints.NotNull; +// +// import java.util.List; +// +// @RequestMapping("/seatunnel/api/v1/script") +// @RestController +// public 
class ScriptController { +// @Resource private IScriptService iScriptService; +// +// @PostMapping +// @ApiOperation(value = "add an script with content", httpMethod = "POST") +// public Result createScript( +// @RequestBody @NotNull CreateScriptReq createScriptReq, +// @ApiIgnore @UserId Integer operatorId) { +// createScriptReq.setCreatorId(operatorId); +// return Result.success(iScriptService.createScript(createScriptReq)); +// } +// +// @PutMapping("/{scriptId}/content") +// @ApiOperation(value = "update script", httpMethod = "PUT") +// public Result updateScriptContent( +// @ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId") +// Integer scriptId, +// @RequestBody @NotNull UpdateScriptContentReq req, +// @ApiIgnore @UserId Integer operatorId) { +// req.setScriptId(scriptId); +// req.setMenderId(operatorId); +// +// iScriptService.updateScriptContent(req); +// return Result.success(); +// } +// +// @DeleteMapping("/{scriptId}") +// @ApiOperation(value = "delete script", httpMethod = "DELETE") +// public Result delete( +// @ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId") +// Integer scriptId) { +// iScriptService.delete(scriptId); +// return Result.success(); +// } +// +// @GetMapping +// @ApiOperation(value = "script list", httpMethod = "GET") +// public Result> list( +// @ApiParam(value = "script name") @RequestParam(required = false) String name, +// @ApiParam(value = "script status") @RequestParam(required = false) Byte status, +// @ApiParam(value = "page num", required = true) @RequestParam Integer pageNo, +// @ApiParam(value = "page size", required = true) @RequestParam Integer pageSize) { +// +// final ScriptListReq req = new ScriptListReq(); +// req.setName(name); +// req.setPageNo(pageNo); +// req.setPageSize(pageSize); +// +// return Result.success(iScriptService.list(req)); +// } +// +// @GetMapping("/{scriptId}/content") +// @ApiOperation(value = "fetch script content", httpMethod = "GET") 
+// public Result fetchScriptContent( +// @ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId") +// Integer scriptId) { +// return Result.success(iScriptService.fetchScriptContent(scriptId)); +// } +// +// @GetMapping("/{scriptId}") +// public Result detail( +// @ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId") +// Integer scriptId) { +// return Result.success(iScriptService.detail(scriptId)); +// } +// +// @PutMapping("/{scriptId}/param") +// @ApiOperation(value = "update script param", httpMethod = "PUT") +// public Result updateScriptParam( +// @ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId") +// Integer scriptId, +// @RequestBody @NotNull UpdateScriptParamReq updateScriptParamReq) { +// updateScriptParamReq.setScriptId(scriptId); +// iScriptService.updateScriptParam(updateScriptParamReq); +// return Result.success(); +// } +// +// @GetMapping("/{scriptId}/param") +// @ApiOperation(value = "fetch script param", httpMethod = "GET") +// public Result> fetchScriptParam( +// @ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId") +// Integer scriptId) { +// return Result.success(iScriptService.fetchScriptParam(scriptId)); +// } +// +// @PatchMapping("/{scriptId}/publish") +// @ApiOperation(value = "publish script", httpMethod = "PATCH") +// public Result publish( +// @ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId") +// Integer scriptId, +// @ApiIgnore @UserId Integer operatorId) { +// +// final PublishScriptReq req = new PublishScriptReq(); +// req.setScriptId(scriptId); +// req.setOperatorId(operatorId); +// +// iScriptService.publishScript(req); +// return Result.success(); +// } +// } diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/SeatunnelDatasourceController.java 
b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/SeatunnelDatasourceController.java new file mode 100644 index 000000000..ac8929964 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/SeatunnelDatasourceController.java @@ -0,0 +1,426 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.app.controller; + +import org.apache.seatunnel.app.common.Constants; +import org.apache.seatunnel.app.common.Result; +import org.apache.seatunnel.app.dal.dao.IUserDao; +import org.apache.seatunnel.app.dal.dao.TaskDefinitionDao; +import org.apache.seatunnel.app.dal.entity.TaskMainInfo; +import org.apache.seatunnel.app.dal.entity.User; +import org.apache.seatunnel.app.domain.dto.datasource.DatabaseTableFields; +import org.apache.seatunnel.app.domain.dto.datasource.DatabaseTables; +import org.apache.seatunnel.app.domain.dto.datasource.TableInfo; +import org.apache.seatunnel.app.domain.request.datasource.DatasourceCheckReq; +import org.apache.seatunnel.app.domain.request.datasource.DatasourceReq; +import org.apache.seatunnel.app.domain.response.PageInfo; +import org.apache.seatunnel.app.domain.response.datasource.DatasourceDetailRes; +import org.apache.seatunnel.app.domain.response.datasource.DatasourceRes; +import org.apache.seatunnel.app.service.IDatasourceService; +import org.apache.seatunnel.app.utils.CartesianProductUtils; +import org.apache.seatunnel.app.utils.JSONUtils; +import org.apache.seatunnel.app.utils.PropertyUtils; +import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo; +import org.apache.seatunnel.datasource.plugin.api.model.TableField; +import org.apache.seatunnel.server.common.SeatunnelErrorEnum; +import org.apache.seatunnel.server.common.SeatunnelException; + +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.collections4.MapUtils; +import org.apache.commons.lang3.StringUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.DeleteMapping; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.PutMapping; +import 
org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import springfox.documentation.annotations.ApiIgnore; + +import javax.annotation.Resource; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; + +import static org.apache.seatunnel.app.common.Constants.SESSION_USER; + +@RestController +@RequestMapping("/seatunnel/api/v1/datasource") +public class SeatunnelDatasourceController extends BaseController { + + @Autowired private IDatasourceService datasourceService; + + @Autowired private TaskDefinitionDao taskDefinitionDao; + + @Resource(name = "userDaoImpl") + private IUserDao userMapper; + + private static final String DEFAULT_PLUGIN_VERSION = "1.0.0"; + private static final String WS_SOURCE = "WS"; + + private static final List wsSupportDatasources = + PropertyUtils.getList(Constants.WS_SUPPORT_DATASOURCES, Constants.COMMA); + + @ApiOperation(value = "create datasource", notes = "create datasource") + @ApiImplicitParams({ + @ApiImplicitParam( + name = "datasourceName", + value = "datasource name", + required = true, + dataType = "String", + paramType = "query"), + @ApiImplicitParam( + name = "pluginName", + value = "plugin name", + required = true, + dataType = "String", + paramType = "query"), + @ApiImplicitParam( + name = "description", + value = "description", + required = false, + dataType = "String", + paramType = "query"), + @ApiImplicitParam( + name = "datasourceConfig", + value = "datasource config", + required = true, + dataType = "String", + paramType = "query") + }) + 
@PostMapping("/create") + Result createDatasource( + @ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @RequestBody DatasourceReq req) { + String datasourceConfig = req.getDatasourceConfig(); + Map stringStringMap = JSONUtils.toMap(datasourceConfig); + return Result.success( + datasourceService.createDatasource( + loginUser.getId(), + req.getDatasourceName(), + req.getPluginName(), + DEFAULT_PLUGIN_VERSION, + req.getDescription(), + stringStringMap)); + } + + @ApiOperation(value = "test datasource connection", notes = "test datasource connection") + @ApiImplicitParams({ + @ApiImplicitParam( + name = "pluginName", + value = "plugin name", + required = true, + dataType = "String", + paramType = "query"), + @ApiImplicitParam( + name = "datasourceConfig", + value = "datasource config", + required = true, + dataType = "String", + paramType = "query") + }) + @PostMapping("/check/connect") + Result testConnect( + @ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @RequestBody DatasourceCheckReq req) { + + // Map stringStringMap = JSONUtils.toMap(req.getDatasourceConfig()); + return Result.success( + datasourceService.testDatasourceConnectionAble( + loginUser.getId(), + req.getPluginName(), + DEFAULT_PLUGIN_VERSION, + req.getDatasourceConfig())); + } + + @ApiOperation(value = "update datasource", notes = "update datasource") + @ApiImplicitParams({ + @ApiImplicitParam( + name = "datasourceName", + value = "datasource name", + required = false, + dataType = "String", + paramType = "query"), + @ApiImplicitParam( + name = "description", + value = "description", + required = false, + dataType = "String", + paramType = "query"), + @ApiImplicitParam( + name = "datasourceConfig", + value = "datasource config", + required = false, + dataType = "String", + paramType = "query") + }) + @PutMapping("/{id}") + Result updateDatasource( + @ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @PathVariable("id") String id, + @RequestBody 
DatasourceReq req) { + Map stringStringMap = JSONUtils.toMap(req.getDatasourceConfig()); + Long datasourceId = Long.parseLong(id); + return Result.success( + datasourceService.updateDatasource( + loginUser.getId(), + datasourceId, + req.getDatasourceName(), + req.getDescription(), + stringStringMap)); + } + + @ApiOperation(value = "delete datasource by id", notes = "delete datasource by id") + @DeleteMapping("/{id}") + Result deleteDatasource( + @ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @PathVariable("id") String id) { + Long datasourceId = Long.parseLong(id); + List taskMainInfos = taskDefinitionDao.queryByDataSourceId(datasourceId); + if (taskMainInfos.size() > 0) { + throw new SeatunnelException( + SeatunnelErrorEnum.DATA_SOURCE_HAD_USED, + taskMainInfos.stream() + .map( + info -> + String.format( + "%s - %s", + info.getProcessDefinitionName(), + info.getTaskName())) + .collect(Collectors.toList())); + } + return Result.success(datasourceService.deleteDatasource(loginUser.getId(), datasourceId)); + } + + @ApiOperation(value = "get datasource detail", notes = "get datasource detail") + @ApiImplicitParams({ + @ApiImplicitParam( + name = "userId", + value = "user id", + required = true, + dataType = "String", + paramType = "query"), + }) + @GetMapping("/{id}") + Result getDatasource( + @ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @PathVariable("id") String id) { + return Result.success(datasourceService.queryDatasourceDetailById(loginUser.getId(), id)); + } + + @ApiOperation(value = "get datasource list", notes = "get datasource list") + @ApiImplicitParams({ + @ApiImplicitParam( + name = "searchVal", + value = "search value", + required = false, + dataType = "String", + paramType = "query"), + @ApiImplicitParam( + name = "pluginName", + value = "plugin name", + required = false, + dataType = "String", + paramType = "query"), + @ApiImplicitParam( + name = "pageNo", + value = "page no", + required = false, + dataType 
= "Integer", + paramType = "query"), + @ApiImplicitParam( + name = "pageSize", + value = "page size", + required = false, + dataType = "Integer", + paramType = "query") + }) + @GetMapping("/list") + Result> getDatasourceList( + @ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @RequestParam("searchVal") String searchVal, + @RequestParam("pluginName") String pluginName, + @RequestParam("pageNo") Integer pageNo, + @RequestParam("pageSize") Integer pageSize) { + PageInfo datasourceResPageInfo = + datasourceService.queryDatasourceList( + loginUser.getId(), searchVal, pluginName, pageNo, pageSize); + if (CollectionUtils.isNotEmpty(datasourceResPageInfo.getData())) { + Map userIdNameMap = userIdNameMap(); + datasourceResPageInfo + .getData() + .forEach( + datasourceRes -> { + Map datasourceConfig = + datasourceRes.getDatasourceConfig(); + Optional.ofNullable( + MapUtils.getString( + datasourceConfig, Constants.PASSWORD)) + .ifPresent( + password -> { + datasourceConfig.put( + Constants.PASSWORD, + CartesianProductUtils.maskPassword( + password)); + }); + datasourceRes.setDatasourceConfig(datasourceConfig); + datasourceRes.setCreateUserName( + userIdNameMap.getOrDefault( + datasourceRes.getCreateUserId(), "")); + datasourceRes.setUpdateUserName( + userIdNameMap.getOrDefault( + datasourceRes.getUpdateUserId(), "")); + }); + } + return Result.success(datasourceResPageInfo); + } + + @ApiOperation(value = "get datasource type list", notes = "get datasource type list") + @ApiImplicitParams({ + @ApiImplicitParam( + name = "showVirtualDataSource", + value = "show virtual datasource", + required = false, + defaultValue = "true", + dataType = "Boolean", + paramType = "query") + }) + @GetMapping("/support-datasources") + Result>> getSupportDatasources( + @ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @RequestParam("showVirtualDataSource") Boolean showVirtualDataSource, + @RequestParam(value = "source", required = false) String source) { + 
Map> allDatasources = + datasourceService.queryAllDatasourcesGroupByType(showVirtualDataSource); + // default source is WS + if (StringUtils.isEmpty(source) || source.equals(WS_SOURCE)) { + allDatasources.forEach( + (k, typeList) -> { + typeList = + typeList.stream() + .filter( + plugin -> + wsSupportDatasources.contains( + plugin.getName())) + .collect(Collectors.toList()); + allDatasources.put(k, typeList); + }); + } + return Result.success(allDatasources); + } + + @GetMapping("/dynamic-form") + Result getDynamicForm( + @ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @RequestParam("pluginName") String pluginName) { + return Result.success(datasourceService.getDynamicForm(pluginName)); + } + + @GetMapping("/databases") + Result> getDatabases( + @ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @RequestParam("datasourceName") String datasourceName) { + return Result.success(datasourceService.queryDatabaseByDatasourceName(datasourceName)); + } + + @GetMapping("/tables") + Result> getTableNames( + @ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @RequestParam("datasourceName") String datasourceName, + @RequestParam("databaseName") String databaseName) { + return Result.success(datasourceService.queryTableNames(datasourceName, databaseName)); + } + + @GetMapping("/schema") + Result> getTableFields( + @ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @RequestParam("datasourceId") String datasourceId, + @RequestParam(value = "databaseName", required = false) String databaseName, + @RequestParam("tableName") String tableName) { + DatasourceDetailRes res = datasourceService.queryDatasourceDetailById(datasourceId); + if (StringUtils.isEmpty(databaseName)) { + throw new SeatunnelException( + SeatunnelErrorEnum.INVALID_DATASOURCE, res.getDatasourceName()); + } + List tableFields = + datasourceService.queryTableSchema( + res.getDatasourceName(), databaseName, tableName); + return 
Result.success(tableFields); + } + + @PostMapping("/schemas") + Result> getMultiTableFields( + @ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @RequestParam("datasourceId") String datasourceId, + @RequestBody List tableNames) { + DatasourceDetailRes res = datasourceService.queryDatasourceDetailById(datasourceId); + List tableFields = new ArrayList<>(); + tableNames.forEach( + database -> { + List tableInfos = new ArrayList<>(); + database.getTables() + .forEach( + tableName -> { + List tableField = + datasourceService.queryTableSchema( + res.getDatasourceName(), + database.getDatabase(), + tableName); + tableInfos.add(new TableInfo(tableName, tableField)); + }); + tableFields.add(new DatabaseTableFields(database.getDatabase(), tableInfos)); + }); + return Result.success(tableFields); + } + + @GetMapping("/all-tables") + Result> getTables( + @ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @RequestParam("datasourceId") String datasourceId) { + DatasourceDetailRes res = datasourceService.queryDatasourceDetailById(datasourceId); + List tables = new ArrayList<>(); + List databases = + datasourceService.queryDatabaseByDatasourceName(res.getDatasourceName()); + databases.forEach( + database -> { + tables.add( + new DatabaseTables( + database, + datasourceService.queryTableNames( + res.getDatasourceName(), database))); + }); + return Result.success(tables); + } + + public Map userIdNameMap() { + return userMapper.queryEnabledUsers().stream() + .collect(Collectors.toMap(User::getId, User::getUsername)); + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/TableSchemaController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/TableSchemaController.java new file mode 100644 index 000000000..e9f355150 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/TableSchemaController.java @@ -0,0 +1,67 @@ +/* + 
* Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.app.controller; + +import org.apache.seatunnel.app.common.Result; +import org.apache.seatunnel.app.domain.request.job.DataSourceOption; +import org.apache.seatunnel.app.domain.request.job.TableSchemaReq; +import org.apache.seatunnel.app.domain.response.job.TableSchemaRes; +import org.apache.seatunnel.app.service.ITableSchemaService; + +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import io.swagger.annotations.ApiOperation; +import io.swagger.annotations.ApiParam; + +import javax.annotation.Resource; + +@RestController +@RequestMapping("/seatunnel/api/v1/job/table") +public class TableSchemaController { + + @Resource private ITableSchemaService tableSchemaService; + + @PostMapping("/schema") + Result querySchemaMapping( + @ApiParam(value = "datasource plugin name", required = true) @RequestParam + String pluginName, + 
@ApiParam(value = "task info", required = true) @RequestBody + TableSchemaReq tableSchemaReq) { + return Result.success(tableSchemaService.getSeaTunnelSchema(pluginName, tableSchemaReq)); + } + + @PostMapping("/check") + @ApiOperation(value = "check database and table is exist", httpMethod = "POST") + public Result checkDatabaseAndTable( + @RequestParam String datasourceId, @RequestBody DataSourceOption dataSourceOption) { + return Result.success( + tableSchemaService.checkDatabaseAndTable(datasourceId, dataSourceOption)); + } + + @GetMapping("/column-projection") + Result queryColumnProjection( + @ApiParam(value = "datasource plugin name", required = true) @RequestParam + String pluginName) { + return Result.success(tableSchemaService.getColumnProjection(pluginName)); + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/TaskController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/TaskController.java index 43f141d71..716cd9953 100644 --- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/TaskController.java +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/TaskController.java @@ -1,115 +1,128 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.seatunnel.app.controller; - -import org.apache.seatunnel.app.aspect.UserId; -import org.apache.seatunnel.app.common.Result; -import org.apache.seatunnel.app.domain.request.task.ExecuteReq; -import org.apache.seatunnel.app.domain.request.task.InstanceListReq; -import org.apache.seatunnel.app.domain.request.task.InstanceLogRes; -import org.apache.seatunnel.app.domain.request.task.JobListReq; -import org.apache.seatunnel.app.domain.request.task.RecycleScriptReq; -import org.apache.seatunnel.app.domain.response.PageInfo; -import org.apache.seatunnel.app.domain.response.task.InstanceSimpleInfoRes; -import org.apache.seatunnel.app.domain.response.task.JobSimpleInfoRes; -import org.apache.seatunnel.app.service.ITaskService; - -import io.swagger.annotations.ApiOperation; -import io.swagger.annotations.ApiParam; -import org.springframework.web.bind.annotation.GetMapping; -import org.springframework.web.bind.annotation.PatchMapping; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.PostMapping; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.RestController; -import springfox.documentation.annotations.ApiIgnore; - -import javax.annotation.Resource; -import javax.validation.constraints.NotNull; - -@RequestMapping("/api/v1/task") -@RestController -public class TaskController { - - @Resource - private ITaskService iTaskService; - - @PatchMapping("/{jobId}/recycle") - @ApiOperation(value = "recycle job", httpMethod = "PATCH") - Result recycle(@ApiParam(value = "job id", required = true) @PathVariable(value = "jobId") Long jobId, - @ApiIgnore @UserId Integer operatorId) { - final RecycleScriptReq req = new 
RecycleScriptReq(); - req.setJobId(jobId); - req.setOperatorId(operatorId); - - iTaskService.recycleScriptFromScheduler(req); - return Result.success(); - } - - @GetMapping("/job") - @ApiOperation(value = "list job", httpMethod = "GET") - Result> listJob(@ApiParam(value = "job name") @RequestParam(required = false) String name, - @ApiParam(value = "page num", required = true) @RequestParam Integer pageNo, - @ApiParam(value = "page size", required = true) @RequestParam Integer pageSize) { - final JobListReq req = new JobListReq(); - req.setName(name); - req.setPageNo(pageNo); - req.setPageSize(pageSize); - - return Result.success(iTaskService.listJob(req)); - } - - @GetMapping("/instance") - @ApiOperation(value = "list instance", httpMethod = "GET") - Result> listInstance(@ApiParam(value = "job name", required = false) @RequestParam(required = false) String name, - @ApiParam(value = "page num", required = true) @RequestParam Integer pageNo, - @ApiParam(value = "page size", required = true) @RequestParam Integer pageSize) { - final InstanceListReq req = new InstanceListReq(); - req.setName(name); - req.setPageNo(pageNo); - req.setPageSize(pageSize); - - return Result.success(iTaskService.listInstance(req)); - } - - @PostMapping("/{objectId}/execute") - @ApiOperation(value = "execute script temporary", httpMethod = "POST") - Result tmpExecute(@ApiParam(value = "object id", required = true) @PathVariable(value = "objectId") Long objectId, - @RequestBody @NotNull ExecuteReq req, - @ApiIgnore @UserId Integer operatorId) { - req.setObjectId(objectId); - req.setOperatorId(operatorId); - - return Result.success(iTaskService.tmpExecute(req)); - } - - @GetMapping("/{taskInstanceId}") - @ApiOperation(value = "query instance log", httpMethod = "GET") - Result queryInstanceLog(@ApiParam(value = "task instance id", required = true) @PathVariable(value = "taskInstanceId") Long taskInstanceId) { - return Result.success(iTaskService.queryInstanceLog(taskInstanceId)); - } - - 
@PatchMapping("/{taskInstanceId}") - @ApiOperation(value = "kill running instance", httpMethod = "POST") - Result kill(@ApiParam(value = "task instance id", required = true) @PathVariable(value = "taskInstanceId") Long taskInstanceId) { - iTaskService.kill(taskInstanceId); - return Result.success(); - } -} +/// * +// * Licensed to the Apache Software Foundation (ASF) under one or more +// * contributor license agreements. See the NOTICE file distributed with +// * this work for additional information regarding copyright ownership. +// * The ASF licenses this file to You under the Apache License, Version 2.0 +// * (the "License"); you may not use this file except in compliance with +// * the License. You may obtain a copy of the License at +// * +// * http://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// * See the License for the specific language governing permissions and +// * limitations under the License. 
+// */ +// +// package org.apache.seatunnel.app.controller; +// +// import org.apache.seatunnel.app.aspect.UserId; +// import org.apache.seatunnel.app.common.Result; +// import org.apache.seatunnel.app.domain.request.task.ExecuteReq; +// import org.apache.seatunnel.app.domain.request.task.InstanceListReq; +// import org.apache.seatunnel.app.domain.request.task.InstanceLogRes; +// import org.apache.seatunnel.app.domain.request.task.JobListReq; +// import org.apache.seatunnel.app.domain.request.task.RecycleScriptReq; +// import org.apache.seatunnel.app.domain.response.PageInfo; +// import org.apache.seatunnel.app.domain.response.task.InstanceSimpleInfoRes; +// import org.apache.seatunnel.app.domain.response.task.JobSimpleInfoRes; +// import org.apache.seatunnel.app.service.ITaskService; +// +// import org.springframework.web.bind.annotation.GetMapping; +// import org.springframework.web.bind.annotation.PatchMapping; +// import org.springframework.web.bind.annotation.PathVariable; +// import org.springframework.web.bind.annotation.PostMapping; +// import org.springframework.web.bind.annotation.RequestBody; +// import org.springframework.web.bind.annotation.RequestMapping; +// import org.springframework.web.bind.annotation.RequestParam; +// import org.springframework.web.bind.annotation.RestController; +// +// import io.swagger.annotations.ApiOperation; +// import io.swagger.annotations.ApiParam; +// import springfox.documentation.annotations.ApiIgnore; +// +// import javax.annotation.Resource; +// import javax.validation.constraints.NotNull; +// +// @RequestMapping("/seatunnel/api/v1/task") +// @RestController +// public class TaskController { +// +// @Resource private ITaskService iTaskService; +// +// @PatchMapping("/{jobId}/recycle") +// @ApiOperation(value = "recycle job", httpMethod = "PATCH") +// Result recycle( +// @ApiParam(value = "job id", required = true) @PathVariable(value = "jobId") Long +// jobId, +// @ApiIgnore @UserId Integer operatorId) { +// final 
RecycleScriptReq req = new RecycleScriptReq(); +// req.setJobId(jobId); +// req.setOperatorId(operatorId); +// +// iTaskService.recycleScriptFromScheduler(req); +// return Result.success(); +// } +// +// @GetMapping("/job") +// @ApiOperation(value = "list job", httpMethod = "GET") +// Result> listJob( +// @ApiParam(value = "job name") @RequestParam(required = false) String name, +// @ApiParam(value = "page num", required = true) @RequestParam Integer pageNo, +// @ApiParam(value = "page size", required = true) @RequestParam Integer pageSize) { +// final JobListReq req = new JobListReq(); +// req.setName(name); +// req.setPageNo(pageNo); +// req.setPageSize(pageSize); +// +// return Result.success(iTaskService.listJob(req)); +// } +// +// @GetMapping("/instance") +// @ApiOperation(value = "list instance", httpMethod = "GET") +// Result> listInstance( +// @ApiParam(value = "job name", required = false) @RequestParam(required = false) +// String name, +// @ApiParam(value = "page num", required = true) @RequestParam Integer pageNo, +// @ApiParam(value = "page size", required = true) @RequestParam Integer pageSize) { +// final InstanceListReq req = new InstanceListReq(); +// req.setName(name); +// req.setPageNo(pageNo); +// req.setPageSize(pageSize); +// +// return Result.success(iTaskService.listInstance(req)); +// } +// +// @PostMapping("/{objectId}/execute") +// @ApiOperation(value = "execute script temporary", httpMethod = "POST") +// Result tmpExecute( +// @ApiParam(value = "object id", required = true) @PathVariable(value = "objectId") +// Long objectId, +// @RequestBody @NotNull ExecuteReq req, +// @ApiIgnore @UserId Integer operatorId) { +// req.setObjectId(objectId); +// req.setOperatorId(operatorId); +// +// return Result.success(iTaskService.tmpExecute(req)); +// } +// +// @GetMapping("/{taskInstanceId}") +// @ApiOperation(value = "query instance log", httpMethod = "GET") +// Result queryInstanceLog( +// @ApiParam(value = "task instance id", required = true) 
+//                    @PathVariable(value = "taskInstanceId")
+//                    Long taskInstanceId) {
+//        return Result.success(iTaskService.queryInstanceLog(taskInstanceId));
+//    }
+//
+//    @PatchMapping("/{taskInstanceId}")
+//    @ApiOperation(value = "kill running instance", httpMethod = "POST")
+//    Result kill(
+//            @ApiParam(value = "task instance id", required = true)
+//                    @PathVariable(value = "taskInstanceId")
+//                    Long taskInstanceId) {
+//        iTaskService.kill(taskInstanceId);
+//        return Result.success();
+//    }
+// }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/TaskInstanceController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/TaskInstanceController.java
new file mode 100644
index 000000000..b585b5d6e
--- /dev/null
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/TaskInstanceController.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.app.controller;
+
+import org.apache.seatunnel.app.common.Result;
+import org.apache.seatunnel.app.domain.dto.job.SeaTunnelJobInstanceDto;
+import org.apache.seatunnel.app.domain.response.PageInfo;
+import org.apache.seatunnel.app.service.ITaskInstanceService;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RequestAttribute;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+import org.springframework.web.bind.annotation.RestController;
+
+import io.swagger.annotations.ApiOperation;
+
+/** REST endpoints for paged queries over SeaTunnel synchronization task (job) instances. */
+@RequestMapping("/seatunnel/api/v1/task")
+@RestController
+public class TaskInstanceController {
+
+    @Autowired ITaskInstanceService taskInstanceService;
+
+    @GetMapping("/jobMetrics")
+    @ApiOperation(value = "get the jobMetrics list ", httpMethod = "GET")
+    public Result<PageInfo<SeaTunnelJobInstanceDto>> 
getTaskInstanceList( + @RequestAttribute(name = "userId") Integer userId, + @RequestParam(name = "jobDefineName", required = false) String jobDefineName, + @RequestParam(name = "executorName", required = false) String executorName, + @RequestParam(name = "stateType", required = false) String stateType, + @RequestParam(name = "startDate", required = false) String startTime, + @RequestParam(name = "endDate", required = false) String endTime, + @RequestParam("syncTaskType") String syncTaskType, + @RequestParam("pageNo") Integer pageNo, + @RequestParam("pageSize") Integer pageSize) { + Result> result = + taskInstanceService.getSyncTaskInstancePaging( + userId, + jobDefineName, + executorName, + stateType, + startTime, + endTime, + syncTaskType, + pageNo, + pageSize); + + return result; + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/UserController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/UserController.java index 08cd90723..106f23ff9 100644 --- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/UserController.java +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/UserController.java @@ -27,8 +27,6 @@ import org.apache.seatunnel.app.domain.response.user.UserSimpleInfoRes; import org.apache.seatunnel.app.service.IUserService; -import io.swagger.annotations.ApiOperation; -import io.swagger.annotations.ApiParam; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PatchMapping; @@ -40,15 +38,17 @@ import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; +import io.swagger.annotations.ApiOperation; +import io.swagger.annotations.ApiParam; + import javax.annotation.Resource; import javax.validation.constraints.NotNull; 
-@RequestMapping("/api/v1/user") +@RequestMapping("/seatunnel/api/v1/user") @RestController public class UserController { - @Resource - private IUserService iUserService; + @Resource private IUserService iUserService; @PostMapping @ApiOperation(value = "add user", httpMethod = "POST") @@ -58,8 +58,10 @@ public Result add(@RequestBody @NotNull AddUserReq addReq) { @PutMapping("/{userId}") @ApiOperation(value = "update user", httpMethod = "PUT") - public Result update(@ApiParam(value = "user id", required = true) @PathVariable(value = "userId") Integer userId, - @RequestBody @NotNull UpdateUserReq updateReq) { + public Result update( + @ApiParam(value = "user id", required = true) @PathVariable(value = "userId") + Integer userId, + @RequestBody @NotNull UpdateUserReq updateReq) { updateReq.setUserId(userId); iUserService.update(updateReq); @@ -68,16 +70,19 @@ public Result update(@ApiParam(value = "user id", required = true) @PathVa @DeleteMapping("/{userId}") @ApiOperation(value = "delete user", httpMethod = "DELETE") - public Result delete(@ApiParam(value = "user id", required = true) @PathVariable(value = "userId") Integer userId) { + public Result delete( + @ApiParam(value = "user id", required = true) @PathVariable(value = "userId") + Integer userId) { iUserService.delete(userId); return Result.success(); } @GetMapping @ApiOperation(value = "user list", httpMethod = "GET") - public Result> list(@ApiParam(value = "user name") @RequestParam(required = false) String name, - @ApiParam(value = "page num", required = true) @RequestParam Integer pageNo, - @ApiParam(value = "page size", required = true) @RequestParam Integer pageSize) { + public Result> list( + @ApiParam(value = "user name") @RequestParam(required = false) String name, + @ApiParam(value = "page num", required = true) @RequestParam Integer pageNo, + @ApiParam(value = "page size", required = true) @RequestParam Integer pageSize) { final UserListReq req = new UserListReq(); req.setName(name); 
req.setPageNo(pageNo); @@ -88,14 +93,18 @@ public Result> list(@ApiParam(value = "user name") @ @PatchMapping("/{userId}/enable") @ApiOperation(value = "enable a user", httpMethod = "PATCH") - public Result enable(@ApiParam(value = "user id", required = true) @PathVariable(value = "userId") Integer userId) { + public Result enable( + @ApiParam(value = "user id", required = true) @PathVariable(value = "userId") + Integer userId) { iUserService.enable(userId); return Result.success(); } @PutMapping("/{userId}/disable") @ApiOperation(value = "disable a user", httpMethod = "PUT") - public Result disable(@ApiParam(value = "user id", required = true) @PathVariable(value = "userId") Integer userId) { + public Result disable( + @ApiParam(value = "user id", required = true) @PathVariable(value = "userId") + Integer userId) { iUserService.disable(userId); return Result.success(); } diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/VirtualTableController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/VirtualTableController.java new file mode 100644 index 000000000..eeba37d91 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/VirtualTableController.java @@ -0,0 +1,195 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.app.controller; + +import org.apache.seatunnel.app.common.Result; +import org.apache.seatunnel.app.dal.dao.IUserDao; +import org.apache.seatunnel.app.dal.entity.User; +import org.apache.seatunnel.app.domain.request.datasource.VirtualTableReq; +import org.apache.seatunnel.app.domain.response.PageInfo; +import org.apache.seatunnel.app.domain.response.datasource.VirtualTableDetailRes; +import org.apache.seatunnel.app.domain.response.datasource.VirtualTableRes; +import org.apache.seatunnel.app.service.IVirtualTableService; + +import org.apache.commons.collections4.CollectionUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.DeleteMapping; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.PutMapping; +import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import springfox.documentation.annotations.ApiIgnore; + +import javax.annotation.Resource; + +import java.util.List; 
+import java.util.Map; +import java.util.stream.Collectors; + +import static org.apache.seatunnel.app.common.Constants.SESSION_USER; + +@RestController +@RequestMapping("/seatunnel/api/v1/virtual_table") +public class VirtualTableController extends BaseController { + + @Autowired private IVirtualTableService virtualTableService; + + @Resource(name = "userDaoImpl") + private IUserDao userMapper; + + @ApiOperation(value = "create virtual table", httpMethod = "POST") + @ApiImplicitParams({ + @ApiImplicitParam( + name = "tableReq", + value = "virtual table request", + required = true, + dataType = "VirtualTableReq") + }) + @PostMapping("/create") + Result createVirtualTable( + @ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @RequestBody VirtualTableReq tableReq) { + return Result.success(virtualTableService.createVirtualTable(loginUser.getId(), tableReq)); + } + + @ApiOperation(value = "update virtual table", httpMethod = "PUT") + @ApiImplicitParams({ + @ApiImplicitParam( + name = "id", + value = "virtual table id", + required = true, + dataType = "String"), + @ApiImplicitParam( + name = "tableReq", + value = "virtual table request", + required = true, + dataType = "VirtualTableReq") + }) + @PutMapping("/{id}") + Result updateVirtualTable( + @ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @PathVariable("id") String id, + @RequestBody VirtualTableReq tableReq) { + return Result.success( + virtualTableService.updateVirtualTable(loginUser.getId(), id, tableReq)); + } + + @ApiOperation(value = "check virtual table valid", httpMethod = "GET") + @ApiImplicitParams({ + @ApiImplicitParam( + name = "virtualTableReq", + value = "virtual table request", + required = true, + dataType = "VirtualTableReq") + }) + @GetMapping("/check/valid") + Result checkVirtualTableValid( + @ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @RequestBody VirtualTableReq virtualTableReq) { + return 
Result.success(virtualTableService.checkVirtualTableValid(virtualTableReq)); + } + + @ApiOperation(value = "get support field type", httpMethod = "GET") + @GetMapping("/support/field_type") + Result> getSupportFieldType(@RequestParam("pluginName") String pluginName) { + // return Result.success(virtualTableService.queryTableDynamicTable(pluginName)); + // todo @liuli + return null; + } + + @ApiOperation(value = "delete virtual table", httpMethod = "DELETE") + @ApiImplicitParams({ + @ApiImplicitParam( + name = "id", + value = "virtual table id", + required = true, + dataType = "String") + }) + @DeleteMapping("/{id}") + Result deleteVirtualTable( + @PathVariable("id") String id, + @ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser) { + return Result.success(virtualTableService.deleteVirtualTable(loginUser.getId(), id)); + } + + @ApiOperation(value = "query virtual table detail by id", httpMethod = "GET") + @ApiImplicitParams({ + @ApiImplicitParam( + name = "id", + value = "virtual table id", + required = true, + dataType = "String") + }) + @GetMapping("/{id}") + Result queryVirtualTable( + @ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @PathVariable("id") String id) { + // rsp add plugin name + return Result.success(virtualTableService.queryVirtualTable(id)); + } + + @GetMapping("/list") + Result> getVirtualTableList( + @RequestParam("pluginName") String pluginName, + @RequestParam("datasourceName") String datasourceName, + @RequestParam("pageNo") Integer pageNo, + @RequestParam("pageSize") Integer pageSize) { + PageInfo virtualTableList = + virtualTableService.getVirtualTableList( + pluginName, datasourceName, pageNo, pageSize); + if (virtualTableList.getTotalCount() == 0 + || CollectionUtils.isEmpty(virtualTableList.getData())) { + return Result.success(virtualTableList); + } + Map userIdNameMap = userIdNameMap(); + virtualTableList + .getData() + .forEach( + virtualTableRes -> { + virtualTableRes.setCreateUserName( + 
userIdNameMap.getOrDefault( + virtualTableRes.getCreateUserId(), "")); + virtualTableRes.setUpdateUserName( + userIdNameMap.getOrDefault( + virtualTableRes.getUpdateUserId(), "")); + }); + return Result.success(virtualTableList); + } + + @GetMapping("/dynamic_config") + Result getDynamicConfig( + @RequestParam("pluginName") String pluginName, + @RequestParam("datasourceName") String datasourceName) { + return Result.success(virtualTableService.queryTableDynamicTable(pluginName)); + } + + public Map userIdNameMap() { + return userMapper.queryEnabledUsers().stream() + .collect(Collectors.toMap(User::getId, User::getUsername)); + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IDatasourceDao.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IDatasourceDao.java new file mode 100644 index 000000000..206f63f2e --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IDatasourceDao.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.app.dal.dao; + +import org.apache.seatunnel.app.dal.entity.Datasource; + +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + +import java.util.List; + +public interface IDatasourceDao { + + boolean insertDatasource(Datasource datasource); + + Datasource selectDatasourceById(Long id); + + boolean deleteDatasourceById(Long id); + + Datasource queryDatasourceByName(String name); + + boolean updateDatasourceById(Datasource datasource); + + boolean checkDatasourceNameUnique(String dataSourceName, Long dataSourceId); + + String queryDatasourceNameById(Long id); + + List selectDatasourceByPluginName(String pluginName, String pluginVersion); + + IPage selectDatasourcePage(Page page); + + IPage selectDatasourceByParam( + Page page, + List availableDatasourceIds, + String searchVal, + String pluginName); + + List selectDatasourceByIds(List ids); + + List queryAll(); + + List selectByIds(List ids); + + List selectDatasourceByUserId(int userId); +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IJobDefinitionDao.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IJobDefinitionDao.java new file mode 100644 index 000000000..647936a4f --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IJobDefinitionDao.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.app.dal.dao; + +import org.apache.seatunnel.app.dal.entity.JobDefinition; +import org.apache.seatunnel.app.domain.response.PageInfo; + +import lombok.NonNull; + +import java.util.List; + +public interface IJobDefinitionDao { + + void add(JobDefinition job); + + JobDefinition getJob(long id); + + void updateJob(JobDefinition jobDefinition); + + PageInfo getJob(String name, Integer pageNo, Integer pageSize, String jobMode); + + List getJobList(@NonNull String name); + + JobDefinition getJobByName(@NonNull String name); + + void delete(long id); +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IJobInstanceDao.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IJobInstanceDao.java new file mode 100644 index 000000000..f4c5b8f14 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IJobInstanceDao.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.app.dal.dao; + +import org.apache.seatunnel.app.dal.entity.JobInstance; +import org.apache.seatunnel.app.dal.mapper.JobInstanceMapper; +import org.apache.seatunnel.app.domain.dto.job.SeaTunnelJobInstanceDto; + +import com.baomidou.mybatisplus.core.metadata.IPage; +import lombok.NonNull; + +import java.util.Date; +import java.util.List; + +public interface IJobInstanceDao { + + JobInstance getJobInstance(@NonNull Long jobInstanceId); + + JobInstance getJobInstanceByEngineId(@NonNull Long jobEngineId); + + void update(@NonNull JobInstance jobInstance); + + void insert(@NonNull JobInstance jobInstance); + + JobInstanceMapper getJobInstanceMapper(); + + IPage queryJobInstanceListPaging( + IPage page, + Date startTime, + Date endTime, + Long jobDefineId, + String jobMode); + + List getAllJobInstance(@NonNull List jobInstanceIdList); +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IJobInstanceHistoryDao.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IJobInstanceHistoryDao.java new file mode 100644 index 000000000..9136c5926 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IJobInstanceHistoryDao.java @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.app.dal.dao; + +import org.apache.seatunnel.app.dal.entity.JobInstanceHistory; + +public interface IJobInstanceHistoryDao { + JobInstanceHistory getByInstanceId(Long jobInstanceId); + + void insert(JobInstanceHistory jobInstanceHistory); +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IJobLineDao.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IJobLineDao.java new file mode 100644 index 000000000..02a3ed39f --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IJobLineDao.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.seatunnel.app.dal.dao; + +import org.apache.seatunnel.app.dal.entity.JobLine; + +import java.util.List; + +public interface IJobLineDao { + + void deleteLinesByVersionId(long jobVersionId); + + void insertLines(List lines); + + List getLinesByVersionId(long jobVersionId); +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IJobMetricsDao.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IJobMetricsDao.java new file mode 100644 index 000000000..d5970463f --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IJobMetricsDao.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.seatunnel.app.dal.dao; + +import org.apache.seatunnel.app.dal.entity.JobMetrics; +import org.apache.seatunnel.app.dal.mapper.JobMetricsMapper; + +import lombok.NonNull; + +import java.util.List; + +public interface IJobMetricsDao { + + List getByInstanceId(@NonNull Long jobInstanceId); + + JobMetricsMapper getJobMetricsMapper(); +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IJobTaskDao.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IJobTaskDao.java new file mode 100644 index 000000000..f9ae0a14d --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IJobTaskDao.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.seatunnel.app.dal.dao; + +import org.apache.seatunnel.app.dal.entity.JobTask; + +import java.util.List; + +public interface IJobTaskDao { + + List getTasksByVersionId(long jobVersionId); + + void insertTask(JobTask jobTask); + + void updateTask(JobTask jobTask); + + List getJobTaskByDataSourceId(long datasourceId); + + JobTask getTask(long jobVersionId, String pluginId); + + void updateTasks(List jobTasks); + + void deleteTasks(List jobTaskIds); + + void deleteTask(long jobVersionId, String pluginId); +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IJobVersionDao.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IJobVersionDao.java new file mode 100644 index 000000000..593dde815 --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IJobVersionDao.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.seatunnel.app.dal.dao;

import org.apache.seatunnel.app.dal.entity.JobVersion;

import java.util.List;

/** Data-access interface for job version rows. */
public interface IJobVersionDao {

    /** Persist a new version row. */
    void createVersion(JobVersion jobVersion);

    /** Update an existing version row by its primary key. */
    void updateVersion(JobVersion version);

    /**
     * Fetch the latest version of a job.
     *
     * @param jobId id of the job definition
     * @return the latest version row for the job
     */
    JobVersion getLatestVersion(long jobId);

    /**
     * Fetch the latest versions for a batch of jobs.
     *
     * @param jobIds job definition ids
     * @return version rows for the given jobs
     */
    List<JobVersion> getLatestVersionByJobIds(List<Long> jobIds);

    /** Fetch one version by its primary key, or null when absent. */
    JobVersion getVersionById(long jobVersionId);

    /** Fetch a batch of versions by primary key. */
    List<JobVersion> getVersionsByIds(List<Long> jobVersionIds);
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.seatunnel.app.dal.dao;

import org.apache.seatunnel.app.dal.entity.VirtualTable;

import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;

import java.util.List;

/** Data-access interface for virtual table rows. */
public interface IVirtualTableDao {

    /** @return true when the insert affected a row */
    boolean insertVirtualTable(VirtualTable virtualTable);

    /** @return true when the update affected a row */
    boolean updateVirtualTable(VirtualTable virtualTable);

    /** @return true when the delete affected a row */
    boolean deleteVirtualTable(Long id);

    /** Fetch one virtual table by primary key, or null when absent. */
    VirtualTable selectVirtualTableById(Long id);

    /** Fetch one virtual table by its table name, or null when absent. */
    VirtualTable selectVirtualTableByTableName(String tableName);

    /**
     * Check that no other virtual table (id != tableId) uses the given
     * name within the given database.
     *
     * @return true when the name is free
     */
    boolean checkVirtualTableNameUnique(String virtualTableName, String databaseName, Long tableId);

    /** Page virtual tables, optionally filtered by plugin and datasource name. */
    IPage<VirtualTable> selectVirtualTablePage(
            Page<VirtualTable> page, String pluginName, String datasourceName);

    /** Page virtual tables belonging to one datasource. */
    IPage<VirtualTable> selectDatasourceByParam(Page<VirtualTable> page, Long datasourceId);

    /** List virtual table names under a database of a datasource. */
    List<String> getVirtualTableNames(String databaseName, Long datasourceId);

    /** List distinct virtual database names of a datasource. */
    List<String> getVirtualDatabaseNames(Long datasourceId);

    /** @return true when the datasource owns at least one virtual table */
    boolean checkHasVirtualTable(Long datasourceId);
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.seatunnel.app.dal.dao;

import org.apache.seatunnel.app.dal.entity.TaskDefinition;
import org.apache.seatunnel.app.dal.entity.TaskMainInfo;

import java.util.Collection;
import java.util.List;

/** Data-access interface for DolphinScheduler task definition rows. */
public interface TaskDefinitionDao {

    /**
     * Find summary info of tasks that reference a datasource.
     *
     * @param dataSourceId id of the datasource
     * @return matching task summaries; empty list when none exist
     */
    List<TaskMainInfo> queryByDataSourceId(Long dataSourceId);

    /**
     * Batch-fetch task definitions by task code.
     *
     * @param taskCodes codes of the tasks to load
     * @return matching task definitions
     */
    List<TaskDefinition> queryTaskDefinitions(Collection<Long> taskCodes);

    /**
     * Fetch all task definitions belonging to one workflow definition version.
     *
     * @param workflowDefinitionCode code of the workflow definition
     * @param workflowDefinitionVersion version of the workflow definition
     * @return task definitions of that workflow version
     */
    List<TaskDefinition> queryByWorkflowDefinitionCodeAndVersion(
            Long workflowDefinitionCode, Integer workflowDefinitionVersion);
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.seatunnel.app.dal.dao.impl;

import org.apache.seatunnel.app.dal.dao.IDatasourceDao;
import org.apache.seatunnel.app.dal.entity.Datasource;
import org.apache.seatunnel.app.dal.mapper.DatasourceMapper;

import org.springframework.stereotype.Repository;

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;

import javax.annotation.Resource;

import java.util.List;

/** MyBatis-Plus backed implementation of {@link IDatasourceDao}. */
@Repository
public class DatasourceDaoImpl implements IDatasourceDao {

    @Resource private DatasourceMapper datasourceMapper;

    @Override
    public boolean insertDatasource(Datasource datasource) {
        return datasourceMapper.insert(datasource) > 0;
    }

    @Override
    public Datasource selectDatasourceById(Long id) {
        return datasourceMapper.selectById(id);
    }

    @Override
    public boolean deleteDatasourceById(Long id) {
        return datasourceMapper.deleteById(id) > 0;
    }

    @Override
    public Datasource queryDatasourceByName(String name) {
        return datasourceMapper.selectOne(
                new QueryWrapper<Datasource>().eq("datasource_name", name));
    }

    @Override
    public boolean updateDatasourceById(Datasource datasource) {
        return datasourceMapper.updateById(datasource) > 0;
    }

    /**
     * Check that no OTHER datasource (id != dataSourceId) already uses the name.
     *
     * @return true when the name is free for the given datasource id
     */
    @Override
    public boolean checkDatasourceNameUnique(String dataSourceName, Long dataSourceId) {
        QueryWrapper<Datasource> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq("datasource_name", dataSourceName);
        // exclude the row being updated so renaming to its own name stays legal
        queryWrapper.ne("id", dataSourceId);
        return datasourceMapper.selectList(queryWrapper).isEmpty();
    }

    @Override
    public IPage<Datasource> selectDatasourcePage(Page<Datasource> page) {
        return datasourceMapper.selectPage(page, new QueryWrapper<>());
    }

    /**
     * Page datasources restricted to the caller-visible ids, optionally filtered
     * by plugin name (exact match) and/or datasource name (fuzzy match).
     */
    @Override
    public IPage<Datasource> selectDatasourceByParam(
            Page<Datasource> page,
            List<Long> availableDatasourceIds,
            String searchVal,
            String pluginName) {
        QueryWrapper<Datasource> wrapper = new QueryWrapper<>();
        wrapper.in("id", availableDatasourceIds);
        if (pluginName != null && !pluginName.isEmpty()) {
            wrapper.eq("plugin_name", pluginName);
        }
        if (searchVal != null && !searchVal.isEmpty()) {
            wrapper.like("datasource_name", searchVal);
        }
        return datasourceMapper.selectPage(page, wrapper);
    }

    @Override
    public String queryDatasourceNameById(Long id) {
        // NOTE(review): throws NPE when the id does not exist — presumably callers
        // only pass validated ids; confirm before hardening.
        return datasourceMapper.selectById(id).getDatasourceName();
    }

    @Override
    public List<Datasource> selectDatasourceByPluginName(String pluginName, String pluginVersion) {
        return datasourceMapper.selectList(
                new QueryWrapper<Datasource>()
                        .eq("plugin_name", pluginName)
                        .eq("plugin_version", pluginVersion));
    }

    @Override
    public List<Datasource> selectDatasourceByIds(List<Long> ids) {
        return datasourceMapper.selectBatchIds(ids);
    }

    @Override
    public List<Datasource> queryAll() {
        return datasourceMapper.selectList(new QueryWrapper<>());
    }

    @Override
    public List<Datasource> selectByIds(List<Long> ids) {
        return datasourceMapper.selectBatchIds(ids);
    }

    @Override
    public List<Datasource> selectDatasourceByUserId(int userId) {
        QueryWrapper<Datasource> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq("create_user_id", userId);
        return datasourceMapper.selectList(queryWrapper);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.seatunnel.app.dal.dao.impl;

import org.apache.seatunnel.app.dal.dao.IJobDefinitionDao;
import org.apache.seatunnel.app.dal.entity.JobDefinition;
import org.apache.seatunnel.app.dal.mapper.JobMapper;
import org.apache.seatunnel.app.domain.response.PageInfo;

import org.apache.commons.lang3.StringUtils;

import org.springframework.stereotype.Repository;

import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import lombok.NonNull;

import javax.annotation.Resource;

import java.util.List;

/** MyBatis-Plus backed implementation of {@link IJobDefinitionDao}. */
@Repository
public class JobDefinitionDaoImpl implements IJobDefinitionDao {

    @Resource private JobMapper jobMapper;

    @Override
    public void add(JobDefinition job) {
        jobMapper.insert(job);
    }

    @Override
    public JobDefinition getJob(long id) {
        return jobMapper.selectById(id);
    }

    @Override
    public void updateJob(JobDefinition jobDefinition) {
        jobMapper.updateById(jobDefinition);
    }

    /**
     * Page job definitions by (optional) name filter and (optional) job mode.
     *
     * @param searchName fuzzy name filter, may be null/empty
     * @param pageNo 1-based page number
     * @param pageSize page size
     * @param jobMode job mode filter; when empty the mode is ignored
     * @return a populated {@link PageInfo} with records and total count
     */
    @Override
    public PageInfo<JobDefinition> getJob(
            String searchName, Integer pageNo, Integer pageSize, String jobMode) {
        IPage<JobDefinition> jobDefinitionIPage;
        if (StringUtils.isEmpty(jobMode)) {
            jobDefinitionIPage =
                    jobMapper.queryJobListPaging(new Page<>(pageNo, pageSize), searchName);
        } else {
            jobDefinitionIPage =
                    jobMapper.queryJobListPagingWithJobMode(
                            new Page<>(pageNo, pageSize), searchName, jobMode);
        }
        PageInfo<JobDefinition> jobs = new PageInfo<>();
        jobs.setData(jobDefinitionIPage.getRecords());
        jobs.setPageSize(pageSize);
        jobs.setPageNo(pageNo);
        jobs.setTotalCount((int) jobDefinitionIPage.getTotal());
        return jobs;
    }

    @Override
    public List<JobDefinition> getJobList(@NonNull String name) {
        return jobMapper.queryJobList(name);
    }

    @Override
    public JobDefinition getJobByName(@NonNull String name) {
        return jobMapper.queryJob(name);
    }

    /** Delete the job definition with the given id; a no-op when the id is absent. */
    public void delete(long id) {
        // chained conditions default to AND; the single eq is sufficient here
        jobMapper.delete(Wrappers.lambdaQuery(new JobDefinition()).eq(JobDefinition::getId, id));
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.seatunnel.app.dal.dao.impl;

import org.apache.seatunnel.app.dal.dao.IJobInstanceDao;
import org.apache.seatunnel.app.dal.entity.JobInstance;
import org.apache.seatunnel.app.dal.mapper.JobInstanceMapper;
import org.apache.seatunnel.app.domain.dto.job.SeaTunnelJobInstanceDto;

import org.springframework.stereotype.Repository;

import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import lombok.NonNull;

import javax.annotation.Resource;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/** MyBatis-Plus backed implementation of {@link IJobInstanceDao}. */
@Repository
public class JobInstanceDaoImpl implements IJobInstanceDao {

    @Resource private JobInstanceMapper jobInstanceMapper;

    @Override
    public JobInstance getJobInstance(@NonNull Long jobInstanceId) {
        return jobInstanceMapper.selectById(jobInstanceId);
    }

    /** Look up the instance row by the engine-side job id, or null when absent. */
    @Override
    public JobInstance getJobInstanceByEngineId(@NonNull Long jobEngineId) {
        return jobInstanceMapper.selectOne(
                new LambdaQueryWrapper<>(new JobInstance())
                        .eq(JobInstance::getJobEngineId, jobEngineId));
    }

    @Override
    public void update(@NonNull JobInstance jobInstance) {
        jobInstanceMapper.updateById(jobInstance);
    }

    @Override
    public void insert(@NonNull JobInstance jobInstance) {
        jobInstanceMapper.insert(jobInstance);
    }

    /** @return the underlying MyBatis-Plus mapper, for callers needing direct access */
    @Override
    public JobInstanceMapper getJobInstanceMapper() {
        return jobInstanceMapper;
    }

    /**
     * Page job instances filtered by time window, job definition and job mode.
     * Filtering logic lives in the mapper's XML/annotation query.
     */
    @Override
    public IPage<SeaTunnelJobInstanceDto> queryJobInstanceListPaging(
            IPage<SeaTunnelJobInstanceDto> page,
            Date startTime,
            Date endTime,
            Long jobDefineId,
            String jobMode) {
        return jobInstanceMapper.queryJobInstanceListPaging(
                page, startTime, endTime, jobDefineId, jobMode);
    }

    /**
     * Load each instance by id, one query per id, preserving input order.
     * NOTE(review): missing ids produce null entries in the result — confirm
     * callers tolerate that before switching to a batch select.
     */
    @Override
    public List<JobInstance> getAllJobInstance(@NonNull List<Long> jobInstanceIdList) {
        List<JobInstance> jobInstances = new ArrayList<>(jobInstanceIdList.size());
        for (long jobInstanceId : jobInstanceIdList) {
            jobInstances.add(jobInstanceMapper.selectById(jobInstanceId));
        }
        return jobInstances;
    }
}
+ */ + +package org.apache.seatunnel.app.dal.dao.impl; + +import org.apache.seatunnel.app.dal.dao.IJobInstanceHistoryDao; +import org.apache.seatunnel.app.dal.entity.JobInstanceHistory; +import org.apache.seatunnel.app.dal.mapper.JobInstanceHistoryMapper; + +import org.springframework.stereotype.Repository; + +import com.baomidou.mybatisplus.core.toolkit.Wrappers; + +import javax.annotation.Resource; + +@Repository +public class JobInstanceHistoryDaoImpl implements IJobInstanceHistoryDao { + + @Resource private JobInstanceHistoryMapper jobInstanceHistoryMapper; + + @Override + public JobInstanceHistory getByInstanceId(Long jobInstanceId) { + return jobInstanceHistoryMapper.selectOne( + Wrappers.lambdaQuery(new JobInstanceHistory()) + .eq(JobInstanceHistory::getId, jobInstanceId)); + } + + @Override + public void insert(JobInstanceHistory jobInstanceHistory) { + jobInstanceHistoryMapper.insert(jobInstanceHistory); + } +} diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/JobLineDaoImpl.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/JobLineDaoImpl.java new file mode 100644 index 000000000..3be8e972d --- /dev/null +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/JobLineDaoImpl.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.seatunnel.app.dal.dao.impl;

import org.apache.seatunnel.app.dal.dao.IJobLineDao;
import org.apache.seatunnel.app.dal.entity.JobLine;
import org.apache.seatunnel.app.dal.mapper.JobLineMapper;

import org.springframework.stereotype.Repository;

import com.baomidou.mybatisplus.core.toolkit.Wrappers;

import javax.annotation.Resource;

import java.util.List;

/**
 * MyBatis-Plus backed implementation of {@link IJobLineDao}.
 *
 * <p>Annotated {@code @Repository} (not {@code @Service}) to stay consistent with
 * the other DAO implementations in this package; both register the bean, but the
 * repository stereotype also enables persistence exception translation.
 */
@Repository
public class JobLineDaoImpl implements IJobLineDao {

    @Resource private JobLineMapper jobLineMapper;

    /** Delete every line row belonging to the given job version. */
    @Override
    public void deleteLinesByVersionId(long jobVersionId) {
        jobLineMapper.deleteLinesByVersionId(jobVersionId);
    }

    /** Batch-insert line rows via the mapper's batch statement. */
    @Override
    public void insertLines(List<JobLine> lines) {
        jobLineMapper.insertBatchLines(lines);
    }

    /**
     * List all line rows of one job version.
     *
     * @return lines of that version; empty list when none exist
     */
    @Override
    public List<JobLine> getLinesByVersionId(long jobVersionId) {
        return jobLineMapper.selectList(
                Wrappers.lambdaQuery(new JobLine()).eq(JobLine::getVersionId, jobVersionId));
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.seatunnel.app.dal.dao.impl;

import org.apache.seatunnel.app.dal.dao.IJobMetricsDao;
import org.apache.seatunnel.app.dal.entity.JobMetrics;
import org.apache.seatunnel.app.dal.mapper.JobMetricsMapper;

import org.springframework.stereotype.Repository;

import lombok.NonNull;

import javax.annotation.Resource;

import java.util.List;

/** MyBatis-Plus backed implementation of {@link IJobMetricsDao}. */
@Repository
public class JobMetricsDaoImpl implements IJobMetricsDao {

    @Resource private JobMetricsMapper jobMetricsMapper;

    /**
     * Query all metrics rows recorded for a single job instance.
     *
     * @param jobInstanceId id of the job instance, must not be null
     * @return metrics rows for the instance; empty list when none exist
     */
    @Override
    public List<JobMetrics> getByInstanceId(@NonNull Long jobInstanceId) {
        return jobMetricsMapper.queryJobMetricsByInstanceId(jobInstanceId);
    }

    /** @return the underlying MyBatis-Plus mapper, for callers needing direct access */
    @Override
    public JobMetricsMapper getJobMetricsMapper() {
        return jobMetricsMapper;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.seatunnel.app.dal.dao.impl;

import org.apache.seatunnel.app.dal.dao.IJobTaskDao;
import org.apache.seatunnel.app.dal.entity.JobTask;
import org.apache.seatunnel.app.dal.mapper.JobTaskMapper;

import org.springframework.stereotype.Repository;

import com.baomidou.mybatisplus.core.toolkit.Wrappers;

import javax.annotation.Resource;

import java.util.List;

/** MyBatis-Plus backed implementation of {@link IJobTaskDao}. */
@Repository
public class JobTaskDaoImpl implements IJobTaskDao {

    @Resource private JobTaskMapper jobTaskMapper;

    /** List all tasks of one job version; empty list when none exist. */
    @Override
    public List<JobTask> getTasksByVersionId(long jobVersionId) {
        return jobTaskMapper.selectList(
                Wrappers.lambdaQuery(new JobTask()).eq(JobTask::getVersionId, jobVersionId));
    }

    /** Insert a task row; a null argument is silently ignored. */
    @Override
    public void insertTask(JobTask jobTask) {
        if (jobTask != null) {
            jobTaskMapper.insert(jobTask);
        }
    }

    /** Update a task row by primary key; a null argument is silently ignored. */
    @Override
    public void updateTask(JobTask jobTask) {
        if (jobTask != null) {
            jobTaskMapper.updateById(jobTask);
        }
    }

    /** Fetch the single task identified by version id and plugin id, or null. */
    @Override
    public JobTask getTask(long jobVersionId, String pluginId) {
        // chained conditions default to AND; the explicit .and(...) wrapper is redundant
        return jobTaskMapper.selectOne(
                Wrappers.lambdaQuery(new JobTask())
                        .eq(JobTask::getVersionId, jobVersionId)
                        .eq(JobTask::getPluginId, pluginId));
    }

    /** Find all tasks referencing the given datasource; empty list when none exist. */
    @Override
    public List<JobTask> getJobTaskByDataSourceId(long datasourceId) {
        return jobTaskMapper.selectList(
                Wrappers.lambdaQuery(new JobTask()).eq(JobTask::getDataSourceId, datasourceId));
    }

    /** Update each task in the list by its primary key. */
    @Override
    public void updateTasks(List<JobTask> jobTasks) {
        jobTasks.forEach(jobTaskMapper::updateById);
    }

    /** Delete the tasks with the given primary keys; an empty list is a no-op. */
    @Override
    public void deleteTasks(List<Long> jobTaskIds) {
        if (!jobTaskIds.isEmpty()) {
            jobTaskMapper.deleteBatchIds(jobTaskIds);
        }
    }

    /** Delete the single task identified by version id and plugin id. */
    @Override
    public void deleteTask(long jobVersionId, String pluginId) {
        jobTaskMapper.delete(
                Wrappers.lambdaQuery(new JobTask())
                        .eq(JobTask::getVersionId, jobVersionId)
                        .eq(JobTask::getPluginId, pluginId));
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.seatunnel.app.dal.dao.impl;

import org.apache.seatunnel.app.dal.dao.IJobVersionDao;
import org.apache.seatunnel.app.dal.entity.JobVersion;
import org.apache.seatunnel.app.dal.mapper.JobVersionMapper;

import org.springframework.stereotype.Repository;

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;

import javax.annotation.Resource;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

/** MyBatis-Plus backed implementation of {@link IJobVersionDao}. */
@Repository
public class JobVersionDaoImpl implements IJobVersionDao {

    @Resource private JobVersionMapper jobVersionMapper;

    /** Persist a new version row. */
    @Override
    public void createVersion(JobVersion jobVersion) {
        jobVersionMapper.insert(jobVersion);
    }

    /** Update an existing version row by its primary key. */
    @Override
    public void updateVersion(JobVersion version) {
        jobVersionMapper.updateById(version);
    }

    /**
     * Fetch the latest version of a job.
     *
     * <p>NOTE(review): takes the first row returned for the job_id — this assumes
     * at least one version exists (otherwise IndexOutOfBoundsException) and that
     * the mapper's default ordering yields the latest first; confirm both.
     */
    @Override
    public JobVersion getLatestVersion(long jobId) {
        Map<String, Object> queryMap = new HashMap<>();
        queryMap.put("job_id", jobId);
        return jobVersionMapper.selectByMap(queryMap).get(0);
    }

    /** Fetch the version rows for a batch of jobs. */
    @Override
    public List<JobVersion> getLatestVersionByJobIds(List<Long> jobIds) {
        QueryWrapper<JobVersion> wrapper = new QueryWrapper<>();
        wrapper.in("job_id", jobIds);
        return jobVersionMapper.selectList(wrapper);
    }

    /** Fetch one version by primary key, or null when absent. */
    @Override
    public JobVersion getVersionById(long jobVersionId) {
        return jobVersionMapper.selectById(jobVersionId);
    }

    /** Batch-fetch versions by primary key. */
    @Override
    public List<JobVersion> getVersionsByIds(List<Long> jobVersionIds) {
        return jobVersionMapper.selectBatchIds(jobVersionIds);
    }
}
IRoleDao { - @Resource - private RoleMapper roleMapper; + @Resource private RoleMapper roleMapper; @Override - public void add(Role role){ + public void add(Role role) { roleMapper.insert(role); } @@ -40,5 +39,4 @@ public void add(Role role){ public Role getByRoleName(String roleName) { return roleMapper.selectByRole(roleName); } - } diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/RoleUserRelationDaoImpl.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/RoleUserRelationDaoImpl.java index 8bc66ddce..bee2cd7b3 100644 --- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/RoleUserRelationDaoImpl.java +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/RoleUserRelationDaoImpl.java @@ -28,17 +28,17 @@ @Repository public class RoleUserRelationDaoImpl implements IRoleUserRelationDao { - @Resource - private RoleUserRelationMapper roleUserRelationMapper; + @Resource private RoleUserRelationMapper roleUserRelationMapper; @Override - public void add(RoleUserRelation roleUserRelation){ + public void add(RoleUserRelation roleUserRelation) { roleUserRelationMapper.insert(roleUserRelation); } @Override public RoleUserRelation getByUserAndRole(Integer userId, Integer roleId) { - final RoleUserRelation roleUserRelation = roleUserRelationMapper.selectByUserIdAndRoleId(userId, roleId); + final RoleUserRelation roleUserRelation = + roleUserRelationMapper.selectByUserIdAndRoleId(userId, roleId); return roleUserRelation; } @@ -46,5 +46,4 @@ public RoleUserRelation getByUserAndRole(Integer userId, Integer roleId) { public void deleteByUserId(Integer userId) { roleUserRelationMapper.deleteByUserId(userId); } - } diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/SchedulerConfigDaoImpl.java 
b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/SchedulerConfigDaoImpl.java index fa78042bd..1d6ac1fdd 100644 --- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/SchedulerConfigDaoImpl.java +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/SchedulerConfigDaoImpl.java @@ -29,8 +29,7 @@ @Repository public class SchedulerConfigDaoImpl implements ISchedulerConfigDao { - @Resource - private SchedulerConfigMapper schedulerConfigMapper; + @Resource private SchedulerConfigMapper schedulerConfigMapper; @Override public boolean exists(int scriptId) { diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/ScriptDaoImpl.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/ScriptDaoImpl.java index 7f18048d8..9e2ebdc5f 100644 --- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/ScriptDaoImpl.java +++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/ScriptDaoImpl.java @@ -17,9 +17,6 @@ package org.apache.seatunnel.app.dal.dao.impl; -import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.SCRIPT_ALREADY_EXIST; -import static com.google.common.base.Preconditions.checkState; - import org.apache.seatunnel.app.common.ScriptStatusEnum; import org.apache.seatunnel.app.dal.dao.IScriptDao; import org.apache.seatunnel.app.dal.entity.Script; @@ -37,16 +34,24 @@ import java.util.List; import java.util.Objects; +import static com.google.common.base.Preconditions.checkState; +import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.SCRIPT_ALREADY_EXIST; + @Repository public class ScriptDaoImpl implements IScriptDao { - @Resource - private ScriptMapper scriptMapper; + @Resource private ScriptMapper scriptMapper; @Override public void checkScriptDuplicate(CheckScriptDuplicateDto dto) { - final Script script = 
scriptMapper.selectByNameAndCreatorAndStatusNotEq(dto.getName(), dto.getCreatorId(), (byte) ScriptStatusEnum.DELETED.getCode()); - checkState(Objects.isNull(script), String.format(SCRIPT_ALREADY_EXIST.getTemplate(), dto.getName())); + final Script script = + scriptMapper.selectByNameAndCreatorAndStatusNotEq( + dto.getName(), + dto.getCreatorId(), + (byte) ScriptStatusEnum.DELETED.getCode()); + checkState( + Objects.isNull(script), + String.format(SCRIPT_ALREADY_EXIST.getTemplate(), dto.getName())); } @Override @@ -69,7 +74,8 @@ public Script getScript(Integer id) { @Override public void updateScriptContent(UpdateScriptContentDto dto) { - scriptMapper.updateContentByPrimaryKey(dto.getId(), dto.getContent(), dto.getContentMd5(), dto.getMenderId()); + scriptMapper.updateContentByPrimaryKey( + dto.getId(), dto.getContent(), dto.getContentMd5(), dto.getMenderId()); } @Override @@ -82,7 +88,8 @@ public PageData