-
Notifications
You must be signed in to change notification settings - Fork 1
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Add FlinkDeploymentTemplate to get default deployment with sql-runner (…
…#36) Signed-off-by: David Kornel <[email protected]>
- Loading branch information
Showing
4 changed files
with
151 additions
and
16 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,43 @@ | ||
/* | ||
* Copyright streamshub authors. | ||
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). | ||
*/ | ||
package io.streams.constants; | ||
|
||
public interface FlinkConstants {
    // End-to-end Flink SQL script used by the sql-runner test deployment. It is a
    // single multi-statement string; individual statements are separated by ';'.
    //
    // Pipeline, as written below:
    //   1. ProductInventoryTable — filesystem CSV source reading
    //      /opt/flink/data/productInventory.csv (parse errors ignored).
    //   2. ClickStreamTable      — Kafka source on topic 'flink.click.streams',
    //      avro-confluent values resolved via the Apicurio ccompat endpoint,
    //      event-time watermark 1s behind the Kafka record timestamp.
    //   3. SalesRecordTable      — Kafka source on topic 'flink.sales.records',
    //      same value format / registry, watermark on purchase_time.
    //   4. CsvSinkTable          — upsert-kafka sink on topic
    //      'flink.recommended.products', keyed by user_id, csv key/value format.
    //   5. Temporary views join clicks to inventory, left-join sales to mark
    //      purchases, rank products per user with ROW_NUMBER() (purchased first,
    //      then rating), and the final INSERT aggregates the top product ids
    //      (rn <= 6) per user over 5-second tumbling windows via LISTAGG.
    //
    // NOTE(review): the Kafka bootstrap address and the Apicurio registry URL are
    // hard-coded to services in the 'flink' namespace — presumably the test suite
    // always deploys there; confirm before reusing this constant elsewhere.
    String TEST_SQL_EXAMPLE_STATEMENT =
        "CREATE TABLE ProductInventoryTable ( product_id STRING, category STRING, stock STRING, rating STRING ) " +
        "WITH ( 'connector' = 'filesystem', 'path' = '/opt/flink/data/productInventory.csv', " +
        "'format' = 'csv', 'csv.ignore-parse-errors' = 'true' ); CREATE TABLE ClickStreamTable " +
        "( user_id STRING, product_id STRING, `event_time` TIMESTAMP(3) METADATA FROM 'timestamp', " +
        "WATERMARK FOR event_time AS event_time - INTERVAL '1' SECOND ) WITH ( 'connector' = 'kafka', " +
        "'topic' = 'flink.click.streams', 'properties.bootstrap.servers' = " +
        "'my-cluster-kafka-bootstrap.flink.svc:9092', 'properties.group.id' = 'click-stream-group', " +
        "'value.format' = 'avro-confluent', 'value.avro-confluent.schema-registry.url' = " +
        "'http://apicurio-registry-service.flink.svc:8080/apis/ccompat/v6', 'scan.startup.mode' = " +
        "'latest-offset' ); CREATE TABLE SalesRecordTable ( invoice_id STRING, user_id STRING, product_id STRING, " +
        "quantity STRING, unit_cost STRING, `purchase_time` TIMESTAMP(3) METADATA FROM 'timestamp', " +
        "WATERMARK FOR purchase_time AS purchase_time - INTERVAL '1' SECOND ) WITH ( 'connector' = 'kafka', " +
        "'topic' = 'flink.sales.records', 'properties.bootstrap.servers' = " +
        "'my-cluster-kafka-bootstrap.flink.svc:9092', 'properties.group.id' = 'sales-record-group', " +
        "'value.format' = 'avro-confluent', 'value.avro-confluent.schema-registry.url' = " +
        "'http://apicurio-registry-service.flink.svc:8080/apis/ccompat/v6', 'scan.startup.mode' = " +
        "'latest-offset' ); CREATE TABLE CsvSinkTable ( user_id STRING, top_product_ids STRING, " +
        "`event_time` TIMESTAMP(3), PRIMARY KEY(`user_id`) NOT ENFORCED ) WITH ( 'connector' = 'upsert-kafka', " +
        "'topic' = 'flink.recommended.products', 'properties.bootstrap.servers' = " +
        "'my-cluster-kafka-bootstrap.flink.svc:9092', 'properties.client.id' = " +
        "'recommended-products-producer-client', 'properties.transaction.timeout.ms' = '800000', " +
        "'key.format' = 'csv', 'value.format' = 'csv', 'value.fields-include' = 'ALL' ); CREATE TEMPORARY " +
        "VIEW clicked_products AS SELECT DISTINCT c.user_id, c.event_time, p.product_id, p.category " +
        "FROM ClickStreamTable AS c JOIN ProductInventoryTable AS p ON c.product_id = p.product_id; " +
        "CREATE TEMPORARY VIEW category_products AS SELECT cp.user_id, cp.event_time, p.product_id, " +
        "p.category, p.stock, p.rating, sr.user_id as purchased FROM clicked_products cp JOIN " +
        "ProductInventoryTable AS p ON cp.category = p.category LEFT JOIN SalesRecordTable sr ON " +
        "cp.user_id = sr.user_id AND p.product_id = sr.product_id WHERE p.stock > 0 GROUP BY p.product_id, " +
        "p.category, p.stock, cp.user_id, cp.event_time, sr.user_id, p.rating; CREATE TEMPORARY VIEW " +
        "top_products AS SELECT cp.user_id, cp.event_time, cp.product_id, cp.category, cp.stock, cp.rating, " +
        "cp.purchased, ROW_NUMBER() OVER (PARTITION BY cp.user_id ORDER BY cp.purchased DESC, cp.rating DESC) " +
        "AS rn FROM category_products cp; INSERT INTO CsvSinkTable SELECT user_id, LISTAGG(product_id, ',') " +
        "AS top_product_ids, TUMBLE_END(event_time, INTERVAL '5' SECOND) FROM top_products WHERE rn <= 6 GROUP " +
        "BY user_id, TUMBLE(event_time, INTERVAL '5' SECOND);";
}
104 changes: 104 additions & 0 deletions
104
src/main/java/io/streams/operands/flink/templates/FlinkDeploymentTemplate.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,104 @@ | ||
/* | ||
* Copyright streamshub authors. | ||
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). | ||
*/ | ||
package io.streams.operands.flink.templates; | ||
|
||
import org.apache.flink.v1beta1.FlinkDeploymentBuilder; | ||
import org.apache.flink.v1beta1.FlinkDeploymentSpec; | ||
import org.apache.flink.v1beta1.flinkdeploymentspec.Job; | ||
|
||
import java.util.List; | ||
import java.util.Map; | ||
|
||
/** | ||
* FlinkDeployment templates | ||
*/ | ||
public class FlinkDeploymentTemplate { | ||
|
||
/** | ||
* Return default flink deployment for sql runner | ||
* | ||
* @param namespace namespace of flink deployment | ||
* @param name name of deployment | ||
* @param args args for sql runner | ||
* @return flink deployment builder | ||
*/ | ||
public static FlinkDeploymentBuilder defaultFlinkDeployment(String namespace, String name, List<String> args) { | ||
return new FlinkDeploymentBuilder() | ||
.withNewMetadata() | ||
.withName(name) | ||
.withNamespace(namespace) | ||
.endMetadata() | ||
.withNewSpec() | ||
.withImage("quay.io/streamshub/flink-sql-runner:latest") | ||
.withFlinkVersion(FlinkDeploymentSpec.FlinkVersion.v1_19) | ||
.withFlinkConfiguration( | ||
Map.of("taskmanager.numberOfTaskSlots", "1") | ||
) | ||
.withServiceAccount("flink") | ||
.withNewPodTemplate() | ||
.withKind("Pod") | ||
.withNewMetadata() | ||
.withName(name) | ||
.endFlinkdeploymentspecMetadata() | ||
.withNewSpec() | ||
.addNewContainer() | ||
.withName("flink-main-container") | ||
.withImage("quay.io/streamshub/flink-sql-runner:latest") | ||
.addNewVolumeMount() | ||
.withName("product-inventory-vol") | ||
.withMountPath("/opt/flink/data") | ||
.endFlinkdeploymentspecVolumeMount() | ||
.addNewVolumeMount() | ||
.withName("flink-logs") | ||
.withMountPath("/opt/flink/log") | ||
.endFlinkdeploymentspecVolumeMount() | ||
.addNewVolumeMount() | ||
.withName("flink-artifacts") | ||
.withMountPath("/opt/flink/artifacts") | ||
.endFlinkdeploymentspecVolumeMount() | ||
.endFlinkdeploymentspecContainer() | ||
.addNewVolume() | ||
.withName("product-inventory-vol") | ||
.withNewConfigMap() | ||
.withName("product-inventory") | ||
.addNewItem() | ||
.withKey("productInventory.csv") | ||
.withPath("productInventory.csv") | ||
.endFlinkdeploymentspecItem() | ||
.endFlinkdeploymentspecConfigMap() | ||
.endFlinkdeploymentspecVolume() | ||
.addNewVolume() | ||
.withName("flink-logs") | ||
.withNewEmptyDir() | ||
.endFlinkdeploymentspecEmptyDir() | ||
.endFlinkdeploymentspecVolume() | ||
.addNewVolume() | ||
.withName("flink-artifacts") | ||
.withNewEmptyDir() | ||
.endFlinkdeploymentspecEmptyDir() | ||
.endFlinkdeploymentspecVolume() | ||
.endFlinkdeploymentspecSpec() | ||
.endPodTemplate() | ||
.withNewJobManager() | ||
.withNewResource() | ||
.withCpu(1.0) | ||
.withMemory("2048m") | ||
.endResource() | ||
.endJobManager() | ||
.withNewTaskManager() | ||
.withNewResource() | ||
.withCpu(1.0) | ||
.withMemory("2048m") | ||
.endTaskmanagerResource() | ||
.endTaskManager() | ||
.withNewJob() | ||
.withJarURI("local:///opt/flink/usrlib/flink-sql-runner.jar") | ||
.withParallelism(1L) | ||
.withUpgradeMode(Job.UpgradeMode.stateless) | ||
.withArgs(args) | ||
.endJob() | ||
.endSpec(); | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters