基于 Flink 1.13.0 + Scala 2.12 开发:新建一个 class,在其中编写 main 方法,实现从 Kafka 读取数据并写入 MySQL 的 Flink SQL 作业。
package com.yp.flink;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import java.util.concurrent.TimeUnit;
public class MysqlTest {

    /**
     * Flink SQL job: reads CSV records (three string columns) from the Kafka
     * topic "t1" and appends them into the MySQL table "test_cjy" through the
     * JDBC connector.
     *
     * @param args unused command-line arguments
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {
        // Remote submission (uncomment to submit to a standalone cluster):
        // StreamExecutionEnvironment env =
        //         StreamExecutionEnvironment.createRemoteEnvironment("192.168.153.129", 8081);

        // Local (mini-cluster) execution.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Failure-rate restart strategy: tolerate up to 300 failures within a
        // 5-minute window, restarting with a 10-second delay; exceeding the
        // rate fails the job permanently.
        env.setRestartStrategy(RestartStrategies.failureRateRestart(
                300,
                Time.of(5, TimeUnit.MINUTES),
                Time.of(10, TimeUnit.SECONDS)
        ));
        // env.getConfig().getConfiguration().setString("table.exec.state.ttl", "30 s");

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Kafka source table: CSV format, consuming from the latest offset.
        String sourceDDL = "CREATE TABLE Orders (f1 STRING, f2 STRING, f3 STRING) WITH ( 'connector' = 'kafka'," +
                " 'topic' = 't1'," +
                " 'properties.bootstrap.servers' = '192.168.153.129:9092'," +
                " 'properties.group.id' = 'atguigu'," +
                " 'scan.startup.mode' = 'latest-offset'," +
                " 'format' = 'csv')";
        tableEnv.executeSql(sourceDDL);

        // JDBC sink table: no PRIMARY KEY declared, so writes are append-only
        // (uncomment the PRIMARY KEY line to switch to upsert mode).
        String sinkDDL = "CREATE TABLE test_cjy_sink (" +
                " field_a STRING," +
                " field_b STRING, " +
                " field_c STRING" +
                // " PRIMARY KEY(field_a) NOT ENFORCED" +
                ") WITH ( " +
                " 'connector' = 'jdbc', " +
                " 'driver'='com.mysql.cj.jdbc.Driver', " +
                " 'url'='jdbc:mysql://172.16.0.68:3306/titandb', " +
                " 'table-name'='test_cjy', " +
                " 'username'='root', " +
                " 'password'='root' " +
                ")";
        System.out.println(sinkDDL);
        tableEnv.executeSql(sinkDDL);

        String insertSql = "insert into test_cjy_sink(field_a, field_b, field_c) " +
                "select f1,f2,f3 from Orders";
        // executeSql() submits the INSERT job asynchronously and returns
        // immediately. await() blocks the main thread so the local
        // mini-cluster is not torn down before the streaming job runs, and
        // job failures are surfaced here instead of being silently dropped.
        tableEnv.executeSql(insertSql).await();
    }
}
注意:在高版本的 Flink(1.14 及以后)中,blink planner 已被合并进 flink-table-planner,`flink-table-planner-blink` 这个 artifact 已不存在,所以依赖拉不下来;若沿用旧的依赖坐标,1.14 的版本怎么都测不过去。pom 文件如下:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.5.5</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>
    <groupId>com.yp</groupId>
    <artifactId>flink_remotesubmit</artifactId>
    <version>1.0-SNAPSHOT</version>
    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <flink.version>1.13.0</flink.version>
        <scala.version>2.12</scala.version>
        <logback.version>1.2.3</logback.version>
        <log4j-over-slf4j.version>1.7.30</log4j-over-slf4j.version>
    </properties>
    <dependencies>
        <!-- JDBC driver for the MySQL sink -->
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>8.0.27</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-scala_${scala.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-kafka_${scala.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <!-- CSV format used by the Kafka source table -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-csv</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-jdbc_${scala.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-sql-client_${scala.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-java</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-streaming-java_${scala.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-core</artifactId>
            <version>${logback.version}</version>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-classic</artifactId>
            <version>${logback.version}</version>
        </dependency>
        <dependency>
            <groupId>com.google.code.gson</groupId>
            <artifactId>gson</artifactId>
            <version>2.6</version>
        </dependency>
        <!-- Table API -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-api-java-bridge_${scala.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <!-- Blink planner; in Flink 1.14+ this artifact was merged into
             flink-table-planner and no longer exists under this name. -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner-blink_${scala.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-common</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>cn.hutool</groupId>
            <artifactId>hutool-all</artifactId>
            <version>5.1.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-json</artifactId>
            <version>${flink.version}</version>
        </dependency>
    </dependencies>
</project>

这样就完成了一个简单的 Flink SQL 作业:source 来源于 Kafka,sink 写入 MySQL。