FlinkSQL读取MySQL大多用作维表关联,聚合结果写入MySQL,下面简单记录一下用法。
package com.sm.job
import com.sm.utils.FlinkUtils
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment
import org.apache.log4j.Level
import org.slf4j.LoggerFactory
/**
* FlinkSQL 读写 MySQL
*
* create by LiuJinHe 2020/10/22
*/
object FlinkJdbcConnector {
private var logger: org.slf4j.Logger = _
def main(args: Array[String]): Unit = {
logger = LoggerFactory.getLogger(this.getClass.getSimpleName)
org.apache.log4j.Logger.getLogger("org.apache.hadoop").setLevel(Level.WARN)
org.apache.log4j.Logger.getLogger("org.apache").setLevel(Level.INFO)
org.apache.log4j.Logger.getLogger("io.debezium").setLevel(Level.INFO)
// 初始化 stream 环境
// 本地测试,需要 flink-runtime-web 依赖
val env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI()
// val env = StreamExecutionEnvironment.getExecutionEnvironment
// 失败重启,固定间隔,每隔3秒重启1次,总尝试重启10次
// env.setRestartStrategy(RestartStrategies.fixedDelayRestart(10, 3))
// 本地测试线程 1
env.