Convert the string in E:/sql.txt to StringBuffer format

This article provides a sample Java program that reads a file line by line with BufferedReader and converts each line to uppercase. It is useful when you need to process a text file or assemble its contents into a string buffer.
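For example, assuming E:/sql.txt contains the following two hypothetical lines:

select id, name from user
where id = 1

the program below prints a ready-to-paste snippet to the console:

StringBuffer sql = new StringBuffer(" SELECT ID, NAME FROM USER ");
sql.append(" WHERE ID = 1 ");
return sql.toString();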
package com.pps.test;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;


public class Test {
    public static void main(String[] args) throws IOException{
        Test.readFileByLines("E:/sql.txt");
    }
    
    /**
     * Reads a file line by line; commonly used for line-oriented, formatted files.
     *
     * @param fileName
     *            the file name
     */
    public static String readFileByLines(String fileName) throws IOException {
        StringBuffer fileContext = new StringBuffer("");
        File file = new File(fileName);
        BufferedReader reader = null;
        try {
            reader = new BufferedReader(new FileReader(file));
            String tempString = null;
            int line = 1;
            // Read one line at a time; readLine() returns null at end of file
            while ((tempString = reader.readLine()) != null) {
                // Record the line number and append the uppercased line to the buffer
                fileContext.append(line + ":" + tempString.toUpperCase() + "\n");
                if(line==1){
                    System.out.println("StringBuffer sql = new StringBuffer(\" " + tempString.toUpperCase() + " \");");
                }else{
                    System.out.println("sql.append(\" "+tempString.toUpperCase()+" \");");
                }
                line++;
            }
            System.out.println("return sql.toString();");
            reader.close();
        } catch (IOException e) {
            throw e;
        } finally {
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException e) {
                    throw e;
                }
            }
        }
        return fileContext.toString();
    }
}
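For reference, on Java 7 or later the same conversion can be written more compactly with try-with-resources, which closes the reader automatically even when an exception occurs. The sketch below is not from the original article; it assumes the same E:/sql.txt input and prints the same console output:

package com.pps.test;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

public class TestTryWithResources {
    public static void main(String[] args) throws IOException {
        // try-with-resources closes the BufferedReader automatically
        try (BufferedReader reader = new BufferedReader(new FileReader("E:/sql.txt"))) {
            String tempString;
            int line = 1;
            while ((tempString = reader.readLine()) != null) {
                if (line == 1) {
                    System.out.println("StringBuffer sql = new StringBuffer(\" " + tempString.toUpperCase() + " \");");
                } else {
                    System.out.println("sql.append(\" " + tempString.toUpperCase() + " \");");
                }
                line++;
            }
            System.out.println("return sql.toString();");
        }
    }
}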

