A Deep Dive into the .name() and .uid() Methods in Flink

In Apache Flink, .name() and .uid() are two commonly used configuration methods. Although they look similar, each serves a different function and purpose. Understanding how they differ and when to use each helps developers manage Flink jobs better and improves job readability, maintainability, and fault tolerance.

This article explains in detail what .name() and .uid() do, what they are used for, and how to apply them correctly in real-world development.

1. The .name() Method: Naming Operators

1.1. Purpose

The .name() method assigns a human-readable name to a Flink operator (a source, a transformation, a sink, and so on). The name mainly improves code readability, makes debugging easier, and gives a clearer picture of the job in monitoring views.

1.2. Use Cases

  • Debugging and monitoring: In the Flink Web UI, an operator's name acts as its identifier and helps developers and operations staff quickly locate and recognize individual operations in a job. When a job suffers performance problems or fails, a clear name makes it much easier to pinpoint the root cause.

  • Code readability: Giving each operator a suitable name keeps the job logic clear and avoids confusing one operator with another. Especially in complex jobs, good names help later developers understand the job logic faster.

1.3. Example

Suppose we have a source operator that reads data from Kafka. We can give it an easy-to-understand name with .name():

DataStream<String> stream = env
        .addSource(new FlinkKafkaConsumer<>(...))
        .name("Kafka Source");   // this label is what appears in the Flink Web UI
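
Building on that single-operator example, below is a minimal, self-contained sketch (not from the original article) that names every operator in a small pipeline and also attaches a .uid() to each, since the introduction notes that these methods matter for fault tolerance as well. The topic name "events", the consumer properties, and the filter logic are illustrative assumptions, not details from the article.

import java.util.Properties;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

public class NamedOperatorsJob {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Assumed Kafka connection settings, purely for illustration
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "localhost:9092");
        props.setProperty("group.id", "demo");

        DataStream<String> source = env
                .addSource(new FlinkKafkaConsumer<>("events", new SimpleStringSchema(), props))
                .name("Kafka Source: events")   // label shown in the Web UI
                .uid("kafka-source-events");    // stable id used when restoring state from a savepoint

        source
                .filter(line -> !line.isEmpty())
                .name("Drop empty lines")
                .uid("filter-empty-lines")
                .print()
                .name("Print Sink");

        env.execute("Named operators demo");
    }
}

With these names set, the job graph in the Web UI shows "Kafka Source: events", "Drop empty lines", and "Print Sink" instead of auto-generated labels, while the uid values remain stable across code refactorings so operator state can be matched correctly when restoring from a savepoint.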