DuplicateKeyException solution

Caused by: com.ibm.websphere.ce.cm.DuplicateKeyException: [SQL0803] Dubbele sleutelwaarde opgegeven.
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:86)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:58)
at java.lang.reflect.Constructor.newInstance(Constructor.java:542)
at com.ibm.websphere.rsadapter.GenericDataStoreHelper.mapExceptionHelper(GenericDataStoreHelper.java:627)
at com.ibm.websphere.rsadapter.GenericDataStoreHelper.mapException(GenericDataStoreHelper.java:686)
at com.ibm.ws.rsadapter.AdapterUtil.mapException(AdapterUtil.java:2267)
at com.ibm.ws.rsadapter.jdbc.WSJdbcUtil.mapException(WSJdbcUtil.java:1191)
at com.ibm.ws.rsadapter.jdbc.WSJdbcPreparedStatement.executeUpdate(WSJdbcPreparedStatement.java:815)
at org.hibernate.id.IdentityGenerator$GetGeneratedKeysDelegate.executeAndExtract(IdentityGenerator.java:94)
at org.hibernate.id.insert.AbstractReturningDelegate.performInsert(AbstractReturningDelegate.java:57)
... 236 more


跑程序 突然遇到 上面的问题,之前一直好用。

原因:在新环境 新建的表 ,然后 插入了一些 静态数据(静态数据带入Id(key)),跑程序重新插入一条 数据,导致key 冲突!!!


解决: 先查询当前最大主键值: SELECT MAX(id) FROM TABLENAME;

     再将自增列重置为该值加 1。注意:RESTART WITH 只接受数值常量,不能使用 CHAR() 函数或子查询,需把上一步查询出的数值手工代入:
     ALTER TABLE TABLENAME ALTER COLUMN ID RESTART WITH <MAX(id)+1 的数值>;



#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Device-health diagnostic plugin.

Scans hilog / hilog_kmsg files for known problem signatures (low memory,
low-memory kills, high temperature, low storage, fragmentation, slow
network, low battery, low voltage, CPU/GPU overload), groups the hits by
problem type, and merges consecutive timestamps into time ranges for the
final report.
"""
import os
import Hubble.utils.basic.HiviewLogger as HiviewLogger
import re
# NOTE(review): the original also had `import datetime`, which was immediately
# shadowed by the `from datetime import datetime` below and therefore dead;
# the redundant import has been removed.
from datetime import datetime, timedelta
import json


class PluginClass:
    # Common timestamp prefix shared by every log-line pattern,
    # e.g. "09-16 12:34:56.789" (month-day, no year).
    _TS = r'(?P<timestamp>\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3})'

    def __init__(self):
        self.logger = HiviewLogger.HiviewLogger().get_logger()
        # Result skeleton expected by the plugin framework; filled in by
        # run_plugin_single().
        self.plugin_result = {
            "conclusion": {
                "level_one": "",
                "level_two": "",
                "reason_info": "",
                "solution": "",
                "weight_level": "",
            },
            "analysis_process": {
                "show_order": ["1", "2"],
                "detailed_process": {
                    "1": {"details": "", "CN": ""},
                    "2": {"details": "", "CN": ""}
                }
            },
            "feature_data": {"": []}
        }

    def log_str_to_datetime(self, time_str):
        """Parse a log timestamp ("MM-DD HH:MM:SS[.fff]") into a datetime.

        Log lines carry no year, so the current year is substituted.
        Falls back to ``datetime.now()`` when the string is unparseable.
        """
        current_year = datetime.now().year
        for fmt in ('%m-%d %H:%M:%S.%f', '%m-%d %H:%M:%S'):
            try:
                return datetime.strptime(time_str, fmt).replace(year=current_year)
            except ValueError:
                continue
        return datetime.now()

    def merge_time_ranges(self, events, max_gap_minutes=5):
        """Merge chronologically close events into time ranges.

        Events whose timestamps fall within ``max_gap_minutes`` of the end of
        the current range are folded into it; a larger gap starts a new range.

        Returns a list of dicts with ``start``/``end`` datetimes, ``count``
        and the contributing ``events``.
        """
        if not events:
            return []
        sorted_events = sorted(
            events, key=lambda e: self.log_str_to_datetime(e["timestamp"]))
        max_gap = timedelta(minutes=max_gap_minutes)
        merged_ranges = []
        current_start = self.log_str_to_datetime(sorted_events[0]["timestamp"])
        current_end = current_start
        current_events = [sorted_events[0]]
        for event in sorted_events[1:]:
            event_time = self.log_str_to_datetime(event["timestamp"])
            if event_time - current_end <= max_gap:
                # Close enough: extend the current range.
                current_end = event_time
                current_events.append(event)
            else:
                # Gap too large: close the current range, start a new one.
                merged_ranges.append({
                    "start": current_start,
                    "end": current_end,
                    "count": len(current_events),
                    "events": current_events.copy(),
                })
                current_start = event_time
                current_end = event_time
                current_events = [event]
        # Flush the final (still open) range.
        merged_ranges.append({
            "start": current_start,
            "end": current_end,
            "count": len(current_events),
            "events": current_events,
        })
        return merged_ranges

    @staticmethod
    def _iter_matches(lines, pattern):
        """Yield regex matches of *pattern* over *lines* (compiled once)."""
        compiled = re.compile(pattern)
        for line in lines:
            match = compiled.search(line)
            if match:
                yield match

    def memAvailable(self, hilog):
        """Flag lines where available RAM (curBufKB) drops below 819200 KB."""
        results = []
        pattern = self._TS + r'.*curBufKB=(?P<curBufKB>\d+)'
        for match in self._iter_matches(hilog, pattern):
            value = int(match.group('curBufKB'))
            if value < 819200:
                results.append({"type": "memAvailable",
                                "timestamp": match.group("timestamp"),
                                "value": value})
        return results

    def am_kill(self, hilog):
        """Collect processes killed for low memory (KillOneProc / LowMemoryKill)."""
        results = []
        pattern = (self._TS +
                   r'.*KillOneProc kill_reason=LowMemoryKill.*procName=(?P<package>[a-zA-Z0-9._]+)')
        for match in self._iter_matches(hilog, pattern):
            results.append({"type": "am_kill",
                            "timestamp": match.group("timestamp"),
                            "proc_name": match.group("package")})
        return results

    def temperature(self, hilog):
        """Flag temperature readings above 43 (unit as logged; presumably °C — TODO confirm)."""
        results = []
        pattern = self._TS + r'.*new temp: (?P<new_temp>\d+)'
        for match in self._iter_matches(hilog, pattern):
            value = int(match.group("new_temp"))
            if value > 43:
                results.append({"type": "temperature",
                                "timestamp": match.group("timestamp"),
                                "value": value})
        return results

    def storage(self, hilog):
        """Flag lines where the free/total storage ratio is at or below 15%."""
        results = []
        pattern = self._TS + r'.*roundSize=(?P<round_size>\d+),.*freeSize=(?P<free_size>\d+)'
        for match in self._iter_matches(hilog, pattern):
            round_size = int(match.group("round_size"))
            free_size = int(match.group("free_size"))
            if round_size > 0:  # guard against division by zero
                ratio = free_size / round_size
                if ratio <= 0.15:
                    results.append({"type": "storage",
                                    "timestamp": match.group("timestamp"),
                                    "ratio": ratio})
        return results

    def free_sec(self, hilog_kmsg):
        """Flag high fragmentation: 1 - free_sec*2/Free above 0.8 (kmsg log)."""
        results = []
        pattern = self._TS + r'.*free_sec=(?P<free_sec>\d+).*Free =(?P<free>\d+)MB'
        for match in self._iter_matches(hilog_kmsg, pattern):
            free_sec_value = int(match.group("free_sec"))
            free_value = int(match.group("free"))
            if free_value > 0:  # guard against division by zero
                ratio = 1 - (free_sec_value * 2) / free_value
                if ratio > 0.8:
                    results.append({"type": "free_sec",
                                    "timestamp": match.group("timestamp"),
                                    "ratio": ratio})
        return results

    def ps_slow(self, hilog):
        """Collect PS_SLOW_EVENT CHR records (poor network)."""
        results = []
        pattern = self._TS + r'.*CHR name=(PS_SLOW_EVENT), type'
        for match in self._iter_matches(hilog, pattern):
            results.append({"type": "ps_slow",
                            "timestamp": match.group("timestamp")})
        return results

    def power_low(self, hilog):
        """Flag battery capacity readings below 10."""
        results = []
        pattern = self._TS + r'.*powermgr/BatteryInfo: capacity=(?P<capacity>\d+), voltage'
        for match in self._iter_matches(hilog, pattern):
            value = int(match.group("capacity"))
            if value < 10:
                results.append({"type": "power_low",
                                "timestamp": match.group("timestamp"),
                                "value": value})
        return results

    def voltage(self, hilog):
        """Flag battery voltage readings below 3500000 (raw scale as logged — TODO confirm unit)."""
        results = []
        pattern = self._TS + r'.*powermgr/BatteryInfo: .*voltage=(?P<voltage>\d+), temperature'
        for match in self._iter_matches(hilog, pattern):
            value = int(match.group("voltage"))
            if value < 3500000:
                results.append({"type": "voltage",
                                "timestamp": match.group("timestamp"),
                                "value": value})
        return results

    def cup_load(self, hilog):
        """Flag cpuLoadPercent readings above 600000 (raw scale — TODO confirm unit)."""
        results = []
        pattern = self._TS + r'.*cpuLoadPercent:(?P<cpuLoadPercent>\d+), thermalLevel'
        for match in self._iter_matches(hilog, pattern):
            try:
                value = int(match.group("cpuLoadPercent"))
            except ValueError:
                continue
            if value > 600000:
                results.append({"type": "cup_load",
                                "timestamp": match.group("timestamp"),
                                "value": value})
        return results

    def gpu_load(self, hilog):
        """Flag GpuTotalUsed readings above 600000 (raw scale — TODO confirm unit)."""
        results = []
        pattern = self._TS + r'.*Get GpuTotalUsed value is (?P<gpuUsed>\d+)'
        for match in self._iter_matches(hilog, pattern):
            try:
                value = int(match.group("gpuUsed"))
            except ValueError:
                continue
            if value > 600000:
                results.append({"type": "gpu_load",
                                "timestamp": match.group("timestamp"),
                                "value": value})
        return results

    def plugin_preprocess(self, info_dict=None):
        """Build the download-task list; only the first log package is analysed.

        Expects ``info_dict["common_info"]["log_download_list"]`` to exist.
        """
        self.logger.info("预处理函数执行中...")
        download_list = info_dict["common_info"]["log_download_list"]
        max_analysis_num = 1  # cap the number of packages we analyse
        if len(download_list) > max_analysis_num:
            download_list = download_list[:max_analysis_num]
        task_list = []
        for download_item in download_list:
            task_list.append({"download_addr": download_item, "task_params": {}})
        return task_list

    def _read_lines(self, paths, error_prefix):
        """Concatenate lines from *paths*; unreadable files are logged and skipped."""
        lines = []
        for path in paths:
            try:
                with open(path, "r", encoding="utf-8", errors='ignore') as reader:
                    lines += reader.readlines()
            except Exception as e:
                self.logger.error(f"{error_prefix}: {e}")
        return lines

    def run_plugin_single(self, uuid_log_path=None, log_path=None, param_dict=None):
        """Analyse one unpacked log directory and fill ``self.plugin_result``.

        Walks *uuid_log_path* for "hilog." and "hilog_kmsg" files, runs every
        detector, merges hits into time ranges and renders an HTML-ish report
        (reason_info / solution) into the result dict, which is returned.
        """
        self.logger.info("分析脚本执行中...uuid_log_path=[%s]", uuid_log_path)

        # Two remediation texts shared by several issue types (kept verbatim).
        low_mem_solution = "1、清除后台应用:桌面手势导航底部上划进入多任务中心,清理后台多任务应用卡片(三键导航点击导航键的方块图标可以进入多任务界面)2、长按电源键关机重启"
        high_load_solution = "1、清除后台应用:桌面手势导航底部上划进入多任务中心,清理后台多任务应用卡片(三键导航点击导航键的方块图标可以进入多任务界面);2、引导用户排查是否存在高速下载、 数据传输、多悬浮窗口使用等场景,以上高负载场景会导致设备卡顿,建议用户避免长时间使用上述场景;3、长按电源键关机重启;"
        error_mapping = {
            "memAvailable": {"reason": "整机低内存(RAM)", "solution": low_mem_solution},
            "am_kill": {"reason": "应用因低内存被查杀", "solution": low_mem_solution},
            "temperature": {"reason": "整机温度过高", "solution": "建议参考手机/平板使用过程中设备发热相关知识排查"},
            "storage": {"reason": "整机可用存储空间不足", "solution": "建议参考'华为手机/平板内存占用多,提示内存不足如何处理?'知识中场景一:存储剩余空间不足排查方案处理。"},
            "free_sec": {"reason": "整机碎片化过高", "solution": "1、建议引导用户进入设置>存储,进行整机清理加速,清理不需要的垃圾文件、联网缓存及不常用的应用;2、建议用户尝试夜间熄屏充电,持续此操作2-3晚上清理碎片化,促进系统优化"},
            "ps_slow": {"reason": "整机网络不佳", "solution": "当前所处网络环境覆盖较弱或干扰较大,建议引导用户更换网络环境"},
            "power_low": {"reason": "整机低电量", "solution": "建议在电量充足情况下使用"},
            "voltage": {"reason": "整机低电压", "solution": "建议检查电池健康度并在电量充足情况下继续使用体验"},
            "cup_load": {"reason": "整机CPU高负载", "solution": high_load_solution},
            "gpu_load": {"reason": "整机GPU高负载", "solution": high_load_solution},
        }

        # Collect log file paths: "hilog." -> app log, "hilog_kmsg" -> kernel log.
        app_path = []
        kmsg_path = []
        for root, dirs, files in os.walk(uuid_log_path):
            for file in files:
                if "hilog." in file:
                    app_path.append(os.path.join(root, file))
                if "hilog_kmsg" in file:
                    kmsg_path.append(os.path.join(root, file))

        app_list = self._read_lines(app_path, "读取日志文件失败")
        kmsg_list = self._read_lines(kmsg_path, "读取kmsg日志失败")

        # Run every detector, then merge each type's hits into time ranges.
        detected_issues = {
            "memAvailable": self.memAvailable(app_list),
            "am_kill": self.am_kill(app_list),
            "temperature": self.temperature(app_list),
            "storage": self.storage(app_list),
            "free_sec": self.free_sec(kmsg_list),
            "ps_slow": self.ps_slow(app_list),
            "power_low": self.power_low(app_list),
            "voltage": self.voltage(app_list),
            "cup_load": self.cup_load(app_list),
            "gpu_load": self.gpu_load(app_list)
        }
        merged_issues = {}
        for issue_type, issues in detected_issues.items():
            if issues:
                merged_issues[issue_type] = self.merge_time_ranges(issues)

        # Issue types whose events carry a numeric "value" vs a "ratio".
        units = {"memAvailable": "KB", "temperature": "°C",
                 "power_low": "%", "voltage": "mV"}
        value_types = ("memAvailable", "temperature", "power_low",
                       "voltage", "cup_load", "gpu_load")
        ratio_types = ("storage", "free_sec")

        report_by_type = {}
        solutions = set()
        for issue_type, time_ranges in merged_issues.items():
            if not time_ranges:
                continue
            issue_report = []
            for time_range in time_ranges:
                start_time = time_range["start"].strftime("%m-%d %H:%M:%S")
                end_time = time_range["end"].strftime("%m-%d %H:%M:%S")
                if time_range["count"] == 1:
                    time_info = f"{start_time}"
                else:
                    time_info = f"{start_time} 至 {end_time} ({time_range['count']}次)"
                details = error_mapping[issue_type]["reason"]
                if issue_type == "am_kill":
                    apps = list(set(event.get("proc_name", "未知应用")
                                    for event in time_range["events"]))
                    details += f" ({', '.join(apps)})"
                elif issue_type in value_types:
                    values = [event.get("value", 0) for event in time_range["events"]]
                    min_val = min(values)
                    max_val = max(values)
                    avg_val = sum(values) / len(values)
                    unit = units.get(issue_type, "")
                    details += f" (值范围: {min_val}-{max_val}{unit}, 平均: {avg_val:.1f}{unit})"
                elif issue_type in ratio_types:
                    ratios = [event.get("ratio", 0) for event in time_range["events"]]
                    avg_ratio = sum(ratios) / len(ratios) * 100
                    details += f" (平均: {avg_ratio:.1f}%)"
                issue_report.append(f"{time_info}: {details}")
            report_by_type[issue_type] = issue_report
            solutions.add(error_mapping[issue_type]["solution"])

        reason_info = []
        if not report_by_type:
            reason_info.append("当前整机各项指标未诊断出异常")
        else:
            for issue_type, issue_list in report_by_type.items():
                reason_info.append(f"<b>{error_mapping[issue_type]['reason']}</b>")
                reason_info.extend(issue_list)
                reason_info.append("")  # blank line between issue types

        # sorted() makes the solution order deterministic (set iteration is not).
        solution_str = "<br>".join(sorted(solutions)) if solutions else "建议引导用户重新复现问题反馈日志"
        self.plugin_result["conclusion"]["reason_info"] = "<br>".join(reason_info)
        self.plugin_result["conclusion"]["solution"] = solution_str
        self.plugin_result["conclusion"]["level_one"] = "稳定性问题"
        self.plugin_result["conclusion"]["weight_level"] = "medium"
        return self.plugin_result

    def plugin_result_refinement(self, result):
        """Hook for post-processing the plugin result; currently a pass-through."""
        return result
09-16
2025-11-25 17:46:46,879 - ERROR - 行级操作失败: P09192-1, (pymysql.err.OperationalError) (1292, "Incorrect datetime value: 'NaT' for column 'Resolved' at row 1") [SQL: INSERT INTO jira_task_bsw_deal (`Summary`, `Issue key`, `Issue id`, `Parent id`, `Issue Type`, `Status`, `Project key`, `Project name`, `Project type`, `Project lead`, `Project description`, `Project url`, `Priority`, `Resolution`, `Assignee`, `Reporter`, `Creator`, `Created`, `Updated`, `Last Viewed`, `Resolved`, `Affects Version/s`, `Fix Version/s`, `Component/s`, `Due Date`, `Labels`, `Description`, `Environment`, `Watchers`, `Log Work`, `Security Level`, `Attachment`, total, `Custom field (Control No)`, `Custom field (Custom_1)`, `Custom field (Custom_2)`, `Custom field (Date of first Response)`, `Custom field (Experience Owner)`, `Custom field (Fixed in release)`, `Custom field (Found By)`, `Custom field (Found in HW version)`, `Custom field (Found in SW version)`, `Custom field (KOCHI Issue Type)`, `Custom field (KOCHI Issue Type Transition)`, `Custom field (Last Comment)`, `Custom field (Matter)`, `Custom field (Project Number)`, `Custom field (Rank)`, `Custom field (Realisation planned)`, `Custom field (Report Number)`, `Custom field (Report Type)`, `Custom field (Reported By)`, `Custom field (Risk)`, `Custom field (Start Date)`, `Custom field (Team)`, `Comment`, `Involvement of BSW`, `Date`, `ProjectNum`, `BSW Self Test`, `BSW Issue`, `Stack BSW Analyzed`, `Stack BSW Unanalyzed`, `Stack Total`, `BSW Involve`, `BSW Involve Unclosed`, `HorizontalExpansion Count`, `HorizontalExpansion PN`, `HorizontalExpansion Unfinished`, `HorizontalExpansion Delay`, `BSW Analysis Conclusions`, `Stack Classification`, `BSW Reason Classification`, `BSW Analyzed`, `BSW Unanalyzed`, `BSW Staff List`, `Found By Classification`, `High Priority Unanalyzed`, `Stack_Analyzed`, `Stack_Unanalyzed`, update_over_15, update_over_7, `Responsible`, `Email`) VALUES (%(Summary_m0)s, %(Issue_key_m0)s, %(Issue_id_m0)s, 
%(Parent_id_m0)s, %(Issue_Type_m0)s, %(Status_m0)s, %(Project_key_m0)s, %(Project_name_m0)s, %(Project_type_m0)s, %(Project_lead_m0)s, %(Project_description_m0)s, %(Project_url_m0)s, %(Priority_m0)s, %(Resolution_m0)s, %(Assignee_m0)s, %(Reporter_m0)s, %(Creator_m0)s, %(Created_m0)s, %(Updated_m0)s, %(Last_Viewed_m0)s, %(Resolved_m0)s, %(Affects_Version/s_m0)s, %(Fix_Version/s_m0)s, %(Component/s_m0)s, %(Due_Date_m0)s, %(Labels_m0)s, %(Description_m0)s, %(Environment_m0)s, %(Watchers_m0)s, %(Log_Work_m0)s, %(Security_Level_m0)s, %(Attachment_m0)s, %(total_m0)s, %(Custom_field_AControl_NoZ_m0)s, %(Custom_field_ACustom_1Z_m0)s, %(Custom_field_ACustom_2Z_m0)s, %(Custom_field_ADate_of_first_ResponseZ_m0)s, %(Custom_field_AExperience_OwnerZ_m0)s, %(Custom_field_AFixed_in_releaseZ_m0)s, %(Custom_field_AFound_ByZ_m0)s, %(Custom_field_AFound_in_HW_versionZ_m0)s, %(Custom_field_AFound_in_SW_versionZ_m0)s, %(Custom_field_AKOCHI_Issue_TypeZ_m0)s, %(Custom_field_AKOCHI_Issue_Type_TransitionZ_m0)s, %(Custom_field_ALast_CommentZ_m0)s, %(Custom_field_AMatterZ_m0)s, %(Custom_field_AProject_NumberZ_m0)s, %(Custom_field_ARankZ_m0)s, %(Custom_field_ARealisation_plannedZ_m0)s, %(Custom_field_AReport_NumberZ_m0)s, %(Custom_field_AReport_TypeZ_m0)s, %(Custom_field_AReported_ByZ_m0)s, %(Custom_field_ARiskZ_m0)s, %(Custom_field_AStart_DateZ_m0)s, %(Custom_field_ATeamZ_m0)s, %(Comment_m0)s, %(Involvement_of_BSW_m0)s, %(Date_m0)s, %(ProjectNum_m0)s, %(BSW_Self_Test_m0)s, %(BSW_Issue_m0)s, %(Stack_BSW_Analyzed_m0)s, %(Stack_BSW_Unanalyzed_m0)s, %(Stack_Total_m0)s, %(BSW_Involve_m0)s, %(BSW_Involve_Unclosed_m0)s, %(HorizontalExpansion_Count_m0)s, %(HorizontalExpansion_PN_m0)s, %(HorizontalExpansion_Unfinished_m0)s, %(HorizontalExpansion_Delay_m0)s, %(BSW_Analysis_Conclusions_m0)s, %(Stack_Classification_m0)s, %(BSW_Reason_Classification_m0)s, %(BSW_Analyzed_m0)s, %(BSW_Unanalyzed_m0)s, %(BSW_Staff_List_m0)s, %(Found_By_Classification_m0)s, %(High_Priority_Unanalyzed_m0)s, 
%(Stack_Analyzed_m0)s, %(Stack_Unanalyzed_m0)s, %(update_over_15_m0)s, %(update_over_7_m0)s, %(Responsible_m0)s, %(Email_m0)s) AS new ON DUPLICATE KEY UPDATE `Summary` = new.`Summary`, `Issue id` = new.`Issue id`, `Parent id` = new.`Parent id`, `Issue Type` = new.`Issue Type`, `Status` = new.`Status`, `Project key` = new.`Project key`, `Project name` = new.`Project name`, `Project type` = new.`Project type`, `Project lead` = new.`Project lead`, `Project description` = new.`Project description`, `Project url` = new.`Project url`, `Priority` = new.`Priority`, `Resolution` = new.`Resolution`, `Assignee` = new.`Assignee`, `Reporter` = new.`Reporter`, `Creator` = new.`Creator`, `Created` = new.`Created`, `Updated` = new.`Updated`, `Last Viewed` = new.`Last Viewed`, `Resolved` = new.`Resolved`, `Affects Version/s` = new.`Affects Version/s`, `Fix Version/s` = new.`Fix Version/s`, `Component/s` = new.`Component/s`, `Due Date` = new.`Due Date`, `Labels` = new.`Labels`, `Description` = new.`Description`, `Environment` = new.`Environment`, `Watchers` = new.`Watchers`, `Log Work` = new.`Log Work`, `Security Level` = new.`Security Level`, `Attachment` = new.`Attachment`, total = new.total, `Custom field (Control No)` = new.`Custom field (Control No)`, `Custom field (Custom_1)` = new.`Custom field (Custom_1)`, `Custom field (Custom_2)` = new.`Custom field (Custom_2)`, `Custom field (Date of first Response)` = new.`Custom field (Date of first Response)`, `Custom field (Experience Owner)` = new.`Custom field (Experience Owner)`, `Custom field (Fixed in release)` = new.`Custom field (Fixed in release)`, `Custom field (Found By)` = new.`Custom field (Found By)`, `Custom field (Found in HW version)` = new.`Custom field (Found in HW version)`, `Custom field (Found in SW version)` = new.`Custom field (Found in SW version)`, `Custom field (KOCHI Issue Type)` = new.`Custom field (KOCHI Issue Type)`, `Custom field (KOCHI Issue Type Transition)` = new.`Custom field (KOCHI Issue Type 
Transition)`, `Custom field (Last Comment)` = new.`Custom field (Last Comment)`, `Custom field (Matter)` = new.`Custom field (Matter)`, `Custom field (Project Number)` = new.`Custom field (Project Number)`, `Custom field (Rank)` = new.`Custom field (Rank)`, `Custom field (Realisation planned)` = new.`Custom field (Realisation planned)`, `Custom field (Report Number)` = new.`Custom field (Report Number)`, `Custom field (Report Type)` = new.`Custom field (Report Type)`, `Custom field (Reported By)` = new.`Custom field (Reported By)`, `Custom field (Risk)` = new.`Custom field (Risk)`, `Custom field (Start Date)` = new.`Custom field (Start Date)`, `Custom field (Team)` = new.`Custom field (Team)`, `Comment` = new.`Comment`, `Involvement of BSW` = new.`Involvement of BSW`, `Date` = new.`Date`, `ProjectNum` = new.`ProjectNum`, `BSW Self Test` = new.`BSW Self Test`, `BSW Issue` = new.`BSW Issue`, `Stack BSW Analyzed` = new.`Stack BSW Analyzed`, `Stack BSW Unanalyzed` = new.`Stack BSW Unanalyzed`, `Stack Total` = new.`Stack Total`, `BSW Involve` = new.`BSW Involve`, `BSW Involve Unclosed` = new.`BSW Involve Unclosed`, `HorizontalExpansion Count` = new.`HorizontalExpansion Count`, `HorizontalExpansion PN` = new.`HorizontalExpansion PN`, `HorizontalExpansion Unfinished` = new.`HorizontalExpansion Unfinished`, `HorizontalExpansion Delay` = new.`HorizontalExpansion Delay`, `BSW Analysis Conclusions` = new.`BSW Analysis Conclusions`, `Stack Classification` = new.`Stack Classification`, `BSW Reason Classification` = new.`BSW Reason Classification`, `BSW Analyzed` = new.`BSW Analyzed`, `BSW Unanalyzed` = new.`BSW Unanalyzed`, `BSW Staff List` = new.`BSW Staff List`, `Found By Classification` = new.`Found By Classification`, `High Priority Unanalyzed` = new.`High Priority Unanalyzed`, `Stack_Analyzed` = new.`Stack_Analyzed`, `Stack_Unanalyzed` = new.`Stack_Unanalyzed`, update_over_15 = new.update_over_15, update_over_7 = new.update_over_7, `Responsible` = new.`Responsible`, 
`Email` = new.`Email`] [parameters: {'Summary_m0': 'GW3 LL Workshop', 'Issue_key_m0': 'P09192-1', 'Issue_id_m0': '1302030', 'Parent_id_m0': '', 'Issue_Type_m0': 'ToDo', 'Status_m0': 'To Do', 'Project_key_m0': 'P09192', 'Project_name_m0': 'P09192 GWM_BLE_BNWC_P14', 'Project_type_m0': 'software', 'Project_lead_m0': 'wang161', 'Project_description_m0': '', 'Project_url_m0': '', 'Priority_m0': 'Medium', 'Resolution_m0': '', 'Assignee_m0': 'pan009', 'Reporter_m0': 'yu017', 'Creator_m0': 'yu017', 'Created_m0': Timestamp('2025-05-26 07:18:00'), 'Updated_m0': Timestamp('2025-06-16 07:08:00'), 'Last_Viewed_m0': Timestamp('2025-11-24 19:24:00'), 'Resolved_m0': NaT, 'Affects_Version/s_m0': '', 'Fix_Version/s_m0': '', 'Component/s_m0': '', 'Due_Date_m0': Timestamp('2026-05-23 00:00:00'), 'Labels_m0': 'LL-Workshop', 'Description_m0': 'AEQ: Pan Lili\r\nThis ticket serves as main ticket for the initial lessons learned Workshop at GW3.\r\nDate: 20250616\r\nInvited:See attachment\r\nBe ... (406 characters truncated) ... implementation has to be checked by AEQ. If the implementation cannot be verified, the sub-task is reopened and assigned back to the originator.\r\n', 'Environment_m0': '', 'Watchers_m0': '', 'Log_Work_m0': '', 'Security_Level_m0': '', 'Attachment_m0': '11.06.2025 03:06;yu017;P09192-GWM-C06 BLE.xlsx;https://jira.kostal.com/secure/attachment/1234604/P09192-GWM-C06+BLE.xlsx||16.06.2025 07:08;yu017;答复 ... (32 characters truncated) ... 
s://jira.kostal.com/secure/attachment/1237494/%E7%AD%94%E5%A4%8D++P09192%E9%A1%B9%E7%9B%AE%E7%9A%84LL+kickoff%E5%AF%BC%E5%85%A5%E4%BC%9A%E8%AE%AE.msg', 'total_m0': 0, 'Custom_field_AControl_NoZ_m0': '', 'Custom_field_ACustom_1Z_m0': '', 'Custom_field_ACustom_2Z_m0': '', 'Custom_field_ADate_of_first_ResponseZ_m0': None, 'Custom_field_AExperience_OwnerZ_m0': '', 'Custom_field_AFixed_in_releaseZ_m0': '', 'Custom_field_AFound_ByZ_m0': '', 'Custom_field_AFound_in_HW_versionZ_m0': '', 'Custom_field_AFound_in_SW_versionZ_m0': '', 'Custom_field_AKOCHI_Issue_TypeZ_m0': '', 'Custom_field_AKOCHI_Issue_Type_TransitionZ_m0': '', 'Custom_field_ALast_CommentZ_m0': '', 'Custom_field_AMatterZ_m0': '', 'Custom_field_AProject_NumberZ_m0': '', 'Custom_field_ARankZ_m0': '1|i45am8:', 'Custom_field_ARealisation_plannedZ_m0': '', 'Custom_field_AReport_NumberZ_m0': '', 'Custom_field_AReport_TypeZ_m0': '', 'Custom_field_AReported_ByZ_m0': '', 'Custom_field_ARiskZ_m0': '', 'Custom_field_AStart_DateZ_m0': '', 'Custom_field_ATeamZ_m0': '', 'Comment_m0': '', 'Involvement_of_BSW_m0': 'No', 'Date_m0': Timestamp('2025-11-25 02:24:10'), 'ProjectNum_m0': 'P09192', 'BSW_Self_Test_m0': 0, 'BSW_Issue_m0': 0, 'Stack_BSW_Analyzed_m0': 0, 'Stack_BSW_Unanalyzed_m0': 0, 'Stack_Total_m0': 0, 'BSW_Involve_m0': 0, 'BSW_Involve_Unclosed_m0': 0, 'HorizontalExpansion_Count_m0': 0, 'HorizontalExpansion_PN_m0': None, 'HorizontalExpansion_Unfinished_m0': 0, 'HorizontalExpansion_Delay_m0': 0, 'BSW_Analysis_Conclusions_m0': 'Non-BSW', 'Stack_Classification_m0': 'Unclassed', 'BSW_Reason_Classification_m0': '底层未参与分析', 'BSW_Analyzed_m0': 0, 'BSW_Unanalyzed_m0': 0, 'BSW_Staff_List_m0': 'No', 'Found_By_Classification_m0': 'Others', 'High_Priority_Unanalyzed_m0': 0, 'Stack_Analyzed_m0': 0, 'Stack_Unanalyzed_m0': 0, 'update_over_15_m0': 0, 'update_over_7_m0': 0, 'Responsible_m0': 'Bob', 'Email_m0': 'bob@example.com'}] (Background on this error at: https://sqlalche.me/e/20/e3q8) 2025-11-25 17:46:46,883 - INFO - 成功写入/更新 3263 
条记录 2025-11-25 17:46:46,883 - INFO - 成功写入 3894 条记录到 jira_task_bsw_deal 2025-11-25 17:46:46,883 - INFO - 数据处理完成,共写入 3894 条记录
最新发布
11-26
评论
成就一亿技术人!
拼手气红包6.0元
还能输入1000个字符
 
红包 添加红包
表情包 插入表情
 条评论被折叠 查看
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值