# -*- coding: utf-8 -*-
"""
@file: interlockledger
@author: maxiaolong01
@date: 2025/7/7 16:10
"""
from typing import Annotated, Optional
from pydantic import BaseModel, Field
from app.models.interlock.utils import InterLockType, ModuleType, LogLevelType
class BaseInterLockLedger(BaseModel):
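    """Shared interlock-ledger fields; every field is optional and exposed to the API via its camelCase alias."""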
alarm_name: Annotated[str | None, Field(alias="alarmName", title="报警名称", description="报警名称")] = None
lock_type: Annotated[InterLockType | None, Field(alias="lockType", title="互锁类型")] = None
product: Annotated[Optional[str], Field(alias="product", title='所属产品', description='所属产品')] = None
    module_name: Annotated[str | None, Field(alias='moduleName', title='所属模块', description='所属模块')] = None
    module_type: Annotated[ModuleType | None, Field(alias='moduleType', title='模块类型', description='模块类型')] = None
    dept_name: Annotated[str | None, Field(alias='deptName', title='所属部门名称', description='所属部门名称')] = None
trigger: Annotated[str | None, Field(alias='trigger', title='触发器', description='触发器')] = None
    channel_number: Annotated[str | int | None, Field(alias='channelNumber', title='渠道号', description='渠道号')] = None
trigger_way: Annotated[str | None, Field(alias='triggerWay', title='触发器渠道', description='触发器渠道')] = None
alarm_desc: Annotated[str | None, Field(alias='alarmDesc', title='报警描述', description='报警描述')] = None
    alarm_level: Annotated[LogLevelType | None, Field(alias='alarmLevel', title='报警级别', description='报警级别')] = None
alarm_detail: Annotated[str | None, Field(alias='alarmDetail', title='报警详情', description='报警详情')] = None
alarm_action: Annotated[str | None, Field(alias='alarmAction', title='报警动作', description='报警动作')] = None
alarm_channel: Annotated[
str | None, Field(alias='alarmChannel', title='报警动作Data通道', description='报警动作Data通道')] = None
alarm_action_desc: Annotated[
str | None, Field(alias='alarmActionDesc', title='报警动作描述', description='报警动作描述')] = None
condition: Annotated[str | None, Field(alias='condition', title='满足条件', description='满足条件')] = None
desc: Annotated[str | None, Field(alias='desc', title='备注', description='备注')] = None
class Config:
populate_by_name = True
extra = "allow"
class InterLockLedgerSearch(BaseInterLockLedger):
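    """Search payload: inherits every ledger field as an optional filter and adds pagination/ordering."""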
current: Annotated[int | None, Field(title="页码")] = 1
size: Annotated[int | None, Field(title="每页数量")] = 10
order_list: Annotated[list[str] | None, Field(
alias='orderList', title='排序')] = None
class InterLockLedgerCreate(BaseInterLockLedger):
...
class InterLockLedgerUpdate(BaseInterLockLedger):
...
# -*- coding: utf-8 -*-
"""
@file: ledger
@author: maxiaolong01
@date: 2025/7/7 11:45
"""
import io
from typing import List
import numpy as np
import pandas as pd
from fastapi import APIRouter, Query, File, UploadFile, HTTPException, status
from pydantic import ValidationError
from tortoise.expressions import Q
from app.api.v1.utils import insert_log
from app.controllers.interlockledger import interlockledger_controller
from app.controllers.system_manage.user import user_controller
from app.controllers.zentao.zt_equip import ztequip_controller
from app.core.ctx import CTX_USER_ID
from app.models.system import LogType, LogDetailType
from app.schemas.base import SuccessExtra, Success
from app.schemas.interlockledger import InterLockLedgerSearch, InterLockLedgerCreate, InterLockLedgerUpdate
from app.utils.cdg_tool import CdgClient
router = APIRouter()
@router.post("/ledgers/all", summary="查看互锁台账列表")
async def _(obj_in: InterLockLedgerSearch):
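    """Paginated search of interlock ledger records; extra body fields such as "ids" are accepted and used as additional filters."""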
ids = obj_in.__pydantic_extra__.get("ids")
    q = Q(is_del=0)  # only fetch records that have not been soft-deleted
    if ids:
        q &= Q(id__in=ids)
if obj_in.alarm_name:
q &= Q(alarm_name__contains=obj_in.alarm_name)
if obj_in.product:
q &= Q(product=obj_in.product)
if obj_in.lock_type:
q &= Q(lock_type=obj_in.lock_type)
if obj_in.module_name:
q &= Q(module_name__contains=obj_in.module_name)
if obj_in.alarm_level:
q &= Q(alarm_level=obj_in.alarm_level)
if obj_in.dept_name:
q &= Q(dept_name__contains=obj_in.dept_name)
if obj_in.module_type:
q &= Q(module_type=obj_in.module_type)
total, api_objs = await interlockledger_controller.list(page=obj_in.current, page_size=obj_in.size, search=q,
order=obj_in.order_list or ["-id"])
records = []
for obj in api_objs:
data = await obj.to_dict(exclude_fields=[])
records.append(data)
data = {"records": records}
user_id = CTX_USER_ID.get()
user_obj = await user_controller.get(id=user_id)
await insert_log(log_type=LogType.ApiLog, log_detail_type=LogDetailType.InterLockLedgerList,
by_user_id=user_obj.id)
return SuccessExtra(data=data, total=total, current=obj_in.current, size=obj_in.size)
@router.post(path='/ledgers', summary='创建互锁台账记录')
async def _(obj_in: InterLockLedgerCreate):
    """Create a single interlock ledger record."""
new_ledger = await interlockledger_controller.create(obj_in=obj_in)
user_id = CTX_USER_ID.get()
user_obj = await user_controller.get(id=user_id)
await insert_log(log_type=LogType.ApiLog, log_detail_type=LogDetailType.InterLockLedgerCreate, by_user_id=user_obj.id)
return Success(msg='Create Successfully', data={"create_id": new_ledger.id})
@router.patch(path='/ledgers/{ledger_id}', summary='更新互锁台账记录')
async def _(ledger_id: int, obj_in: InterLockLedgerUpdate):
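    """Update an existing interlock ledger record by ID."""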
await interlockledger_controller.update(id=ledger_id, obj_in=obj_in)
user_id = CTX_USER_ID.get()
user_obj = await user_controller.get(id=user_id)
await insert_log(log_type=LogType.ApiLog, log_detail_type=LogDetailType.InterLockLedgerUpdate, by_user_id=user_obj.id)
return Success(msg="Updated Successfully", data={"updated_id": ledger_id})
@router.delete(path='/ledgers/{ledger_id}', summary='删除互锁台账记录')
async def _(ledger_id: int):
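    """Delete (soft-delete) a single interlock ledger record by ID."""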
await interlockledger_controller.remove(id=ledger_id)
user_id = CTX_USER_ID.get()
user_obj = await user_controller.get(id=user_id)
await insert_log(log_type=LogType.ApiLog, log_detail_type=LogDetailType.InterLockLedgerDelete, by_user_id=user_obj.id)
return Success(msg="Deleted Successfully", data={"deleted_id": ledger_id})
@router.delete(path='/ledgers', summary='批量删除互锁台账记录')
async def _(ids: str = Query(..., description='互锁台账ID列表,用逗号隔开')):
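    """Batch-delete interlock ledger records; takes a comma-separated list of IDs and returns the IDs removed."""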
    ledger_ids = ids.split(",")
    deleted_ids = []
    for ledger_id in ledger_ids:
        # soft delete: records are flagged as deleted rather than physically removed
        await interlockledger_controller.remove(id=int(ledger_id))
        deleted_ids.append(int(ledger_id))
    user_id = CTX_USER_ID.get()
    user_obj = await user_controller.get(id=user_id)
    await insert_log(log_type=LogType.ApiLog, log_detail_type=LogDetailType.InterLockLedgerBatchDelete, by_user_id=user_obj.id)
    return Success(msg="Deleted Successfully", data={"deleted_ids": deleted_ids})
COLUMN_MAPPING = {
"报警名称": "alarm_name",
"互锁类型": "lock_type",
"所属模块": "module_name",
"模块类型": "module_type",
"所属产品": "product",
"所属部门": "dept_name",
"触发器": "trigger",
"渠道号": "channel_number",
"触发器渠道": "trigger_way",
"报警描述": "alarm_desc",
"报警级别": "alarm_level",
"报警动作": "alarm_action",
"报警动作Data通道": "alarm_channel",
"报警动作描述": "alarm_action_desc",
"满足条件": "condition",
"备注": "desc"
}
REQUIRED_COLUMNS = ["报警名称", "互锁类型", "所属模块", "模块类型", "所属产品"]
@router.post(path='/ledgers/upload', summary='互锁台账上传')
async def _(files: List[UploadFile] = File(...)):
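    """Bulk-import interlock ledger records from uploaded Excel/CSV files; returns per-file status, saved row counts and row-level errors."""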
    # current user id for audit logging (fall back to 0 when unavailable)
    user_id = CTX_USER_ID.get()
    log_user_id = user_id if user_id else 0
    # per-file results and overall counters
    results = []
    total_records = 0
    success_files = 0
    cdg = CdgClient()
for file in files:
print(file.filename)
file_result = {
"filename": file.filename,
"status": "pending",
"records": 0,
"errors": []
}
        try:
            # 1. validate the file extension
            if not file.filename.lower().endswith(('.xlsx', '.xls', '.csv')):
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail="仅支持.xlsx、.csv和.xls格式的Excel文件"
                )
            # 2. decrypt the upload stream into memory, then parse it
            buffer = io.BytesIO()
            async for chunk in cdg.decrypt_file_stream(file, file_name=file.filename):
                buffer.write(chunk)
            buffer.seek(0)  # rewind the buffer before parsing
            if file.filename.lower().endswith('.csv'):
                df = pd.read_csv(buffer)
            else:
                # note: the openpyxl engine only reads .xlsx; legacy .xls files need the xlrd engine
                df = pd.read_excel(buffer, sheet_name=0, engine='openpyxl')
            # bail out early if the first sheet is empty or could not be parsed
            if df.empty:
                file_result["errors"].append("第一个工作表为空或无法解析\n")
                file_result["status"] = "failed"
                results.append(file_result)
                continue
            # 3. validate that the expected columns are present
missing_required = [cn for cn in REQUIRED_COLUMNS if cn not in df.columns]
if missing_required:
file_result["errors"].append(f"缺少必填列: {', '.join(missing_required)}\n")
file_result["status"] = "failed"
results.append(file_result)
continue
            other_missing = [cn for cn in df.columns if cn not in COLUMN_MAPPING]
            if other_missing:
                file_result["warnings"] = [f"忽略未定义列: {', '.join(other_missing)}\n"]
                df.drop(columns=other_missing, inplace=True)  # drop them so they are not persisted as extra fields
            # 4. rename columns (Chinese header -> English field name)
            df.rename(columns=COLUMN_MAPPING, inplace=True)
            # 5. normalise NaN cells to None
            df.replace({np.nan: None}, inplace=True)
            # 6. make sure the department column exists
            if 'dept_name' not in df.columns:
                df['dept_name'] = None
            # 7. product validation and department back-fill ==============================
            # collect every non-empty product value
valid_products = set()
col_idx = df.columns.get_loc('product')
for row in df.itertuples(index=False):
product_val = row[col_idx]
if product_val and str(product_val).strip():
valid_products.add(str(product_val).strip())
            # batch lookup of product -> department
            product_dept_map = {}
            if valid_products:
                try:
                    product_dept_map = await ztequip_controller.get_product_dept_map(list(valid_products))
                except Exception as e:
                    file_result["errors"].append(f"产品信息查询失败: {str(e)}\n")
            # back-fill department names and verify each product exists
            dept_idx = df.columns.get_loc('dept_name')
            for i, row in enumerate(df.itertuples(index=False)):
                product_val = row[col_idx]
                if not product_val or not str(product_val).strip():
                    continue
                clean_product = str(product_val).strip()
                if clean_product in product_dept_map:
                    # write the department straight into the DataFrame cell
                    df.iat[i, dept_idx] = product_dept_map[clean_product]
                else:
                    file_result["errors"].append(f"第{i + 2}行: 产品'{clean_product}'不存在\n")
            # 8. required-field check; rows that fail are excluded from the import
            required_fields_en = [COLUMN_MAPPING[col] for col in REQUIRED_COLUMNS]
            invalid_rows = set()
            for i, row in enumerate(df.itertuples(index=False)):
                row_dict = dict(zip(df.columns, row))
                for col in required_fields_en:
                    val = row_dict.get(col)
                    if val is None or str(val).strip() == "":
                        file_result["errors"].append(f"第{i + 2}行: '{col}'不能为空\n")
                        invalid_rows.add(i)
            # 9. convert the remaining rows into schema objects
            valid_records = []  # list of (row index, record)
            for index, row in df.iterrows():
                if index in invalid_rows:
                    continue
                record = InterLockLedgerCreate(**row.to_dict())
                valid_records.append((index, record))
            # 10. persist records one by one so a single bad row does not abort the whole file
            saved_count = 0
            for index, record in valid_records:
try:
await interlockledger_controller.create(obj_in=record)
saved_count += 1
                except Exception as e:  # catch any failure while saving this row
error_msg = f"第{index + 2}行数据保存失败: {str(e)}"
file_result["errors"].append(f"{error_msg}\n")
            # 11. finalise this file's upload result
file_result["records"] = saved_count
if file_result["errors"]:
if saved_count > 0:
file_result["status"] = "partial"
else:
file_result["status"] = "failed"
else:
file_result["status"] = "success"
results.append(file_result)
total_records += saved_count
if saved_count > 0:
success_files += 1
            # write one upload log entry per processed file
await insert_log(
log_type=LogType.UserLog,
log_detail_type=LogDetailType.InterLockLedgerUpload,
by_user_id=log_user_id,
)
        except HTTPException as e:
            file_result["status"] = "failed"
            file_result["errors"].append(f"{str(e.detail)}\n")
            results.append(file_result)
        except ValidationError as e:
            # pydantic validation failures carry structured per-field errors
            file_result["status"] = "failed"
            for err in e.errors():
                loc = err.get('loc', 'unknown')
                msg = err.get('msg', 'no message')
                inp = err.get('input', 'N/A')
                file_result["errors"].append(f"字段{loc}: {msg}, but input is {inp}.\n")
            results.append(file_result)
        except Exception as e:
            file_result["status"] = "failed"
            file_result["errors"].append(f"系统错误: {str(e)}\n")
            results.append(file_result)
    # build the final response
response_data = {
"total_files": len(files),
"success_files": success_files,
"total_records": total_records,
"details": results
}
print(response_data)
return Success(msg="Upload Successfully", data=response_data)