2011-10-10 0:27:53

2011-10-10 0:27:53 


/** This structure contains the data a format has to probe a file. */
/** This structure contains the data a format has to probe a file. */
typedef struct AVProbeData {
    const char *filename;   /**< Name of the file being probed (may inform extension-based scoring). */
    unsigned char *buf; /**< Buffer must have AVPROBE_PADDING_SIZE of extra allocated bytes filled with zero. */
    int buf_size; /**< Size of buf except extra allocated bytes */
} AVProbeData;




H.264 的探测函数（H.264 probe function）:




/**
 * Heuristic probe for a raw H.264 (Annex-B) elementary stream.
 *
 * Scans the buffer with a rolling 32-bit window looking for
 * 0x000001xx start codes, validates the NAL header of each unit
 * found, and counts the NAL types seen.
 *
 * @param p probe data; relies on the AVPROBE_PADDING_SIZE zero
 *          padding past buf_size for the SPS look-ahead read.
 * @return AVPROBE_SCORE_MAX/2 + 1 if the data looks like H.264,
 *         0 otherwise.
 */
static int h264_probe(AVProbeData *p)
{
    /* Rolling window over the last 4 bytes; 0x000001xx == start code
     * immediately followed by a NAL header byte. */
    uint32_t window = -1;
    int num_sps = 0, num_pps = 0, num_idr = 0, num_bad = 0, num_slice = 0;
    int pos;

    for (pos = 0; pos < p->buf_size; pos++) {
        int nal_ref_idc, nal_type;
        /* Per-NAL-type expectation for nal_ref_idc:
         *   1 -> must be 0, -1 -> must be non-zero,
         *   2 -> type not expected in a plain H.264 stream (counted),
         *   0 -> unconstrained. */
        static const int8_t ref_zero[32] = {
            2, 0, 0, 0, 0,-1, 1,-1,
           -1, 1, 1, 1, 1,-1, 2, 2,
            2, 2, 2, 0, 2, 2, 2, 2,
            2, 2, 2, 2, 2, 2, 2, 2
        };

        window = (window << 8) + p->buf[pos];
        if ((window & 0xffffff00) != 0x100)
            continue; /* no start code ending at this byte */

        if (window & 0x80) /* forbidden_zero_bit must be 0 */
            return 0;

        nal_ref_idc = (window >> 5) & 3;
        nal_type    = window & 0x1F;

        if (ref_zero[nal_type] == 1 && nal_ref_idc)
            return 0;
        if (ref_zero[nal_type] == -1 && !nal_ref_idc)
            return 0;
        if (ref_zero[nal_type] == 2)
            num_bad++;

        switch (nal_type) {
        case 1: /* non-IDR slice */
            num_slice++;
            break;
        case 5: /* IDR slice */
            num_idr++;
            break;
        case 7: /* SPS: reserved low nibble two bytes in must be zero
                 * (read may touch the zeroed padding area). */
            if (p->buf[pos + 2] & 0x0F)
                return 0;
            num_sps++;
            break;
        case 8: /* PPS */
            num_pps++;
            break;
        }
    }

    /* Accept only when parameter sets plus real picture data were seen,
     * and unexpected NAL types do not dominate. */
    if (num_sps && num_pps && (num_idr || num_slice > 3) &&
        num_bad < num_sps + num_pps + num_idr)
        return AVPROBE_SCORE_MAX/2 + 1; // +1 for .mpg

    return 0;
}


解析输入数据是否为 H.264 码流（determines whether the input data is an H.264 stream）

Script ===================== 工具路径 布局名称 问题图斑占用永久基本农田叠加分析图 索引图层 陇把镇\原始数据\问题图斑范围 排除图层 YJJBNTBHTB核实处置 输出文件夹 I:\占耕建房\新建文件夹 输出文件前缀 2 输出格式 JPEG ===================== 消息 开始时间: 2025年11月25日 10:08:28 [2025-11-25 10:08:28] INFO: ================================================================================ [2025-11-25 10:08:28] INFO: 开始处理布局: 问题图斑占用永久基本农田叠加分析图 [2025-11-25 10:08:28] INFO: 索引图层路径: 陇把镇\原始数据\问题图斑范围 [2025-11-25 10:08:28] INFO: 排除图层: YJJBNTBHTB核实处置 [2025-11-25 10:08:28] INFO: 输出文件夹: I:\占耕建房\新建文件夹 [2025-11-25 10:08:28] INFO: 输出前缀: 2 [2025-11-25 10:08:28] INFO: 输出格式: jpeg [2025-11-25 10:08:28] INFO: ================================================================================ [2025-11-25 10:08:28] INFO: 检查地图框 '地图框' 中的地图 '地图' [2025-11-25 10:08:28] INFO: 找到索引图层: 陇把镇\原始数据\问题图斑范围 (位于地图 '地图') [2025-11-25 10:08:28] INFO: 将应用过滤的图层: c533124陇川县_原始下发点_SpatialJoin7 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: c533124陇川县_原始下发点_SpatialJoin5 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 乡镇图斑\户撒乡 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 乡镇图斑\景罕镇 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 乡镇图斑\章凤镇 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 乡镇图斑\城子 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 乡镇图斑\户撒乡 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 乡镇图斑\景罕镇 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 乡镇图斑\章凤镇 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 乡镇图斑\城子 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 533124陇川县_原始下发点数据 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇川县2018-2024年农村宅基地 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤拉勐村委会\主房_附房 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤拉勐村委会\主房 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤拉勐村委会\庭院 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤拉勐村委会\附房 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤拉勐村委会\宗地 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤拉勐村委会\主房_附房 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤拉勐村委会\主房 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤拉勐村委会\庭院 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤拉勐村委会\附房 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤拉勐村委会\宗地 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤芒拉村委会\原始数据\附房1_ClearZM [2025-11-25 
10:08:28] INFO: 将应用过滤的图层: 章凤芒拉村委会\原始数据\院场1_ClearZM [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤芒拉村委会\原始数据\主房1_ClearZM [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤芒拉村委会\原始数据\宗地_ClearZM_SpatialJoin [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤芒拉村委会\原始数据\附房1_ClearZM [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤芒拉村委会\原始数据\院场1_ClearZM [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤芒拉村委会\原始数据\主房1_ClearZM [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤芒拉村委会\原始数据\宗地_ClearZM_SpatialJoin [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤芒拉村委会\原始数据\附房1_ClearZM [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤芒拉村委会\原始数据\院场1_ClearZM [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤芒拉村委会\原始数据\主房1_ClearZM [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤芒拉村委会\原始数据\宗地_ClearZM_SpatialJoin [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤芒拉村委会\原始数据\附房1_ClearZM [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤芒拉村委会\原始数据\院场1_ClearZM [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤芒拉村委会\原始数据\主房1_ClearZM [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 章凤芒拉村委会\原始数据\宗地_ClearZM_SpatialJoin [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\问题图斑范围 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\房屋庭院 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\宗地 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\房屋建筑图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\附房 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\庭院 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\问题图斑范围 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\宗地 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\房屋建筑图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\附房 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\庭院 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\问题图斑范围 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\问题图斑范围 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\房屋庭院 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\宗地 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\房屋建筑图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\附房 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\庭院 [2025-11-25 
10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\问题图斑范围 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\宗地 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\房屋建筑图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\附房 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\庭院 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\问题图斑范围 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\问题图斑范围 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\房屋庭院 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\宗地 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\房屋建筑图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\附房 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\庭院 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\问题图斑范围 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\宗地 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\房屋建筑图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\附房 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\庭院 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\问题图斑范围 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\问题图斑范围 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\房屋庭院 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\宗地 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\房屋建筑图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\附房 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\庭院 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\问题图斑范围 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\宗地 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\房屋建筑图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\附房 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\庭院 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 景罕镇\景罕广宋\原始数据\问题图斑范围 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 乡镇范围\城子镇 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 乡镇范围\章凤镇 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 乡镇范围\景罕镇 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 乡镇范围\户撒乡 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 乡镇范围\城子镇 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 乡镇范围\章凤镇 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 
乡镇范围\景罕镇 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 乡镇范围\户撒乡 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 结构表\问题图斑属性结构表 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 结构表\问题图斑庭院属性结构表 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 结构表\压占永久基本农田图斑属性结构表 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 结构表\问题图斑附房用房属性结构表 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 结构表\问题图斑主房属性结构表 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 结构表\问题图斑属性结构表 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 结构表\问题图斑庭院属性结构表 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 结构表\压占永久基本农田图斑属性结构表 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 结构表\问题图斑附房用房属性结构表 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 结构表\问题图斑主房属性结构表 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\主房附房庭院 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\原始数据\庭院 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\原始数据\附房 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\原始数据\房屋建筑图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\原始数据\宗地 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\原始数据\问题图斑范围 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\原始数据\庭院 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\原始数据\附房 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\原始数据\房屋建筑图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\原始数据\宗地 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\原始数据\问题图斑范围 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\主房附房庭院 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\原始数据\庭院 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\原始数据\附房 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\原始数据\房屋建筑图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\原始数据\宗地 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\原始数据\问题图斑范围 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\原始数据\庭院 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\原始数据\附房 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\原始数据\房屋建筑图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\原始数据\宗地 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇\原始数据\问题图斑范围 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 不动产\ZRZ [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 不动产\SHYQZD [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 不动产\ZRZ [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 不动产\SHYQZD [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 建设用地报批成果 [2025-11-25 10:08:28] INFO: 
将应用过滤的图层: 陇川县2010年至2024年5月农转用 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 规划用地用海(国空) [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\GHDLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\GHDLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\GHDLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\规划地类图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\GHDLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\规划地类图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\规划地类图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\规划地类图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\规划地类图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\GHDLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\规划地类图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\GHDLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\GHDLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\规划地类图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\规划地类图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\GHDLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\GHDLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\GHDLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\规划地类图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\GHDLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\规划地类图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\规划地类图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\规划地类图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\规划地类图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\GHDLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\规划地类图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\GHDLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\GHDLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\规划地类图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 村规\规划地类图斑 [2025-11-25 10:08:28] INFO: 排除图层: YJJBNTBHTB核实处置 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 10-19变更二调耕地 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 10-19年变更\2019年DLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 10-19年变更\2018年DLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 10-19年变更\2017年DLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 10-19年变更\2016年DLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 10-19年变更\2015年DLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 10-19年变更\2014年DLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 
10-19年变更\2013年DLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 10-19年变更\2012年DLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 10-19年变更\2011年DLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 10-19年变更\2010年DLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 10-19年变更\2019年DLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 10-19年变更\2018年DLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 10-19年变更\2017年DLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 10-19年变更\2016年DLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 10-19年变更\2015年DLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 10-19年变更\2014年DLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 10-19年变更\2013年DLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 10-19年变更\2012年DLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 10-19年变更\2011年DLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 10-19年变更\2010年DLTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇川坝区范围 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 陇把镇 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 信息采集-房屋图斑202510281127 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 2、陇把镇航飞总图纸.dwg\2、陇把镇航飞总图纸-Annotation [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 2、陇把镇航飞总图纸.dwg\2、陇把镇航飞总图纸-Point [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 2、陇把镇航飞总图纸.dwg\2、陇把镇航飞总图纸-Polyline [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 2、陇把镇航飞总图纸.dwg\2、陇把镇航飞总图纸-Polygon [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 2、陇把镇航飞总图纸.dwg\2、陇把镇航飞总图纸-MultiPatch [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 2、陇把镇航飞总图纸.dwg\2、陇把镇航飞总图纸-Annotation [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 2、陇把镇航飞总图纸.dwg\2、陇把镇航飞总图纸-Point [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 2、陇把镇航飞总图纸.dwg\2、陇把镇航飞总图纸-Polyline [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 2、陇把镇航飞总图纸.dwg\2、陇把镇航飞总图纸-Polygon [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 2、陇把镇航飞总图纸.dwg\2、陇把镇航飞总图纸-MultiPatch [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 二调\DLMCZJ [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 二调\地类图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 二调\线状地物 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 二调\JSYDGZQ [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 二调\BQFW [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 二调\GHJBNTBHQ [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 二调\TDGHDL [2025-11-25 10:08:28] INFO: 
将应用过滤的图层: 二调\DLMCZJ [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 二调\地类图斑 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 二调\线状地物 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 二调\JSYDGZQ [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 二调\BQFW [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 二调\GHJBNTBHQ [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 二调\TDGHDL [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 三调 [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 三区三线\STBHHX [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 三区三线\YJJBNTBHTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 三区三线\GDBHMB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 三区三线\CZKFBJ [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 三区三线\STBHHX [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 三区三线\YJJBNTBHTB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 三区三线\GDBHMB [2025-11-25 10:08:28] INFO: 将应用过滤的图层: 三区三线\CZKFBJ [2025-11-25 10:08:28] INFO: 开始处理 138 个页面... [2025-11-25 10:08:29] INFO: 处理页面 1/138 [2025-11-25 10:08:29] ERROR: 处理页面 1 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:29] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:30] INFO: 处理页面 2/138 [2025-11-25 10:08:30] ERROR: 处理页面 2 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:30] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:31] INFO: 处理页面 3/138 [2025-11-25 10:08:31] ERROR: 处理页面 3 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:31] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:31] INFO: 处理页面 4/138 [2025-11-25 10:08:32] ERROR: 处理页面 4 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:32] ERROR: Traceback (most recent call 
last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:33] INFO: 处理页面 5/138 [2025-11-25 10:08:33] ERROR: 处理页面 5 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:33] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:34] INFO: 处理页面 6/138 [2025-11-25 10:08:34] ERROR: 处理页面 6 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:34] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:35] INFO: 处理页面 7/138 [2025-11-25 10:08:35] ERROR: 处理页面 7 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:35] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:36] INFO: 处理页面 8/138 [2025-11-25 10:08:36] ERROR: 处理页面 8 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:36] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:37] INFO: 处理页面 9/138 [2025-11-25 10:08:37] ERROR: 处理页面 9 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:37] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:38] INFO: 处理页面 10/138 [2025-11-25 10:08:38] ERROR: 处理页面 10 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:38] 
ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:39] INFO: 处理页面 11/138 [2025-11-25 10:08:39] ERROR: 处理页面 11 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:39] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:40] INFO: 处理页面 12/138 [2025-11-25 10:08:40] ERROR: 处理页面 12 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:40] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:41] INFO: 处理页面 13/138 [2025-11-25 10:08:41] ERROR: 处理页面 13 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:41] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:41] INFO: 处理页面 14/138 [2025-11-25 10:08:41] ERROR: 处理页面 14 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:41] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:42] INFO: 处理页面 15/138 [2025-11-25 10:08:42] ERROR: 处理页面 15 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:42] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:43] INFO: 处理页面 16/138 [2025-11-25 10:08:43] ERROR: 处理页面 16 时出错: 'pageRow' object has no 
attribute 'getValue' [2025-11-25 10:08:43] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:44] INFO: 处理页面 17/138 [2025-11-25 10:08:44] ERROR: 处理页面 17 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:44] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:45] INFO: 处理页面 18/138 [2025-11-25 10:08:45] ERROR: 处理页面 18 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:45] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:46] INFO: 处理页面 19/138 [2025-11-25 10:08:46] ERROR: 处理页面 19 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:46] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:47] INFO: 处理页面 20/138 [2025-11-25 10:08:47] ERROR: 处理页面 20 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:47] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:48] INFO: 处理页面 21/138 [2025-11-25 10:08:48] ERROR: 处理页面 21 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:48] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:49] INFO: 处理页面 22/138 [2025-11-25 10:08:49] 
ERROR: 处理页面 22 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:49] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:50] INFO: 处理页面 23/138 [2025-11-25 10:08:50] ERROR: 处理页面 23 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:50] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:51] INFO: 处理页面 24/138 [2025-11-25 10:08:51] ERROR: 处理页面 24 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:51] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:52] INFO: 处理页面 25/138 [2025-11-25 10:08:52] ERROR: 处理页面 25 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:52] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:53] INFO: 处理页面 26/138 [2025-11-25 10:08:53] ERROR: 处理页面 26 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:53] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:54] INFO: 处理页面 27/138 [2025-11-25 10:08:54] ERROR: 处理页面 27 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:54] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:55] 
INFO: 处理页面 28/138 [2025-11-25 10:08:55] ERROR: 处理页面 28 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:55] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:55] INFO: 处理页面 29/138 [2025-11-25 10:08:55] ERROR: 处理页面 29 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:55] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:56] INFO: 处理页面 30/138 [2025-11-25 10:08:56] ERROR: 处理页面 30 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:56] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:57] INFO: 处理页面 31/138 [2025-11-25 10:08:57] ERROR: 处理页面 31 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:57] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:58] INFO: 处理页面 32/138 [2025-11-25 10:08:58] ERROR: 处理页面 32 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:58] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:59] INFO: 处理页面 33/138 [2025-11-25 10:08:59] ERROR: 处理页面 33 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:08:59] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no 
attribute 'getValue' [2025-11-25 10:09:00] INFO: 处理页面 34/138 [2025-11-25 10:09:00] ERROR: 处理页面 34 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:09:00] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:09:01] INFO: 处理页面 35/138 [2025-11-25 10:09:01] ERROR: 处理页面 35 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:09:01] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:09:02] INFO: 处理页面 36/138 [2025-11-25 10:09:02] ERROR: 处理页面 36 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:09:02] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:09:03] INFO: 处理页面 37/138 [2025-11-25 10:09:03] ERROR: 处理页面 37 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:09:03] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:09:04] INFO: 处理页面 38/138 [2025-11-25 10:09:04] ERROR: 处理页面 38 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:09:04] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:09:05] INFO: 处理页面 39/138 [2025-11-25 10:09:05] ERROR: 处理页面 39 时出错: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:09:05] ERROR: Traceback (most recent call last): File "I:\arcgispro工具箱\自制工具箱.atbx\Script.tool\tool.script.execute.py", line 162, in main 
AttributeError: 'pageRow' object has no attribute 'getValue' [2025-11-25 10:09:06] INFO: 恢复图层原始状态... [2025-11-25 10:09:06] INFO: 恢复图层: c533124陇川县_原始下发点_SpatialJoin7 [2025-11-25 10:09:06] INFO: 恢复图层: c533124陇川县_原始下发点_SpatialJoin5 [2025-11-25 10:09:07] INFO: 恢复图层: 户撒乡 [2025-11-25 10:09:07] INFO: 恢复图层: 景罕镇 [2025-11-25 10:09:07] INFO: 恢复图层: 章凤镇 [2025-11-25 10:09:07] INFO: 恢复图层: 城子 [2025-11-25 10:09:07] INFO: 恢复图层: 户撒乡 [2025-11-25 10:09:07] INFO: 恢复图层: 景罕镇
11-26
-- MySQL dump 10.13 Distrib 8.0.42, for Win64 (x86_64) -- -- Host: 127.0.0.1 Database: tlias -- ------------------------------------------------------ -- Server version 8.0.42 /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; /*!50503 SET NAMES utf8mb4 */; /*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; /*!40103 SET TIME_ZONE='+00:00' */; /*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */; /*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; /*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; /*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; -- -- Table structure for table `clazz` -- DROP TABLE IF EXISTS `clazz`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!50503 SET character_set_client = utf8mb4 */; CREATE TABLE `clazz` ( `id` int unsigned NOT NULL AUTO_INCREMENT COMMENT 'ID,主键', `name` varchar(30) NOT NULL COMMENT '班级名称', `room` varchar(20) DEFAULT NULL COMMENT '班级教室', `begin_date` date NOT NULL COMMENT '开课时间', `end_date` date NOT NULL COMMENT '结课时间', `master_id` int unsigned DEFAULT NULL COMMENT '班主任ID, 关联员工表ID', `subject` tinyint unsigned NOT NULL COMMENT '学科, 1:java, 2:前端, 3:大数据, 4:Python, 5:Go, 6: 嵌入式', `create_time` datetime DEFAULT NULL COMMENT '创建时间', `update_time` datetime DEFAULT NULL COMMENT '修改时间', PRIMARY KEY (`id`), UNIQUE KEY `name` (`name`) ) ENGINE=InnoDB AUTO_INCREMENT=11 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci COMMENT='班级表'; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Dumping data for table `clazz` -- LOCK TABLES `clazz` WRITE; /*!40000 ALTER TABLE `clazz` DISABLE KEYS */; INSERT INTO `clazz` VALUES (1,'JavaEE就业163期','212','2025-04-30','2025-06-29',10,1,'2024-06-01 17:08:23','2024-06-01 17:39:58'),(2,'前端就业90期','210','2024-07-10','2024-01-20',3,2,'2024-06-01 
17:45:12','2024-06-01 17:45:12'),(3,'JavaEE就业165期','108','2024-06-15','2024-12-25',6,1,'2024-06-01 17:45:40','2024-06-01 17:45:40'),(4,'JavaEE就业166期','105','2024-07-20','2024-02-20',20,1,'2024-06-01 17:46:10','2024-06-01 17:46:10'),(5,'大数据就业58期','209','2024-08-01','2024-02-15',7,3,'2024-06-01 17:51:21','2024-06-01 17:51:21'),(6,'JavaEE就业167期','325','2025-11-20','2026-05-10',36,1,'2024-11-15 11:35:46','2024-12-13 14:31:24'); /*!40000 ALTER TABLE `clazz` ENABLE KEYS */; UNLOCK TABLES; -- -- Table structure for table `dept` -- DROP TABLE IF EXISTS `dept`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!50503 SET character_set_client = utf8mb4 */; CREATE TABLE `dept` ( `id` int unsigned NOT NULL AUTO_INCREMENT COMMENT 'ID, 主键', `name` varchar(10) NOT NULL COMMENT '部门名称', `create_time` datetime DEFAULT NULL COMMENT '创建时间', `update_time` datetime DEFAULT NULL COMMENT '修改时间', PRIMARY KEY (`id`), UNIQUE KEY `name` (`name`) ) ENGINE=InnoDB AUTO_INCREMENT=25 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci COMMENT='部门表'; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Dumping data for table `dept` -- LOCK TABLES `dept` WRITE; /*!40000 ALTER TABLE `dept` DISABLE KEYS */; INSERT INTO `dept` VALUES (1,'学工部','2025-05-04 22:32:58','2025-05-13 19:48:47'),(2,'教研部','2025-03-25 09:47:40','2025-03-25 15:17:04'),(3,'咨询部','2025-03-25 09:47:40','2025-03-30 21:26:24'),(4,'就业部','2025-03-25 09:47:40','2025-03-30 09:47:40'),(5,'人事部','2025-03-25 09:47:40','2025-03-30 09:47:40'),(10,'财务部','2025-05-04 22:33:52','2025-05-06 21:34:02'),(11,'后勤部','2025-05-04 23:55:36','2025-05-04 23:58:49'); /*!40000 ALTER TABLE `dept` ENABLE KEYS */; UNLOCK TABLES; -- -- Table structure for table `emp` -- DROP TABLE IF EXISTS `emp`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!50503 SET character_set_client = utf8mb4 */; CREATE TABLE `emp` ( `id` int unsigned NOT NULL AUTO_INCREMENT COMMENT 'ID,主键', `username` varchar(20) NOT NULL COMMENT '用户名', `password` 
varchar(32) DEFAULT '123456' COMMENT '密码', `name` varchar(10) NOT NULL COMMENT '姓名', `gender` tinyint unsigned NOT NULL COMMENT '性别, 1:男, 2:女', `phone` char(11) NOT NULL COMMENT '手机号', `job` tinyint unsigned DEFAULT NULL COMMENT '职位, 1 班主任, 2 讲师 , 3 学工主管, 4 教研主管, 5 咨询师', `salary` int unsigned DEFAULT NULL COMMENT '薪资', `image` varchar(255) DEFAULT NULL COMMENT '头像', `entry_date` date DEFAULT NULL COMMENT '入职日期', `dept_id` int unsigned DEFAULT NULL COMMENT '部门ID', `create_time` datetime DEFAULT NULL COMMENT '创建时间', `update_time` datetime DEFAULT NULL COMMENT '修改时间', PRIMARY KEY (`id`), UNIQUE KEY `username` (`username`), UNIQUE KEY `phone` (`phone`) ) ENGINE=InnoDB AUTO_INCREMENT=50 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci COMMENT='员工表'; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Dumping data for table `emp` -- LOCK TABLES `emp` WRITE; /*!40000 ALTER TABLE `emp` DISABLE KEYS */; INSERT INTO `emp` VALUES (1,'shinaian','123456','施耐庵',1,'13309090001',4,15000,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2000-01-01',2,'2023-10-20 16:35:33','2023-11-16 16:11:26'),(2,'songjiang','123456','宋江',1,'13309090002',2,8600,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2015-01-01',2,'2023-10-20 16:35:33','2023-10-20 16:35:37'),(3,'lujunyi','123456','卢俊义',1,'13309090003',2,8900,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2008-05-01',2,'2023-10-20 16:35:33','2023-10-20 16:35:39'),(4,'wuyong','123456','吴用',1,'13309090004',2,9200,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2007-01-01',2,'2023-10-20 16:35:33','2023-10-20 16:35:41'),(5,'gongsunsheng','123456','公孙胜',1,'13309090005',2,9500,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2012-12-05',2,'2023-10-20 16:35:33','2023-10-20 16:35:43'),(6,'huosanniang','123456','扈三娘',2,'13309090006',3,6500,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2013-09-05',1,'2023-10-20 16:35:33','2023-10-20 
16:35:45'),(7,'chaijin','123456','柴进',1,'13309090007',1,4700,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2005-08-01',1,'2023-10-20 16:35:33','2023-10-20 16:35:47'),(8,'likui','123456','李逵',1,'13309090008',1,4800,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2014-11-09',1,'2023-10-20 16:35:33','2023-10-20 16:35:49'),(9,'wusong','123456','武松',1,'13309090009',1,4900,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2011-03-11',1,'2023-10-20 16:35:33','2023-10-20 16:35:51'),(10,'linchong','123456','林冲',1,'13309090010',1,5000,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2013-09-05',1,'2023-10-20 16:35:33','2023-10-20 16:35:53'),(11,'huyanzhuo','123456','呼延灼',1,'13309090011',2,9700,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2007-02-01',2,'2023-10-20 16:35:33','2023-10-20 16:35:55'),(12,'xiaoliguang','123456','小李广',1,'13309090012',2,10000,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2008-08-18',2,'2023-10-20 16:35:33','2023-10-20 16:35:57'),(13,'yangzhi','123456','杨志',1,'13309090013',1,5300,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2012-11-01',1,'2023-10-20 16:35:33','2023-10-20 16:35:59'),(14,'shijin','123456','史进',1,'13309090014',2,10600,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2002-08-01',2,'2023-10-20 16:35:33','2023-10-20 16:36:01'),(15,'sunerniang','123456','孙二娘',2,'13309090015',2,10900,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2011-05-01',2,'2023-10-20 16:35:33','2023-10-20 16:36:03'),(16,'luzhishen','123456','鲁智深',1,'13309090016',2,9600,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2010-01-01',2,'2023-10-20 16:35:33','2023-10-20 16:36:05'),(17,'liying','55555','李应',1,'13309090017',1,5800,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2015-03-21',1,'2023-10-20 16:35:33','2025-05-11 
14:33:07'),(18,'shiqian','123456','时迁',1,'13309090018',2,10200,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2015-01-01',2,'2023-10-20 16:35:33','2023-10-20 16:36:09'),(19,'gudasao','123456','顾大嫂',2,'13309090019',2,10500,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2008-01-01',2,'2023-10-20 16:35:33','2023-10-20 16:36:11'),(20,'ruanxiaoer','123456','阮小二',1,'13309090020',2,10800,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2018-01-01',2,'2023-10-20 16:35:33','2023-10-20 16:36:13'),(21,'ruanxiaowu','123456','阮小五',1,'13309090021',5,5200,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2015-01-01',3,'2023-10-20 16:35:33','2023-10-20 16:36:15'),(22,'ruanxiaoqi','123456','阮小七',1,'13309090022',5,5500,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2016-01-01',3,'2023-10-20 16:35:33','2023-10-20 16:36:17'),(23,'ruanji','123456','阮籍',1,'13309090023',5,5800,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2012-01-01',3,'2023-10-20 16:35:33','2023-10-20 16:36:19'),(24,'tongwei','123456','童威',1,'13309090024',5,5000,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2006-01-01',3,'2023-10-20 16:35:33','2023-10-20 16:36:21'),(25,'tongmeng','123456','童猛',1,'13309090025',5,4800,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2002-01-01',3,'2023-10-20 16:35:33','2023-10-20 16:36:23'),(26,'yanshun','123456','燕顺',1,'13309090026',5,5400,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2011-01-01',3,'2023-10-20 16:35:33','2023-11-08 22:12:46'),(27,'lijun','123456','李俊',1,'13309090027',2,6600,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2004-01-01',2,'2023-10-20 16:35:33','2023-11-16 17:56:59'),(28,'lizhong','123456','李忠',1,'13309090028',5,5000,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2007-01-01',3,'2023-10-20 16:35:33','2023-11-17 
16:34:22'),(30,'liyun','123456','李云',1,'13309090030',NULL,NULL,'https://web-framework.oss-cn-hangzhou.aliyuncs.com/2023/1.jpg','2020-03-01',NULL,'2023-10-20 16:35:33','2023-10-20 16:36:31'),(36,'guoyujia','123456','郭宇佳',1,'13309243078',2,8001,'https://java-ai-28.oss-cn-beijing.aliyuncs.com/2025/06/c57f4f9c-941f-4ff8-8125-ec63f67ff5c2.jpg','2025-05-01',1,'2025-05-12 11:38:23','2025-06-19 16:39:44'),(44,'maodie','123456','耄耋',1,'13022111045',4,50000,'https://java-ai-28.oss-cn-beijing.aliyuncs.com/2025/05/f41cebb4-082c-4f84-b1c4-d0050412ac09.png','2025-03-15',10,'2025-05-10 23:49:59','2025-05-10 23:49:59'),(45,'pengqian','123456','彭谦',1,'18729903553',5,10000000,'https://java-ai-28.oss-cn-beijing.aliyuncs.com/2025/06/be8baca2-732b-490f-a56a-1d18b9b7c614.jpg','2025-06-02',3,'2025-06-02 23:19:54','2025-06-02 23:19:54'),(48,'fengborui','123456','冯博睿',1,'15888888887',2,3000,'https://java-ai-28.oss-cn-beijing.aliyuncs.com/2025/06/08ddac82-9432-47fd-8f0a-0850e4623440.jpg','2025-06-18',11,'2025-06-19 16:42:12','2025-06-19 16:42:12'); /*!40000 ALTER TABLE `emp` ENABLE KEYS */; UNLOCK TABLES; -- -- Table structure for table `emp_expr` -- DROP TABLE IF EXISTS `emp_expr`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!50503 SET character_set_client = utf8mb4 */; CREATE TABLE `emp_expr` ( `id` int unsigned NOT NULL AUTO_INCREMENT COMMENT 'ID, 主键', `emp_id` int unsigned DEFAULT NULL COMMENT '员工ID', `begin` date DEFAULT NULL COMMENT '开始时间', `end` date DEFAULT NULL COMMENT '结束时间', `company` varchar(50) DEFAULT NULL COMMENT '公司名称', `job` varchar(50) DEFAULT NULL COMMENT '职位', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=9 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci COMMENT='工作经历'; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Dumping data for table `emp_expr` -- LOCK TABLES `emp_expr` WRITE; /*!40000 ALTER TABLE `emp_expr` DISABLE KEYS */; INSERT INTO `emp_expr` VALUES 
(6,44,'2022-05-05','2024-05-30','白手套','猫财神'),(7,45,'2025-03-12','2025-06-02','三角洲行动','摸金少校'),(8,48,'2025-03-13','2025-04-25','陕理工附中','校长'); /*!40000 ALTER TABLE `emp_expr` ENABLE KEYS */; UNLOCK TABLES; -- -- Table structure for table `operate_log` -- DROP TABLE IF EXISTS `operate_log`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!50503 SET character_set_client = utf8mb4 */; CREATE TABLE `operate_log` ( `id` int unsigned NOT NULL AUTO_INCREMENT COMMENT 'ID', `operate_emp_id` int unsigned DEFAULT NULL COMMENT '操作人ID', `operate_time` datetime DEFAULT NULL COMMENT '操作时间', `class_name` varchar(100) DEFAULT NULL COMMENT '操作的类名', `method_name` varchar(100) DEFAULT NULL COMMENT '操作的方法名', `method_params` varchar(2000) DEFAULT NULL COMMENT '方法参数', `return_value` varchar(2000) DEFAULT NULL COMMENT '返回值', `cost_time` bigint unsigned DEFAULT NULL COMMENT '方法执行耗时, 单位:ms', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=28 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci COMMENT='操作日志表'; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Dumping data for table `operate_log` -- LOCK TABLES `operate_log` WRITE; /*!40000 ALTER TABLE `operate_log` DISABLE KEYS */; INSERT INTO `operate_log` VALUES (1,1,'2025-05-14 16:17:14','org.example.controller.DeptController','add','[Dept(id=null, name=678, createTime=2025-05-14T16:17:13.590562200, updateTime=2025-05-14T16:17:13.590562200)]','{\"code\":1,\"msg\":\"success\",\"data\":null}',12),(2,1,'2025-05-14 16:17:20','org.example.controller.DeptController','update','[Dept(id=12, name=678666, createTime=2025-05-14T16:17:14, updateTime=2025-05-14T16:17:20.396796900)]','{\"code\":1,\"msg\":\"success\",\"data\":null}',6),(3,1,'2025-05-14 16:17:23','org.example.controller.DeptController','delete','[12]','{\"code\":1,\"msg\":\"success\",\"data\":null}',6),(4,36,'2025-05-14 23:52:40','org.example.controller.DeptController','add','[Dept(id=null, name=666, createTime=2025-05-14T23:52:39.992155700, 
updateTime=2025-05-14T23:52:39.992155700)]','{\"code\":1,\"msg\":\"success\",\"data\":null}',28),(5,36,'2025-05-14 23:52:44','org.example.controller.DeptController','update','[Dept(id=13, name=666888, createTime=2025-05-14T23:52:40, updateTime=2025-05-14T23:52:44.173557500)]','{\"code\":1,\"msg\":\"success\",\"data\":null}',8),(6,36,'2025-05-14 23:52:46','org.example.controller.DeptController','delete','[13]','{\"code\":1,\"msg\":\"success\",\"data\":null}',7),(7,NULL,'2025-05-27 20:07:38','org.example.controller.DeptController','add','[Dept(id=null, name=, createTime=2025-05-27T20:07:38.466774100, updateTime=2025-05-27T20:07:38.466774100)]','{\"code\":1,\"msg\":\"success\",\"data\":null}',29),(8,NULL,'2025-05-29 15:35:25','org.example.controller.DeptController','add','[Dept(id=null, name=搞笑部, createTime=2025-05-29T15:35:25.066868500, updateTime=2025-05-29T15:35:25.066868500)]','{\"code\":1,\"msg\":\"success\",\"data\":null}',15),(9,NULL,'2025-05-29 15:35:45','org.example.controller.DeptController','add','[Dept(id=null, name=搞笑部, createTime=2025-05-29T15:35:44.727359400, updateTime=2025-05-29T15:35:44.727359400)]','null',399),(10,NULL,'2025-05-29 15:36:43','org.example.controller.DeptController','add','[Dept(id=null, name=搞笑部, createTime=2025-05-29T15:36:43.372420500, updateTime=2025-05-29T15:36:43.372420500)]','null',4),(11,NULL,'2025-05-29 16:47:32','org.example.controller.DeptController','add','[Dept(id=null, name=, createTime=2025-05-29T16:47:31.852988600, updateTime=2025-05-29T16:47:31.852988600)]','null',5),(12,NULL,'2025-05-29 16:52:17','org.example.controller.DeptController','add','[Dept(id=null, name=32, createTime=2025-05-29T16:52:16.564102800, updateTime=2025-05-29T16:52:16.564102800)]','{\"code\":1,\"msg\":\"success\",\"data\":null}',5),(13,NULL,'2025-05-29 17:12:29','org.example.controller.DeptController','add','[Dept(id=null, name=搞笑部, createTime=2025-05-29T17:12:29.474613400, updateTime=2025-05-29T17:12:29.474613400)]','null',5),(14,NULL,'2025-05-29 
18:03:31','org.example.controller.DeptController','update','[Dept(id=14, name=123, createTime=2025-05-27T20:07:38, updateTime=2025-05-29T18:03:30.662468)]','{\"code\":1,\"msg\":\"success\",\"data\":null}',9),(15,NULL,'2025-05-29 18:04:06','org.example.controller.DeptController','update','[Dept(id=19, name=233333, createTime=2025-05-29T16:52:17, updateTime=2025-05-29T18:04:06.350611100)]','{\"code\":1,\"msg\":\"success\",\"data\":null}',4),(16,NULL,'2025-05-29 18:32:13','org.example.controller.DeptController','delete','[19]','{\"code\":1,\"msg\":\"success\",\"data\":null}',15),(17,NULL,'2025-05-29 18:34:17','org.example.controller.DeptController','delete','[14]','{\"code\":1,\"msg\":\"success\",\"data\":null}',4),(18,NULL,'2025-05-29 18:37:32','org.example.controller.DeptController','delete','[15]','{\"code\":1,\"msg\":\"success\",\"data\":null}',5),(19,NULL,'2025-05-29 18:38:58','org.example.controller.DeptController','add','[Dept(id=null, name=沙增乐部, createTime=2025-05-29T18:38:57.904253900, updateTime=2025-05-29T18:38:57.904253900)]','{\"code\":1,\"msg\":\"success\",\"data\":null}',4),(20,NULL,'2025-05-29 18:39:03','org.example.controller.DeptController','update','[Dept(id=21, name=沙增乐, createTime=2025-05-29T18:38:58, updateTime=2025-05-29T18:39:02.502901)]','{\"code\":1,\"msg\":\"success\",\"data\":null}',3),(21,NULL,'2025-05-29 18:39:15','org.example.controller.DeptController','delete','[21]','{\"code\":1,\"msg\":\"success\",\"data\":null}',3),(22,NULL,'2025-05-29 22:27:39','org.example.controller.DeptController','add','[Dept(id=null, name=asdasdasd, createTime=2025-05-29T22:27:39.396613400, updateTime=2025-05-29T22:27:39.396613400)]','{\"code\":1,\"msg\":\"success\",\"data\":null}',5),(23,NULL,'2025-05-29 22:27:57','org.example.controller.DeptController','delete','[22]','{\"code\":1,\"msg\":\"success\",\"data\":null}',3),(24,NULL,'2025-06-02 21:11:31','org.example.controller.DeptController','add','[Dept(id=null, name=沙增乐部, 
createTime=2025-06-02T21:11:31.464335100, updateTime=2025-06-02T21:11:31.464335100)]','{\"code\":1,\"msg\":\"success\",\"data\":null}',24),(25,NULL,'2025-06-02 21:11:44','org.example.controller.DeptController','delete','[23]','{\"code\":1,\"msg\":\"success\",\"data\":null}',9),(26,NULL,'2025-06-02 21:12:13','org.example.controller.DeptController','add','[Dept(id=null, name=456, createTime=2025-06-02T21:12:12.517675600, updateTime=2025-06-02T21:12:12.517675600)]','{\"code\":1,\"msg\":\"success\",\"data\":null}',4),(27,NULL,'2025-06-02 21:17:43','org.example.controller.DeptController','delete','[24]','{\"code\":1,\"msg\":\"success\",\"data\":null}',5); /*!40000 ALTER TABLE `operate_log` ENABLE KEYS */; UNLOCK TABLES; -- -- Table structure for table `student` -- DROP TABLE IF EXISTS `student`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!50503 SET character_set_client = utf8mb4 */; CREATE TABLE `student` ( `id` int unsigned NOT NULL AUTO_INCREMENT COMMENT 'ID,主键', `name` varchar(10) NOT NULL COMMENT '姓名', `no` char(10) NOT NULL COMMENT '学号', `gender` tinyint unsigned NOT NULL COMMENT '性别, 1: 男, 2: 女', `phone` varchar(11) NOT NULL COMMENT '手机号', `id_card` char(18) NOT NULL COMMENT '身份证号', `is_college` tinyint unsigned NOT NULL COMMENT '是否来自于院校, 1:是, 0:否', `address` varchar(100) DEFAULT NULL COMMENT '联系地址', `degree` tinyint unsigned DEFAULT NULL COMMENT '最高学历, 1:初中, 2:高中, 3:大专, 4:本科, 5:硕士, 6:博士', `graduation_date` date DEFAULT NULL COMMENT '毕业时间', `clazz_id` int unsigned NOT NULL COMMENT '班级ID, 关联班级表ID', `violation_count` tinyint unsigned NOT NULL DEFAULT '0' COMMENT '违纪次数', `violation_score` tinyint unsigned NOT NULL DEFAULT '0' COMMENT '违纪扣分', `create_time` datetime DEFAULT NULL COMMENT '创建时间', `update_time` datetime DEFAULT NULL COMMENT '修改时间', PRIMARY KEY (`id`), UNIQUE KEY `no` (`no`), UNIQUE KEY `phone` (`phone`), UNIQUE KEY `id_card` (`id_card`) ) ENGINE=InnoDB AUTO_INCREMENT=19 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci COMMENT='学员表'; 
/*!40101 SET character_set_client = @saved_cs_client */; -- -- Dumping data for table `student` -- LOCK TABLES `student` WRITE; /*!40000 ALTER TABLE `student` DISABLE KEYS */; INSERT INTO `student` VALUES (1,'段誉','2022000001',1,'18800000001','110120000300200001',1,'北京市昌平区建材城西路1号',1,'2021-07-01',2,0,0,'2024-11-14 21:22:19','2024-11-15 16:20:59'),(2,'萧峰','2022000002',1,'18800210003','110120000300200002',1,'北京市昌平区建材城西路2号',2,'2022-07-01',1,0,0,'2024-11-14 21:22:19','2024-11-14 21:22:19'),(3,'虚竹','2022000003',1,'18800013001','110120000300200003',1,'北京市昌平区建材城西路3号',2,'2024-07-01',1,0,0,'2024-11-14 21:22:19','2024-11-14 21:22:19'),(4,'萧远山','2022000004',1,'18800003211','110120000300200004',1,'北京市昌平区建材城西路4号',3,'2024-07-01',1,0,0,'2024-11-14 21:22:19','2024-11-14 21:22:19'),(5,'阿朱','2022000005',2,'18800160002','110120000300200005',1,'北京市昌平区建材城西路5号',4,'2020-07-01',1,0,0,'2024-11-14 21:22:19','2024-11-14 21:22:19'),(6,'阿紫','2022000006',2,'18800000034','110120000300200006',1,'北京市昌平区建材城西路6号',4,'2021-07-01',2,0,0,'2024-11-14 21:22:19','2024-11-14 21:22:19'),(7,'游坦之','2022000007',1,'18800000067','110120000300200007',1,'北京市昌平区建材城西路7号',4,'2022-07-01',2,0,0,'2024-11-14 21:22:19','2024-11-14 21:22:19'),(8,'康敏','2022000008',2,'18800000077','110120000300200008',1,'北京市昌平区建材城西路8号',5,'2024-07-01',2,0,0,'2024-11-14 21:22:19','2024-11-14 21:22:19'),(9,'徐长老','2022000009',1,'18800000341','110120000300200009',1,'北京市昌平区建材城西路9号',3,'2024-07-01',2,0,0,'2024-11-14 21:22:19','2024-11-14 21:22:19'),(10,'云中鹤','2022000010',1,'18800006571','110120000300200010',1,'北京市昌平区建材城西路10号',2,'2020-07-01',2,0,0,'2024-11-14 21:22:19','2024-11-14 21:22:19'),(11,'钟万仇','2022000011',1,'18800000391','110120000300200011',1,'北京市昌平区建材城西路11号',4,'2021-07-01',1,0,0,'2024-11-14 21:22:19','2024-11-15 16:21:24'),(12,'崔百泉','2022000012',1,'18800000781','110120000300200018',1,'北京市昌平区建材城西路12号',4,'2022-07-05',3,6,17,'2024-11-14 21:22:19','2024-12-13 
14:33:58'),(13,'耶律洪基','2022000013',1,'18800008901','110120000300200013',1,'北京市昌平区建材城西路13号',4,'2024-07-01',2,0,0,'2024-11-14 21:22:19','2024-11-15 16:21:21'),(14,'天山童姥','2022000014',2,'18800009201','110120000300200014',1,'北京市昌平区建材城西路14号',4,'2024-07-01',1,0,0,'2024-11-14 21:22:19','2024-11-15 16:21:17'),(15,'刘竹庄','2022000015',1,'18800009401','110120000300200015',1,'北京市昌平区建材城西路15号',3,'2020-07-01',4,0,0,'2024-11-14 21:22:19','2024-11-14 21:22:19'),(16,'李春来','2022000016',1,'18800008501','110120000300200016',1,'北京市昌平区建材城西路16号',4,'2021-07-01',4,0,0,'2024-11-14 21:22:19','2024-11-14 21:22:19'),(17,'王语嫣','2022000017',2,'18800007601','110120000300200017',1,'北京市昌平区建材城西路17号',2,'2022-07-01',4,0,0,'2024-11-14 21:22:19','2024-11-14 21:22:19'),(18,'郑成功','2024001101',1,'13309092345','110110110110110110',0,'北京市昌平区回龙观街道88号',5,'2021-07-01',3,2,7,'2024-11-15 16:26:18','2024-11-15 16:40:10'); /*!40000 ALTER TABLE `student` ENABLE KEYS */; UNLOCK TABLES; /*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */; /*!40101 SET SQL_MODE=@OLD_SQL_MODE */; /*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */; /*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */; /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; /*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; -- Dump completed on 2025-06-19 20:26:22 为什么在Navicat Premium 15中运行不了
06-20
2025-07-02 11:15:25,551 INFO - task run command: sudo -u hadoop -E bash /tmp/dolphinscheduler/exec/process/hadoop/16836554651104/18167664743392_10/32581/56672/32581_56672.command 2025-07-02 11:15:25,552 INFO - process start, process id is: 1190 2025-07-02 11:15:26,553 INFO - -> /usr/lib/dolphinscheduler/worker-server/conf/dolphinscheduler_env.sh: line 23: export: `zookeeper.quorum=': not a valid identifier /usr/lib/dolphinscheduler/worker-server/conf/dolphinscheduler_env.sh: line 23: export: `dominos-usdp-fun01:2181,dominos-usdp-fun02:2181,dominos-usdp-fun03:2181': not a valid identifier 2025-07-02 11:15:31,554 INFO - -> SLF4J: Class path contains multiple SLF4J bindings. SLF4J: Found binding in [jar:file:/usr/lib/hadoop/lib/slf4j-reload4j-1.7.36.jar!/org/slf4j/impl/StaticLoggerBinder.class] SLF4J: Found binding in [jar:file:/usr/lib/hive/lib/log4j-slf4j-impl-2.17.1.jar!/org/slf4j/impl/StaticLoggerBinder.class] SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation. 
SLF4J: Actual binding is of type [org.slf4j.impl.Reload4jLoggerFactory] 2025-07-02 11:15:31,226 INFO [main] conf.HiveConf (HiveConf.java:findConfigFile(187)) - Found configuration file file:/etc/hive/conf/hive-site.xml 2025-07-02 11:15:32,554 INFO - -> 2025-07-02 11:15:32,428 main ERROR Cannot access RandomAccessFile java.io.FileNotFoundException: /data/log/hive/hive.log (Permission denied) java.io.FileNotFoundException: /data/log/hive/hive.log (Permission denied) at java.io.RandomAccessFile.open0(Native Method) at java.io.RandomAccessFile.open(RandomAccessFile.java:316) at java.io.RandomAccessFile.<init>(RandomAccessFile.java:243) at java.io.RandomAccessFile.<init>(RandomAccessFile.java:124) at org.apache.logging.log4j.core.appender.rolling.RollingRandomAccessFileManager$RollingRandomAccessFileManagerFactory.createManager(RollingRandomAccessFileManager.java:232) at org.apache.logging.log4j.core.appender.rolling.RollingRandomAccessFileManager$RollingRandomAccessFileManagerFactory.createManager(RollingRandomAccessFileManager.java:204) at org.apache.logging.log4j.core.appender.AbstractManager.getManager(AbstractManager.java:114) at org.apache.logging.log4j.core.appender.OutputStreamManager.getManager(OutputStreamManager.java:100) at org.apache.logging.log4j.core.appender.rolling.RollingRandomAccessFileManager.getRollingRandomAccessFileManager(RollingRandomAccessFileManager.java:107) at org.apache.logging.log4j.core.appender.RollingRandomAccessFileAppender$Builder.build(RollingRandomAccessFileAppender.java:132) at org.apache.logging.log4j.core.appender.RollingRandomAccessFileAppender$Builder.build(RollingRandomAccessFileAppender.java:53) at org.apache.logging.log4j.core.config.plugins.util.PluginBuilder.build(PluginBuilder.java:122) at org.apache.logging.log4j.core.config.AbstractConfiguration.createPluginObject(AbstractConfiguration.java:1120) at org.apache.logging.log4j.core.config.AbstractConfiguration.createConfiguration(AbstractConfiguration.java:1045) at 
org.apache.logging.log4j.core.config.AbstractConfiguration.createConfiguration(AbstractConfiguration.java:1037) at org.apache.logging.log4j.core.config.AbstractConfiguration.doConfigure(AbstractConfiguration.java:651) at org.apache.logging.log4j.core.config.AbstractConfiguration.initialize(AbstractConfiguration.java:247) at org.apache.logging.log4j.core.config.AbstractConfiguration.start(AbstractConfiguration.java:293) at org.apache.logging.log4j.core.LoggerContext.setConfiguration(LoggerContext.java:626) at org.apache.logging.log4j.core.LoggerContext.start(LoggerContext.java:302) at org.apache.logging.log4j.core.async.AsyncLoggerContext.start(AsyncLoggerContext.java:87) at org.apache.logging.log4j.core.impl.Log4jContextFactory.getContext(Log4jContextFactory.java:242) at org.apache.logging.log4j.core.config.Configurator.initialize(Configurator.java:159) at org.apache.logging.log4j.core.config.Configurator.initialize(Configurator.java:131) at org.apache.logging.log4j.core.config.Configurator.initialize(Configurator.java:101) at org.apache.logging.log4j.core.config.Configurator.initialize(Configurator.java:210) at org.apache.hadoop.hive.common.LogUtils.initHiveLog4jDefault(LogUtils.java:173) at org.apache.hadoop.hive.common.LogUtils.initHiveLog4jCommon(LogUtils.java:106) at org.apache.hadoop.hive.common.LogUtils.initHiveLog4jCommon(LogUtils.java:98) at org.apache.hadoop.hive.common.LogUtils.initHiveLog4j(LogUtils.java:81) at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:699) at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:683) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.hadoop.util.RunJar.run(RunJar.java:323) at org.apache.hadoop.util.RunJar.main(RunJar.java:236) 2025-07-02 11:15:32,430 main 
ERROR Could not create plugin of type class org.apache.logging.log4j.core.appender.RollingRandomAccessFileAppender for element RollingRandomAccessFile: java.lang.IllegalStateException: ManagerFactory [org.apache.logging.log4j.core.appender.rolling.RollingRandomAccessFileManager$RollingRandomAccessFileManagerFactory@5ef6ae06] unable to create manager for [/data/log/hive/hive.log] with data [org.apache.logging.log4j.core.appender.rolling.RollingRandomAccessFileManager$FactoryData@55dfebeb] java.lang.IllegalStateException: ManagerFactory [org.apache.logging.log4j.core.appender.rolling.RollingRandomAccessFileManager$RollingRandomAccessFileManagerFactory@5ef6ae06] unable to create manager for [/data/log/hive/hive.log] with data [org.apache.logging.log4j.core.appender.rolling.RollingRandomAccessFileManager$FactoryData@55dfebeb] at org.apache.logging.log4j.core.appender.AbstractManager.getManager(AbstractManager.java:116) at org.apache.logging.log4j.core.appender.OutputStreamManager.getManager(OutputStreamManager.java:100) at org.apache.logging.log4j.core.appender.rolling.RollingRandomAccessFileManager.getRollingRandomAccessFileManager(RollingRandomAccessFileManager.java:107) at org.apache.logging.log4j.core.appender.RollingRandomAccessFileAppender$Builder.build(RollingRandomAccessFileAppender.java:132) at org.apache.logging.log4j.core.appender.RollingRandomAccessFileAppender$Builder.build(RollingRandomAccessFileAppender.java:53) at org.apache.logging.log4j.core.config.plugins.util.PluginBuilder.build(PluginBuilder.java:122) at org.apache.logging.log4j.core.config.AbstractConfiguration.createPluginObject(AbstractConfiguration.java:1120) at org.apache.logging.log4j.core.config.AbstractConfiguration.createConfiguration(AbstractConfiguration.java:1045) at org.apache.logging.log4j.core.config.AbstractConfiguration.createConfiguration(AbstractConfiguration.java:1037) at org.apache.logging.log4j.core.config.AbstractConfiguration.doConfigure(AbstractConfiguration.java:651) at 
org.apache.logging.log4j.core.config.AbstractConfiguration.initialize(AbstractConfiguration.java:247) at org.apache.logging.log4j.core.config.AbstractConfiguration.start(AbstractConfiguration.java:293) at org.apache.logging.log4j.core.LoggerContext.setConfiguration(LoggerContext.java:626) at org.apache.logging.log4j.core.LoggerContext.start(LoggerContext.java:302) at org.apache.logging.log4j.core.async.AsyncLoggerContext.start(AsyncLoggerContext.java:87) at org.apache.logging.log4j.core.impl.Log4jContextFactory.getContext(Log4jContextFactory.java:242) at org.apache.logging.log4j.core.config.Configurator.initialize(Configurator.java:159) at org.apache.logging.log4j.core.config.Configurator.initialize(Configurator.java:131) at org.apache.logging.log4j.core.config.Configurator.initialize(Configurator.java:101) at org.apache.logging.log4j.core.config.Configurator.initialize(Configurator.java:210) at org.apache.hadoop.hive.common.LogUtils.initHiveLog4jDefault(LogUtils.java:173) at org.apache.hadoop.hive.common.LogUtils.initHiveLog4jCommon(LogUtils.java:106) at org.apache.hadoop.hive.common.LogUtils.initHiveLog4jCommon(LogUtils.java:98) at org.apache.hadoop.hive.common.LogUtils.initHiveLog4j(LogUtils.java:81) at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:699) at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:683) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.hadoop.util.RunJar.run(RunJar.java:323) at org.apache.hadoop.util.RunJar.main(RunJar.java:236) 2025-07-02 11:15:32,431 main ERROR Unable to invoke factory method in class org.apache.logging.log4j.core.appender.RollingRandomAccessFileAppender for element RollingRandomAccessFile: java.lang.IllegalStateException: No factory method found for class 
org.apache.logging.log4j.core.appender.RollingRandomAccessFileAppender java.lang.IllegalStateException: No factory method found for class org.apache.logging.log4j.core.appender.RollingRandomAccessFileAppender at org.apache.logging.log4j.core.config.plugins.util.PluginBuilder.findFactoryMethod(PluginBuilder.java:236) at org.apache.logging.log4j.core.config.plugins.util.PluginBuilder.build(PluginBuilder.java:134) at org.apache.logging.log4j.core.config.AbstractConfiguration.createPluginObject(AbstractConfiguration.java:1120) at org.apache.logging.log4j.core.config.AbstractConfiguration.createConfiguration(AbstractConfiguration.java:1045) at org.apache.logging.log4j.core.config.AbstractConfiguration.createConfiguration(AbstractConfiguration.java:1037) at org.apache.logging.log4j.core.config.AbstractConfiguration.doConfigure(AbstractConfiguration.java:651) at org.apache.logging.log4j.core.config.AbstractConfiguration.initialize(AbstractConfiguration.java:247) at org.apache.logging.log4j.core.config.AbstractConfiguration.start(AbstractConfiguration.java:293) at org.apache.logging.log4j.core.LoggerContext.setConfiguration(LoggerContext.java:626) at org.apache.logging.log4j.core.LoggerContext.start(LoggerContext.java:302) at org.apache.logging.log4j.core.async.AsyncLoggerContext.start(AsyncLoggerContext.java:87) at org.apache.logging.log4j.core.impl.Log4jContextFactory.getContext(Log4jContextFactory.java:242) at org.apache.logging.log4j.core.config.Configurator.initialize(Configurator.java:159) at org.apache.logging.log4j.core.config.Configurator.initialize(Configurator.java:131) at org.apache.logging.log4j.core.config.Configurator.initialize(Configurator.java:101) at org.apache.logging.log4j.core.config.Configurator.initialize(Configurator.java:210) at org.apache.hadoop.hive.common.LogUtils.initHiveLog4jDefault(LogUtils.java:173) at org.apache.hadoop.hive.common.LogUtils.initHiveLog4jCommon(LogUtils.java:106) at 
org.apache.hadoop.hive.common.LogUtils.initHiveLog4jCommon(LogUtils.java:98) at org.apache.hadoop.hive.common.LogUtils.initHiveLog4j(LogUtils.java:81) at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:699) at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:683) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.hadoop.util.RunJar.run(RunJar.java:323) at org.apache.hadoop.util.RunJar.main(RunJar.java:236) 2025-07-02 11:15:32,432 main ERROR Null object returned for RollingRandomAccessFile in Appenders. 2025-07-02 11:15:32,432 main ERROR Unable to locate appender "DRFA" for logger config "root" Hive Session ID = 63fc22ae-87a3-4d13-b59e-6ea5a99a9941 2025-07-02 11:15:32,528 INFO [main] SessionState (SessionState.java:printInfo(1227)) - Hive Session ID = 63fc22ae-87a3-4d13-b59e-6ea5a99a9941 2025-07-02 11:15:33,555 INFO - -> Logging initialized using configuration in file:/etc/hive/conf/hive-log4j2.properties Async: true 2025-07-02 11:15:32,577 INFO [main] SessionState (SessionState.java:printInfo(1227)) - Logging initialized using configuration in file:/etc/hive/conf/hive-log4j2.properties Async: true 2025-07-02 11:15:34,556 INFO - -> 2025-07-02 11:15:33,630 INFO [main] session.SessionState (SessionState.java:createPath(790)) - Created HDFS directory: /tmp/hive/hadoop/63fc22ae-87a3-4d13-b59e-6ea5a99a9941 2025-07-02 11:15:33,652 INFO [main] session.SessionState (SessionState.java:createPath(790)) - Created local directory: /tmp/hadoop/63fc22ae-87a3-4d13-b59e-6ea5a99a9941 2025-07-02 11:15:33,659 INFO [main] session.SessionState (SessionState.java:createPath(790)) - Created HDFS directory: /tmp/hive/hadoop/63fc22ae-87a3-4d13-b59e-6ea5a99a9941/_tmp_space.db 2025-07-02 11:15:33,691 INFO [main] tez.TezSessionState 
(TezSessionState.java:openInternal(277)) - User of session id 63fc22ae-87a3-4d13-b59e-6ea5a99a9941 is hadoop 2025-07-02 11:15:33,714 INFO [main] tez.DagUtils (DagUtils.java:localizeResource(1159)) - Localizing resource because it does not exist: file:/usr/lib/hive/auxlib/hudi-hadoop-mr-bundle-0.13.0.jar to dest: hdfs://dominos-usdp-v3-fun/tmp/hive/hadoop/_tez_session_dir/63fc22ae-87a3-4d13-b59e-6ea5a99a9941-resources/hudi-hadoop-mr-bundle-0.13.0.jar 2025-07-02 11:15:34,365 INFO [main] tez.DagUtils (DagUtils.java:createLocalResource(842)) - Resource modification time: 1751426134293 for hdfs://dominos-usdp-v3-fun/tmp/hive/hadoop/_tez_session_dir/63fc22ae-87a3-4d13-b59e-6ea5a99a9941-resources/hudi-hadoop-mr-bundle-0.13.0.jar 2025-07-02 11:15:34,384 INFO [main] tez.DagUtils (DagUtils.java:localizeResource(1159)) - Localizing resource because it does not exist: file:/usr/lib/hive/auxlib/hudi-hive-sync-bundle-0.13.0.jar to dest: hdfs://dominos-usdp-v3-fun/tmp/hive/hadoop/_tez_session_dir/63fc22ae-87a3-4d13-b59e-6ea5a99a9941-resources/hudi-hive-sync-bundle-0.13.0.jar 2025-07-02 11:15:35,557 INFO - -> 2025-07-02 11:15:34,776 INFO [main] tez.DagUtils (DagUtils.java:createLocalResource(842)) - Resource modification time: 1751426134737 for hdfs://dominos-usdp-v3-fun/tmp/hive/hadoop/_tez_session_dir/63fc22ae-87a3-4d13-b59e-6ea5a99a9941-resources/hudi-hive-sync-bundle-0.13.0.jar 2025-07-02 11:15:34,851 INFO [main] tez.TezSessionState (TezSessionState.java:openInternal(288)) - Created new resources: null 2025-07-02 11:15:34,854 INFO [main] tez.DagUtils (DagUtils.java:getHiveJarDirectory(1058)) - Jar dir is null / directory doesn't exist. 
Choosing HIVE_INSTALL_DIR - /user/hadoop/.hiveJars 2025-07-02 11:15:35,179 INFO [main] tez.TezSessionState (TezSessionState.java:getSha(854)) - Computed sha: 3420a6126cfea97266fe35b708da5d5f95a5b158cad390dc4124081a39cf906f for file: file:/usr/lib/hive/lib/hive-exec-3.1.3.jar of length: 40.36MB in 321 ms 2025-07-02 11:15:35,191 INFO [main] tez.DagUtils (DagUtils.java:createLocalResource(842)) - Resource modification time: 1715146950410 for hdfs://dominos-usdp-v3-fun/user/hadoop/.hiveJars/hive-exec-3.1.3-3420a6126cfea97266fe35b708da5d5f95a5b158cad390dc4124081a39cf906f.jar 2025-07-02 11:15:35,240 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.task.io.sort.mb, mr initial value=100, tez(original):tez.runtime.io.sort.mb=null, tez(final):tez.runtime.io.sort.mb=100 2025-07-02 11:15:35,241 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.reduce.shuffle.read.timeout, mr initial value=180000, tez(original):tez.runtime.shuffle.read.timeout=null, tez(final):tez.runtime.shuffle.read.timeout=180000 2025-07-02 11:15:35,241 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.job.speculative.minimum-allowed-tasks, mr initial value=10, tez(original):tez.am.minimum.allowed.speculative.tasks=null, tez(final):tez.am.minimum.allowed.speculative.tasks=10 2025-07-02 11:15:35,241 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.ifile.readahead.bytes, mr initial value=4194304, tez(original):tez.runtime.ifile.readahead.bytes=null, tez(final):tez.runtime.ifile.readahead.bytes=4194304 2025-07-02 11:15:35,241 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.shuffle.ssl.enabled, mr initial value=false, tez(original):tez.runtime.shuffle.ssl.enable=null, 
tez(final):tez.runtime.shuffle.ssl.enable=false 2025-07-02 11:15:35,241 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.map.sort.spill.percent, mr initial value=0.80, tez(original):tez.runtime.sort.spill.percent=null, tez(final):tez.runtime.sort.spill.percent=0.80 2025-07-02 11:15:35,241 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.job.speculative.speculative-cap-running-tasks, mr initial value=0.1, tez(original):tez.am.proportion.running.tasks.speculatable=null, tez(final):tez.am.proportion.running.tasks.speculatable=0.1 2025-07-02 11:15:35,241 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.job.speculative.speculative-cap-total-tasks, mr initial value=0.01, tez(original):tez.am.proportion.total.tasks.speculatable=null, tez(final):tez.am.proportion.total.tasks.speculatable=0.01 2025-07-02 11:15:35,241 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.ifile.readahead, mr initial value=true, tez(original):tez.runtime.ifile.readahead=null, tez(final):tez.runtime.ifile.readahead=true 2025-07-02 11:15:35,241 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.reduce.shuffle.merge.percent, mr initial value=0.66, tez(original):tez.runtime.shuffle.merge.percent=null, tez(final):tez.runtime.shuffle.merge.percent=0.66 2025-07-02 11:15:35,241 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.reduce.shuffle.parallelcopies, mr initial value=50, tez(original):tez.runtime.shuffle.parallel.copies=null, tez(final):tez.runtime.shuffle.parallel.copies=50 2025-07-02 11:15:35,241 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: 
mr(unset):mapreduce.job.speculative.retry-after-speculate, mr initial value=15000, tez(original):tez.am.soonest.retry.after.speculate=null, tez(final):tez.am.soonest.retry.after.speculate=15000 2025-07-02 11:15:35,241 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.job.reduce.slowstart.completedmaps, mr initial value=0.95, tez(original):tez.shuffle-vertex-manager.min-src-fraction=null, tez(final):tez.shuffle-vertex-manager.min-src-fraction=0.95 2025-07-02 11:15:35,241 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.reduce.shuffle.memory.limit.percent, mr initial value=0.25, tez(original):tez.runtime.shuffle.memory.limit.percent=null, tez(final):tez.runtime.shuffle.memory.limit.percent=0.25 2025-07-02 11:15:35,241 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.job.speculative.retry-after-no-speculate, mr initial value=1000, tez(original):tez.am.soonest.retry.after.no.speculate=null, tez(final):tez.am.soonest.retry.after.no.speculate=1000 2025-07-02 11:15:35,241 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.task.io.sort.factor, mr initial value=100, tez(original):tez.runtime.io.sort.factor=null, tez(final):tez.runtime.io.sort.factor=100 2025-07-02 11:15:35,241 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.map.output.compress, mr initial value=false, tez(original):tez.runtime.compress=null, tez(final):tez.runtime.compress=false 2025-07-02 11:15:35,242 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.reduce.shuffle.connect.timeout, mr initial value=180000, tez(original):tez.runtime.shuffle.connect.timeout=null, tez(final):tez.runtime.shuffle.connect.timeout=180000 2025-07-02 
11:15:35,242 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.reduce.input.buffer.percent, mr initial value=0.0, tez(original):tez.runtime.task.input.post-merge.buffer.percent=null, tez(final):tez.runtime.task.input.post-merge.buffer.percent=0.0 2025-07-02 11:15:35,242 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.map.output.compress.codec, mr initial value=org.apache.hadoop.io.compress.DefaultCodec, tez(original):tez.runtime.compress.codec=null, tez(final):tez.runtime.compress.codec=org.apache.hadoop.io.compress.DefaultCodec 2025-07-02 11:15:35,242 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.task.merge.progress.records, mr initial value=10000, tez(original):tez.runtime.merge.progress.records=null, tez(final):tez.runtime.merge.progress.records=10000 2025-07-02 11:15:35,242 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):map.sort.class, mr initial value=org.apache.hadoop.util.QuickSort, tez(original):tez.runtime.internal.sorter.class=null, tez(final):tez.runtime.internal.sorter.class=org.apache.hadoop.util.QuickSort 2025-07-02 11:15:35,242 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.reduce.shuffle.input.buffer.percent, mr initial value=0.70, tez(original):tez.runtime.shuffle.fetch.buffer.percent=null, tez(final):tez.runtime.shuffle.fetch.buffer.percent=0.70 2025-07-02 11:15:35,242 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.job.counters.max, mr initial value=120, tez(original):tez.counters.max=null, tez(final):tez.counters.max=120 2025-07-02 11:15:35,242 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: 
mr(unset):mapreduce.job.hdfs-servers, mr initial value=hdfs://dominos-usdp-v3-fun, tez(original):tez.job.fs-servers=null, tez(final):tez.job.fs-servers=hdfs://dominos-usdp-v3-fun 2025-07-02 11:15:35,242 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.job.queuename, mr initial value=default, tez(original):tez.queue.name=default, tez(final):tez.queue.name=default 2025-07-02 11:15:35,242 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.job.maxtaskfailures.per.tracker, mr initial value=3, tez(original):tez.am.maxtaskfailures.per.node=null, tez(final):tez.am.maxtaskfailures.per.node=3 2025-07-02 11:15:35,242 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.task.timeout, mr initial value=600000, tez(original):tez.task.timeout-ms=null, tez(final):tez.task.timeout-ms=600000 2025-07-02 11:15:35,242 INFO [main] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):yarn.app.mapreduce.am.job.task.listener.thread-count, mr initial value=30, tez(original):tez.am.task.listener.thread-count=null, tez(final):tez.am.task.listener.thread-count=30 2025-07-02 11:15:35,261 INFO [main] sqlstd.SQLStdHiveAccessController (SQLStdHiveAccessController.java:<init>(96)) - Created SQLStdHiveAccessController for session context : HiveAuthzSessionContext [sessionString=63fc22ae-87a3-4d13-b59e-6ea5a99a9941, clientType=HIVECLI] 2025-07-02 11:15:35,263 WARN [main] session.SessionState (SessionState.java:setAuthorizerV2Config(950)) - METASTORE_FILTER_HOOK will be ignored, since hive.security.authorization.manager is set to instance of HiveAuthorizerFactory. 
2025-07-02 11:15:35,328 INFO [main] metastore.HiveMetaStoreClient (HiveMetaStoreClient.java:open(441)) - Trying to connect to metastore with URI thrift://dc3-dominos-usdp-fun01:9083 2025-07-02 11:15:35,350 INFO [main] metastore.HiveMetaStoreClient (HiveMetaStoreClient.java:open(517)) - Opened a connection to metastore, current connections: 1 2025-07-02 11:15:35,358 INFO [main] metastore.HiveMetaStoreClient (HiveMetaStoreClient.java:open(570)) - Connected to metastore. 2025-07-02 11:15:35,358 INFO [main] metastore.RetryingMetaStoreClient (RetryingMetaStoreClient.java:<init>(97)) - RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hadoop (auth:SIMPLE) retries=1 delay=1 lifetime=0 2025-07-02 11:15:36,558 INFO - -> 2025-07-02 11:15:35,864 INFO [main] counters.Limits (Limits.java:init(61)) - Counter limits initialized with parameters: GROUP_NAME_MAX=256, MAX_GROUPS=500, COUNTER_NAME_MAX=64, MAX_COUNTERS=1200 2025-07-02 11:15:35,864 INFO [main] counters.Limits (Limits.java:init(61)) - Counter limits initialized with parameters: GROUP_NAME_MAX=256, MAX_GROUPS=500, COUNTER_NAME_MAX=64, MAX_COUNTERS=120 2025-07-02 11:15:35,864 INFO [main] client.TezClient (TezClient.java:<init>(210)) - Tez Client Version: [ component=tez-api, version=0.10.2, revision=22f46fe39a7cf99b24275304e99867b9135caba2, SCM-URL=scm:git:https://gitbox.apache.org/repos/asf/tez.git, buildTime=2023-02-08T02:24:56Z, buildUser=jenkins, buildJavaVersion=1.8.0_362 ] 2025-07-02 11:15:35,864 INFO [main] tez.TezSessionState (TezSessionState.java:openInternal(363)) - Opening new Tez Session (id: 63fc22ae-87a3-4d13-b59e-6ea5a99a9941, scratch dir: hdfs://dominos-usdp-v3-fun/tmp/hive/hadoop/_tez_session_dir/63fc22ae-87a3-4d13-b59e-6ea5a99a9941) 2025-07-02 11:15:35,884 INFO [main] conf.HiveConf (HiveConf.java:getLogIdVar(5037)) - Using the default value passed in for log id: 63fc22ae-87a3-4d13-b59e-6ea5a99a9941 2025-07-02 11:15:35,884 INFO [main] session.SessionState 
(SessionState.java:updateThreadName(441)) - Updating thread name to 63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main 2025-07-02 11:15:35,954 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] metastore.HiveMetaStoreClient (HiveMetaStoreClient.java:isCompatibleWith(346)) - Mestastore configuration metastore.filter.hook changed from org.apache.hadoop.hive.ql.security.authorization.plugin.AuthorizationMetaStoreFilterHook to org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl 2025-07-02 11:15:35,958 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] metastore.HiveMetaStoreClient (HiveMetaStoreClient.java:close(600)) - Closed a connection to metastore, current connections: 0 2025-07-02 11:15:36,152 INFO [Tez session start thread] impl.TimelineReaderClientImpl (TimelineReaderClientImpl.java:serviceInit(97)) - Initialized TimelineReader URI=http://dc3-dominos-usdp-fun02:8198/ws/v2/timeline/, clusterId=dominos-usdp-v3-fun 2025-07-02 11:15:36,342 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] metastore.HiveMetaStoreClient (HiveMetaStoreClient.java:open(441)) - Trying to connect to metastore with URI thrift://dc3-dominos-usdp-fun01:9083 2025-07-02 11:15:36,344 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] metastore.HiveMetaStoreClient (HiveMetaStoreClient.java:open(517)) - Opened a connection to metastore, current connections: 1 2025-07-02 11:15:36,345 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] metastore.HiveMetaStoreClient (HiveMetaStoreClient.java:open(570)) - Connected to metastore. 
2025-07-02 11:15:36,345 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] metastore.RetryingMetaStoreClient (RetryingMetaStoreClient.java:<init>(97)) - RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hadoop (auth:SIMPLE) retries=1 delay=1 lifetime=0 Hive Session ID = 4caadf81-0f27-469e-8de0-87e177d910e3 2025-07-02 11:15:36,366 INFO [pool-7-thread-1] SessionState (SessionState.java:printInfo(1227)) - Hive Session ID = 4caadf81-0f27-469e-8de0-87e177d910e3 2025-07-02 11:15:36,385 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] conf.HiveConf (HiveConf.java:getLogIdVar(5037)) - Using the default value passed in for log id: 63fc22ae-87a3-4d13-b59e-6ea5a99a9941 2025-07-02 11:15:36,386 INFO [pool-7-thread-1] session.SessionState (SessionState.java:createPath(790)) - Created HDFS directory: /tmp/hive/hadoop/4caadf81-0f27-469e-8de0-87e177d910e3 2025-07-02 11:15:36,412 INFO [pool-7-thread-1] session.SessionState (SessionState.java:createPath(790)) - Created local directory: /tmp/hadoop/4caadf81-0f27-469e-8de0-87e177d910e3 2025-07-02 11:15:36,420 INFO [pool-7-thread-1] session.SessionState (SessionState.java:createPath(790)) - Created HDFS directory: /tmp/hive/hadoop/4caadf81-0f27-469e-8de0-87e177d910e3/_tmp_space.db 2025-07-02 11:15:36,420 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:openInternal(277)) - User of session id 4caadf81-0f27-469e-8de0-87e177d910e3 is hadoop 2025-07-02 11:15:36,441 INFO [pool-7-thread-1] tez.DagUtils (DagUtils.java:localizeResource(1159)) - Localizing resource because it does not exist: file:/usr/lib/hive/auxlib/hudi-hadoop-mr-bundle-0.13.0.jar to dest: hdfs://dominos-usdp-v3-fun/tmp/hive/hadoop/_tez_session_dir/4caadf81-0f27-469e-8de0-87e177d910e3-resources/hudi-hadoop-mr-bundle-0.13.0.jar 2025-07-02 11:15:36,455 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver (Driver.java:compile(554)) - Compiling 
command(queryId=hadoop_20250702111536_a8fe6b15-57e0-4288-895c-6d4f8fd58503): ALTER TABLE ddp_dmo_dwd.DWD_OrdCusSrvDetail DROP IF EXISTS PARTITION(DT='') 2025-07-02 11:15:36,484 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] hooks.ATSHook (ATSHook.java:<init>(146)) - Created ATS Hook 2025-07-02 11:15:36,484 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] hooks.ATSHook (ATSHook.java:<init>(146)) - Created ATS Hook 2025-07-02 11:15:36,484 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] hooks.ATSHook (ATSHook.java:<init>(146)) - Created ATS Hook 2025-07-02 11:15:37,561 INFO - -> 2025-07-02 11:15:36,669 INFO [Tez session start thread] client.AHSProxy (AHSProxy.java:createAHSProxy(43)) - Connecting to Application History server at dc3-dominos-usdp-fun01/10.30.10.60:10200 2025-07-02 11:15:36,686 INFO [Tez session start thread] client.TezClient (TezClient.java:start(388)) - Session mode. Starting session. 2025-07-02 11:15:36,727 INFO [Tez session start thread] client.ConfiguredRMFailoverProxyProvider (ConfiguredRMFailoverProxyProvider.java:performFailover(100)) - Failing over to rm-dc3-dominos-usdp-fun01 2025-07-02 11:15:36,809 INFO [Tez session start thread] client.TezClientUtils (TezClientUtils.java:setupTezJarsLocalResources(180)) - Using tez.lib.uris value from configuration: hdfs:////dominos-usdp-v3-fun/tez/tez.tar.gz 2025-07-02 11:15:36,809 INFO [Tez session start thread] client.TezClientUtils (TezClientUtils.java:setupTezJarsLocalResources(182)) - Using tez.lib.uris.classpath value from configuration: null 2025-07-02 11:15:36,880 INFO [Tez session start thread] client.TezClient (TezCommonUtils.java:createTezSystemStagingPath(123)) - Tez system stage directory hdfs://dominos-usdp-v3-fun/tmp/hive/hadoop/_tez_session_dir/63fc22ae-87a3-4d13-b59e-6ea5a99a9941/.tez/application_1740624029612_5078 doesn't exist and is created 2025-07-02 11:15:36,913 INFO [Tez session start thread] conf.Configuration (Configuration.java:getConfResourceAsInputStream(2845)) - 
resource-types.xml not found 2025-07-02 11:15:36,914 INFO [Tez session start thread] resource.ResourceUtils (ResourceUtils.java:addResourcesFileToConf(476)) - Unable to find 'resource-types.xml'. 2025-07-02 11:15:36,948 INFO [Tez session start thread] Configuration.deprecation (Configuration.java:logDeprecation(1441)) - yarn.resourcemanager.system-metrics-publisher.enabled is deprecated. Instead, use yarn.system-metrics-publisher.enabled 2025-07-02 11:15:37,000 INFO [pool-7-thread-1] tez.DagUtils (DagUtils.java:createLocalResource(842)) - Resource modification time: 1751426136955 for hdfs://dominos-usdp-v3-fun/tmp/hive/hadoop/_tez_session_dir/4caadf81-0f27-469e-8de0-87e177d910e3-resources/hudi-hadoop-mr-bundle-0.13.0.jar 2025-07-02 11:15:37,005 INFO [pool-7-thread-1] tez.DagUtils (DagUtils.java:localizeResource(1159)) - Localizing resource because it does not exist: file:/usr/lib/hive/auxlib/hudi-hive-sync-bundle-0.13.0.jar to dest: hdfs://dominos-usdp-v3-fun/tmp/hive/hadoop/_tez_session_dir/4caadf81-0f27-469e-8de0-87e177d910e3-resources/hudi-hive-sync-bundle-0.13.0.jar 2025-07-02 11:15:38,562 INFO - -> 2025-07-02 11:15:37,601 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] metastore.HiveMetaStoreClient (HiveMetaStoreClient.java:isCompatibleWith(346)) - Mestastore configuration metastore.filter.hook changed from org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl to org.apache.hadoop.hive.ql.security.authorization.plugin.AuthorizationMetaStoreFilterHook 2025-07-02 11:15:37,602 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] metastore.HiveMetaStoreClient (HiveMetaStoreClient.java:close(600)) - Closed a connection to metastore, current connections: 0 2025-07-02 11:15:37,603 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver (Driver.java:checkConcurrency(285)) - Concurrency mode is disabled, not creating a lock manager 2025-07-02 11:15:37,610 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] metastore.HiveMetaStoreClient 
(HiveMetaStoreClient.java:open(441)) - Trying to connect to metastore with URI thrift://dc3-dominos-usdp-fun02:9083 2025-07-02 11:15:37,615 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] metastore.HiveMetaStoreClient (HiveMetaStoreClient.java:open(517)) - Opened a connection to metastore, current connections: 1 2025-07-02 11:15:37,620 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] metastore.HiveMetaStoreClient (HiveMetaStoreClient.java:open(570)) - Connected to metastore. 2025-07-02 11:15:37,621 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] metastore.RetryingMetaStoreClient (RetryingMetaStoreClient.java:<init>(97)) - RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hadoop (auth:SIMPLE) retries=1 delay=1 lifetime=0 2025-07-02 11:15:37,676 INFO [Tez session start thread] impl.YarnClientImpl (YarnClientImpl.java:submitApplication(338)) - Submitted application application_1740624029612_5078 2025-07-02 11:15:37,685 INFO [Tez session start thread] client.TezClient (TezClient.java:start(404)) - The url to track the Tez Session: http://dc3-dominos-usdp-fun01:8088/proxy/application_1740624029612_5078/ 2025-07-02 11:15:38,143 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver (Driver.java:compile(666)) - Semantic Analysis Completed (retrial = false) 2025-07-02 11:15:38,145 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver (Driver.java:getSchema(374)) - Returning Hive schema: Schema(fieldSchemas:null, properties:null) 2025-07-02 11:15:38,149 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver (Driver.java:compile(781)) - Completed compiling command(queryId=hadoop_20250702111536_a8fe6b15-57e0-4288-895c-6d4f8fd58503); Time taken: 1.723 seconds 2025-07-02 11:15:38,150 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] reexec.ReExecDriver (ReExecDriver.java:run(156)) - Execution #1 of query 2025-07-02 11:15:38,150 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver 
(Driver.java:checkConcurrency(285)) - Concurrency mode is disabled, not creating a lock manager 2025-07-02 11:15:38,150 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver (Driver.java:execute(2255)) - Executing command(queryId=hadoop_20250702111536_a8fe6b15-57e0-4288-895c-6d4f8fd58503): ALTER TABLE ddp_dmo_dwd.DWD_OrdCusSrvDetail DROP IF EXISTS PARTITION(DT='') 2025-07-02 11:15:38,153 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] hooks.ATSHook (ATSHook.java:setupAtsExecutor(115)) - Creating ATS executor queue with capacity 64 2025-07-02 11:15:38,177 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] impl.TimelineClientImpl (TimelineClientImpl.java:serviceInit(130)) - Timeline service address: dc3-dominos-usdp-fun01:8188 2025-07-02 11:15:38,295 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver (Driver.java:launchTask(2662)) - Starting task [Stage-0:DDL] in serial mode 2025-07-02 11:15:38,414 INFO [ATS Logger 0] hooks.ATSHook (ATSHook.java:createTimelineDomain(155)) - ATS domain created:hive_63fc22ae-87a3-4d13-b59e-6ea5a99a9941(hadoop,hadoop) 2025-07-02 11:15:38,528 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver (Driver.java:execute(2531)) - Completed executing command(queryId=hadoop_20250702111536_a8fe6b15-57e0-4288-895c-6d4f8fd58503); Time taken: 0.378 seconds OK 2025-07-02 11:15:38,528 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver (SessionState.java:printInfo(1227)) - OK 2025-07-02 11:15:38,529 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver (Driver.java:checkConcurrency(285)) - Concurrency mode is disabled, not creating a lock manager Time taken: 2.104 seconds 2025-07-02 11:15:38,529 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] CliDriver (SessionState.java:printInfo(1227)) - Time taken: 2.104 seconds 2025-07-02 11:15:38,530 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] conf.HiveConf (HiveConf.java:getLogIdVar(5037)) - Using the default value passed in for log id: 63fc22ae-87a3-4d13-b59e-6ea5a99a9941 
2025-07-02 11:15:38,530 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] session.SessionState (SessionState.java:resetThreadName(452)) - Resetting thread name to main 2025-07-02 11:15:38,530 INFO [main] conf.HiveConf (HiveConf.java:getLogIdVar(5037)) - Using the default value passed in for log id: 63fc22ae-87a3-4d13-b59e-6ea5a99a9941 2025-07-02 11:15:38,530 INFO [main] session.SessionState (SessionState.java:updateThreadName(441)) - Updating thread name to 63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main 2025-07-02 11:15:38,533 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver (Driver.java:compile(554)) - Compiling command(queryId=hadoop_20250702111538_f08ba63c-7b09-48c8-86fe-f29aa249329c): ALTER TABLE ddp_dmo_dwd.DWD_OrdCusSrvDetail ADD IF NOT EXISTS PARTITION(DT='') 2025-07-02 11:15:38,551 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] hooks.ATSHook (ATSHook.java:<init>(146)) - Created ATS Hook 2025-07-02 11:15:38,551 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] hooks.ATSHook (ATSHook.java:<init>(146)) - Created ATS Hook 2025-07-02 11:15:38,551 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] hooks.ATSHook (ATSHook.java:<init>(146)) - Created ATS Hook 2025-07-02 11:15:38,559 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver (Driver.java:checkConcurrency(285)) - Concurrency mode is disabled, not creating a lock manager 2025-07-02 11:15:39,286 INFO - process has exited. 
execute path:/tmp/dolphinscheduler/exec/process/hadoop/16836554651104/18167664743392_10/32581/56672, processId:1190 ,exitStatusCode:1 ,processWaitForStatus:true ,processExitValue:1 2025-07-02 11:15:39,287 INFO - Send task execute result to master, the current task status: TaskExecutionStatus{code=6, desc='failure'} 2025-07-02 11:15:39,287 INFO - Remove the current task execute context from worker cache 2025-07-02 11:15:39,287 INFO - The current execute mode isn't develop mode, will clear the task execute file: /tmp/dolphinscheduler/exec/process/hadoop/16836554651104/18167664743392_10/32581/56672 2025-07-02 11:15:39,288 INFO - Success clear the task execute file: /tmp/dolphinscheduler/exec/process/hadoop/16836554651104/18167664743392_10/32581/56672 2025-07-02 11:15:39,562 INFO - -> 2025-07-02 11:15:38,630 INFO [pool-7-thread-1] tez.DagUtils (DagUtils.java:createLocalResource(842)) - Resource modification time: 1751426138580 for hdfs://dominos-usdp-v3-fun/tmp/hive/hadoop/_tez_session_dir/4caadf81-0f27-469e-8de0-87e177d910e3-resources/hudi-hive-sync-bundle-0.13.0.jar 2025-07-02 11:15:38,630 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:openInternal(288)) - Created new resources: null 2025-07-02 11:15:38,644 INFO [pool-7-thread-1] tez.DagUtils (DagUtils.java:getHiveJarDirectory(1058)) - Jar dir is null / directory doesn't exist. 
Choosing HIVE_INSTALL_DIR - /user/hadoop/.hiveJars 2025-07-02 11:15:38,666 INFO [pool-7-thread-1] tez.DagUtils (DagUtils.java:createLocalResource(842)) - Resource modification time: 1715146950410 for hdfs://dominos-usdp-v3-fun/user/hadoop/.hiveJars/hive-exec-3.1.3-3420a6126cfea97266fe35b708da5d5f95a5b158cad390dc4124081a39cf906f.jar 2025-07-02 11:15:38,697 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver (Driver.java:compile(666)) - Semantic Analysis Completed (retrial = false) 2025-07-02 11:15:38,698 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver (Driver.java:getSchema(374)) - Returning Hive schema: Schema(fieldSchemas:null, properties:null) 2025-07-02 11:15:38,698 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver (Driver.java:compile(781)) - Completed compiling command(queryId=hadoop_20250702111538_f08ba63c-7b09-48c8-86fe-f29aa249329c); Time taken: 0.165 seconds 2025-07-02 11:15:38,698 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] reexec.ReExecDriver (ReExecDriver.java:run(156)) - Execution #1 of query 2025-07-02 11:15:38,698 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver (Driver.java:checkConcurrency(285)) - Concurrency mode is disabled, not creating a lock manager 2025-07-02 11:15:38,698 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver (Driver.java:execute(2255)) - Executing command(queryId=hadoop_20250702111538_f08ba63c-7b09-48c8-86fe-f29aa249329c): ALTER TABLE ddp_dmo_dwd.DWD_OrdCusSrvDetail ADD IF NOT EXISTS PARTITION(DT='') 2025-07-02 11:15:38,700 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver (Driver.java:launchTask(2662)) - Starting task [Stage-0:DDL] in serial mode 2025-07-02 11:15:38,726 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.task.io.sort.mb, mr initial value=100, tez(original):tez.runtime.io.sort.mb=null, tez(final):tez.runtime.io.sort.mb=100 2025-07-02 11:15:38,726 INFO [pool-7-thread-1] 
tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.reduce.shuffle.read.timeout, mr initial value=180000, tez(original):tez.runtime.shuffle.read.timeout=null, tez(final):tez.runtime.shuffle.read.timeout=180000 2025-07-02 11:15:38,726 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.job.speculative.minimum-allowed-tasks, mr initial value=10, tez(original):tez.am.minimum.allowed.speculative.tasks=null, tez(final):tez.am.minimum.allowed.speculative.tasks=10 2025-07-02 11:15:38,726 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.ifile.readahead.bytes, mr initial value=4194304, tez(original):tez.runtime.ifile.readahead.bytes=null, tez(final):tez.runtime.ifile.readahead.bytes=4194304 2025-07-02 11:15:38,726 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.shuffle.ssl.enabled, mr initial value=false, tez(original):tez.runtime.shuffle.ssl.enable=null, tez(final):tez.runtime.shuffle.ssl.enable=false 2025-07-02 11:15:38,726 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.map.sort.spill.percent, mr initial value=0.80, tez(original):tez.runtime.sort.spill.percent=null, tez(final):tez.runtime.sort.spill.percent=0.80 2025-07-02 11:15:38,726 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.job.speculative.speculative-cap-running-tasks, mr initial value=0.1, tez(original):tez.am.proportion.running.tasks.speculatable=null, tez(final):tez.am.proportion.running.tasks.speculatable=0.1 2025-07-02 11:15:38,726 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: 
mr(unset):mapreduce.job.speculative.speculative-cap-total-tasks, mr initial value=0.01, tez(original):tez.am.proportion.total.tasks.speculatable=null, tez(final):tez.am.proportion.total.tasks.speculatable=0.01 2025-07-02 11:15:38,726 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.ifile.readahead, mr initial value=true, tez(original):tez.runtime.ifile.readahead=null, tez(final):tez.runtime.ifile.readahead=true 2025-07-02 11:15:38,726 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.reduce.shuffle.merge.percent, mr initial value=0.66, tez(original):tez.runtime.shuffle.merge.percent=null, tez(final):tez.runtime.shuffle.merge.percent=0.66 2025-07-02 11:15:38,726 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.reduce.shuffle.parallelcopies, mr initial value=50, tez(original):tez.runtime.shuffle.parallel.copies=null, tez(final):tez.runtime.shuffle.parallel.copies=50 2025-07-02 11:15:38,726 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.job.speculative.retry-after-speculate, mr initial value=15000, tez(original):tez.am.soonest.retry.after.speculate=null, tez(final):tez.am.soonest.retry.after.speculate=15000 2025-07-02 11:15:38,726 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.job.reduce.slowstart.completedmaps, mr initial value=0.95, tez(original):tez.shuffle-vertex-manager.min-src-fraction=null, tez(final):tez.shuffle-vertex-manager.min-src-fraction=0.95 2025-07-02 11:15:38,727 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.reduce.shuffle.memory.limit.percent, mr initial value=0.25, 
tez(original):tez.runtime.shuffle.memory.limit.percent=null, tez(final):tez.runtime.shuffle.memory.limit.percent=0.25 2025-07-02 11:15:38,727 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.job.speculative.retry-after-no-speculate, mr initial value=1000, tez(original):tez.am.soonest.retry.after.no.speculate=null, tez(final):tez.am.soonest.retry.after.no.speculate=1000 2025-07-02 11:15:38,727 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.task.io.sort.factor, mr initial value=100, tez(original):tez.runtime.io.sort.factor=null, tez(final):tez.runtime.io.sort.factor=100 2025-07-02 11:15:38,727 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.map.output.compress, mr initial value=false, tez(original):tez.runtime.compress=null, tez(final):tez.runtime.compress=false 2025-07-02 11:15:38,727 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.reduce.shuffle.connect.timeout, mr initial value=180000, tez(original):tez.runtime.shuffle.connect.timeout=null, tez(final):tez.runtime.shuffle.connect.timeout=180000 2025-07-02 11:15:38,727 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.reduce.input.buffer.percent, mr initial value=0.0, tez(original):tez.runtime.task.input.post-merge.buffer.percent=null, tez(final):tez.runtime.task.input.post-merge.buffer.percent=0.0 2025-07-02 11:15:38,727 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.map.output.compress.codec, mr initial value=org.apache.hadoop.io.compress.DefaultCodec, tez(original):tez.runtime.compress.codec=null, 
tez(final):tez.runtime.compress.codec=org.apache.hadoop.io.compress.DefaultCodec 2025-07-02 11:15:38,727 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.task.merge.progress.records, mr initial value=10000, tez(original):tez.runtime.merge.progress.records=null, tez(final):tez.runtime.merge.progress.records=10000 2025-07-02 11:15:38,727 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):map.sort.class, mr initial value=org.apache.hadoop.util.QuickSort, tez(original):tez.runtime.internal.sorter.class=null, tez(final):tez.runtime.internal.sorter.class=org.apache.hadoop.util.QuickSort 2025-07-02 11:15:38,727 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.reduce.shuffle.input.buffer.percent, mr initial value=0.70, tez(original):tez.runtime.shuffle.fetch.buffer.percent=null, tez(final):tez.runtime.shuffle.fetch.buffer.percent=0.70 2025-07-02 11:15:38,727 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.job.counters.max, mr initial value=120, tez(original):tez.counters.max=null, tez(final):tez.counters.max=120 2025-07-02 11:15:38,727 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.job.hdfs-servers, mr initial value=hdfs://dominos-usdp-v3-fun, tez(original):tez.job.fs-servers=null, tez(final):tez.job.fs-servers=hdfs://dominos-usdp-v3-fun 2025-07-02 11:15:38,727 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.job.queuename, mr initial value=default, tez(original):tez.queue.name=default, tez(final):tez.queue.name=default 2025-07-02 11:15:38,727 INFO [pool-7-thread-1] tez.TezSessionState 
(TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.job.maxtaskfailures.per.tracker, mr initial value=3, tez(original):tez.am.maxtaskfailures.per.node=null, tez(final):tez.am.maxtaskfailures.per.node=3 2025-07-02 11:15:38,727 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):mapreduce.task.timeout, mr initial value=600000, tez(original):tez.task.timeout-ms=null, tez(final):tez.task.timeout-ms=600000 2025-07-02 11:15:38,727 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:setupTezParamsBasedOnMR(562)) - Config: mr(unset):yarn.app.mapreduce.am.job.task.listener.thread-count, mr initial value=30, tez(original):tez.am.task.listener.thread-count=null, tez(final):tez.am.task.listener.thread-count=30 2025-07-02 11:15:38,731 INFO [pool-7-thread-1] sqlstd.SQLStdHiveAccessController (SQLStdHiveAccessController.java:<init>(96)) - Created SQLStdHiveAccessController for session context : HiveAuthzSessionContext [sessionString=4caadf81-0f27-469e-8de0-87e177d910e3, clientType=HIVECLI] 2025-07-02 11:15:38,731 WARN [pool-7-thread-1] session.SessionState (SessionState.java:setAuthorizerV2Config(950)) - METASTORE_FILTER_HOOK will be ignored, since hive.security.authorization.manager is set to instance of HiveAuthorizerFactory. 2025-07-02 11:15:38,735 INFO [pool-7-thread-1] metastore.HiveMetaStoreClient (HiveMetaStoreClient.java:open(441)) - Trying to connect to metastore with URI thrift://dc3-dominos-usdp-fun02:9083 2025-07-02 11:15:38,739 INFO [pool-7-thread-1] metastore.HiveMetaStoreClient (HiveMetaStoreClient.java:open(517)) - Opened a connection to metastore, current connections: 2 2025-07-02 11:15:38,745 INFO [pool-7-thread-1] metastore.HiveMetaStoreClient (HiveMetaStoreClient.java:open(570)) - Connected to metastore. 
2025-07-02 11:15:38,745 INFO [pool-7-thread-1] metastore.RetryingMetaStoreClient (RetryingMetaStoreClient.java:<init>(97)) - RetryingMetaStoreClient proxy=class org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient ugi=hadoop (auth:SIMPLE) retries=1 delay=1 lifetime=0 2025-07-02 11:15:38,750 INFO [pool-7-thread-1] client.TezClient (TezClient.java:<init>(210)) - Tez Client Version: [ component=tez-api, version=0.10.2, revision=22f46fe39a7cf99b24275304e99867b9135caba2, SCM-URL=scm:git:https://gitbox.apache.org/repos/asf/tez.git, buildTime=2023-02-08T02:24:56Z, buildUser=jenkins, buildJavaVersion=1.8.0_362 ] 2025-07-02 11:15:38,750 INFO [pool-7-thread-1] tez.TezSessionState (TezSessionState.java:openInternal(363)) - Opening new Tez Session (id: 4caadf81-0f27-469e-8de0-87e177d910e3, scratch dir: hdfs://dominos-usdp-v3-fun/tmp/hive/hadoop/_tez_session_dir/4caadf81-0f27-469e-8de0-87e177d910e3) 2025-07-02 11:15:38,773 INFO [pool-7-thread-1] impl.TimelineReaderClientImpl (TimelineReaderClientImpl.java:serviceInit(97)) - Initialized TimelineReader URI=http://dc3-dominos-usdp-fun02:8198/ws/v2/timeline/, clusterId=dominos-usdp-v3-fun 2025-07-02 11:15:38,779 ERROR [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] exec.DDLTask (DDLTask.java:failed(927)) - Failed org.apache.hadoop.hive.ql.metadata.HiveException: partition spec is invalid; field dt does not exist or is empty at org.apache.hadoop.hive.ql.metadata.Partition.createMetaPartitionObject(Partition.java:129) at org.apache.hadoop.hive.ql.metadata.Hive.convertAddSpecToMetaPartition(Hive.java:2525) at org.apache.hadoop.hive.ql.metadata.Hive.createPartitions(Hive.java:2466) at org.apache.hadoop.hive.ql.exec.DDLTask.addPartitions(DDLTask.java:1320) at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:466) at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:210) at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2664) at 
org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2335) at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2011) at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1709) at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1703) at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:218) at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:787) at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:759) at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:683) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.hadoop.util.RunJar.run(RunJar.java:323) at org.apache.hadoop.util.RunJar.main(RunJar.java:236) 2025-07-02 11:15:38,790 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] reexec.ReOptimizePlugin (ReOptimizePlugin.java:run(70)) - ReOptimization: retryPossible: false FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. partition spec is invalid; field dt does not exist or is empty 2025-07-02 11:15:38,791 ERROR [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver (SessionState.java:printError(1250)) - FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. 
partition spec is invalid; field dt does not exist or is empty 2025-07-02 11:15:38,792 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver (Driver.java:execute(2531)) - Completed executing command(queryId=hadoop_20250702111538_f08ba63c-7b09-48c8-86fe-f29aa249329c); Time taken: 0.094 seconds 2025-07-02 11:15:38,792 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] ql.Driver (Driver.java:checkConcurrency(285)) - Concurrency mode is disabled, not creating a lock manager 2025-07-02 11:15:38,793 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] conf.HiveConf (HiveConf.java:getLogIdVar(5037)) - Using the default value passed in for log id: 63fc22ae-87a3-4d13-b59e-6ea5a99a9941 2025-07-02 11:15:38,793 INFO [63fc22ae-87a3-4d13-b59e-6ea5a99a9941 main] session.SessionState (SessionState.java:resetThreadName(452)) - Resetting thread name to main 2025-07-02 11:15:38,793 INFO [main] conf.HiveConf (HiveConf.java:getLogIdVar(5037)) - Using the default value passed in for log id: 63fc22ae-87a3-4d13-b59e-6ea5a99a9941 2025-07-02 11:15:38,799 INFO [main] tez.TezSessionPoolManager (TezSessionPoolManager.java:closeIfNotDefault(351)) - Closing tez session if not default: sessionId=63fc22ae-87a3-4d13-b59e-6ea5a99a9941, queueName=null, user=hadoop, doAs=false, isOpen=false, isDefault=false 2025-07-02 11:15:38,800 INFO [Tez session start thread] client.TezClient (TezClient.java:stop(731)) - Shutting down Tez Session, sessionName=HIVE-63fc22ae-87a3-4d13-b59e-6ea5a99a9941, applicationId=application_1740624029612_5078 2025-07-02 11:15:38,815 INFO [Tez session start thread] client.TezClient (TezClient.java:stop(777)) - Could not connect to AM, killing session via YARN, sessionName=HIVE-63fc22ae-87a3-4d13-b59e-6ea5a99a9941, applicationId=application_1740624029612_5078 2025-07-02 11:15:38,824 INFO [main] tez.TezSessionState (TezSessionState.java:cleanupDagResources(721)) - Attemting to clean up resources for 63fc22ae-87a3-4d13-b59e-6ea5a99a9941: 
hdfs://dominos-usdp-v3-fun/tmp/hive/hadoop/_tez_session_dir/63fc22ae-87a3-4d13-b59e-6ea5a99a9941-resources; 0 additional files, 2 localized resources 2025-07-02 11:15:38,839 INFO [pool-7-thread-1] client.AHSProxy (AHSProxy.java:createAHSProxy(43)) - Connecting to Application History server at dc3-dominos-usdp-fun01/10.30.10.60:10200 2025-07-02 11:15:38,840 INFO [pool-7-thread-1] client.TezClient (TezClient.java:start(388)) - Session mode. Starting session. 2025-07-02 11:15:38,840 INFO [pool-7-thread-1] client.ConfiguredRMFailoverProxyProvider (ConfiguredRMFailoverProxyProvider.java:performFailover(100)) - Failing over to rm-dc3-dominos-usdp-fun01 2025-07-02 11:15:38,851 INFO [pool-7-thread-1] client.TezClientUtils (TezClientUtils.java:setupTezJarsLocalResources(180)) - Using tez.lib.uris value from configuration: hdfs:////dominos-usdp-v3-fun/tez/tez.tar.gz 2025-07-02 11:15:38,851 INFO [pool-7-thread-1] client.TezClientUtils (TezClientUtils.java:setupTezJarsLocalResources(182)) - Using tez.lib.uris.classpath value from configuration: null 2025-07-02 11:15:38,855 INFO [main] session.SessionState (SessionState.java:dropPathAndUnregisterDeleteOnExit(885)) - Deleted directory: /tmp/hive/hadoop/63fc22ae-87a3-4d13-b59e-6ea5a99a9941 on fs with scheme hdfs 2025-07-02 11:15:38,856 INFO [main] session.SessionState (SessionState.java:dropPathAndUnregisterDeleteOnExit(885)) - Deleted directory: /tmp/hadoop/63fc22ae-87a3-4d13-b59e-6ea5a99a9941 on fs with scheme file 2025-07-02 11:15:38,857 INFO [main] metastore.HiveMetaStoreClient (HiveMetaStoreClient.java:close(600)) - Closed a connection to metastore, current connections: 1 2025-07-02 11:15:39,565 INFO - FINALIZE_SESSION
07-03
内容概要:本文详细介绍了“秒杀商城”微服务架构的设计与实战全过程,涵盖系统从需求分析、服务拆分、技术选型到核心功能开发、分布式事务处理、容器化部署及监控链路追踪的完整流程。重点解决了高并发场景下的超卖问题,采用Redis预减库存、消息队列削峰、数据库乐观锁等手段保障数据一致性,并通过Nacos实现服务注册发现与配置管理,利用Seata处理跨服务分布式事务,结合RabbitMQ实现异步下单,提升系统吞吐能力。同时,项目支持Docker Compose快速部署和Kubernetes生产级编排,集成Sleuth+Zipkin链路追踪与Prometheus+Grafana监控体系,构建可观测性强的微服务系统。; 适合人群:具备Java基础和Spring Boot开发经验,熟悉微服务基本概念的中高级研发人员,尤其是希望深入理解高并发系统设计、分布式事务、服务治理等核心技术的开发者;适合工作2-5年、有志于转型微服务或提升架构能力的工程师; 使用场景及目标:①学习如何基于Spring Cloud Alibaba构建完整的微服务项目;②掌握秒杀场景下高并发、超卖控制、异步化、削峰填谷等关键技术方案;③实践分布式事务(Seata)、服务熔断降级、链路追踪、统一配置中心等企业级中间件的应用;④完成从本地开发到容器化部署的全流程落地; 阅读建议:建议按照文档提供的七个阶段循序渐进地动手实践,重点关注秒杀流程设计、服务间通信机制、分布式事务实现和系统性能优化部分,结合代码调试与监控工具深入理解各组件协作原理,真正掌握高并发微服务系统的构建能力。
评论
成就一亿技术人!
拼手气红包6.0元
还能输入1000个字符
 
红包 添加红包
表情包 插入表情
 条评论被折叠 查看
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值