Using Python to mimic the mysqlbinlog command: parsing MySQL binlogs at the binary level

This article presents a Python script for parsing MySQL binlog files. The script reads and decodes the various event types in a binlog, such as write, update, and delete row events, and can filter events by criteria such as thread ID or GTID. It can also generate rollback SQL statements.

Source: the blog of 黑洞中的奇点, http://www.cnblogs.com/kelvin19840813/ . Your support is the greatest encouragement to the author; thank you for reading carefully. This article is copyrighted by its author; reposting is welcome, but please keep this notice.

Using Python to mimic the mysqlbinlog command and parse MySQL binlogs at the binary level. This script has been tested under Python 2.7; if you run into any problems, please leave a comment.
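
Before wading into the full script, here is a minimal, self-contained sketch (not part of the original script) of the idea everything below builds on: a binlog file starts with the 4-byte magic header 0xFE 'b' 'i' 'n', and every event that follows begins with a fixed 19-byte header holding the timestamp, type code, server id, event length, next position and flags, which is already enough to walk the file event by event. The file name below is a placeholder.

#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Minimal sketch: walk a binlog file and print every event header.
# 'mysql-bin.000001' is a placeholder path; point it at a real binlog.
import struct, time

BINLOG_FILE_HEADER = b'\xFE\x62\x69\x6E'    # magic bytes: 0xfe 'b' 'i' 'n'

with open('mysql-bin.000001', 'rb') as f:
    if f.read(4) != BINLOG_FILE_HEADER:
        raise SystemExit('not a binlog file')
    while True:
        header = f.read(19)   # timestamp(4) type_code(1) server_id(4) event_length(4) next_pos(4) flags(2)
        if len(header) < 19:
            break
        timestamp, type_code, server_id, event_length, next_pos, flags = struct.unpack('<IBIIIH', header)
        print('%s  type_code=%-3d  event_length=%-6d  next_pos=%d' % (
            time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(timestamp)),
            type_code, event_length, next_pos))
        f.seek(event_length - 19, 1)          # skip the rest of the event body

The full script below builds this same loop into the Read/CheckEvent classes and then decodes each event body according to its type code.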

 

   1 #!/usr/bin/env python
   2 # -*- encoding: utf-8 -*-
   3 
   4 import struct,time,datetime,os
   5 import sys,decimal,getopt,types
   6 reload(sys)
   7 sys.setdefaultencoding('utf-8')
   8 
   9 class column_type_dict:
  10     MYSQL_TYPE_DECIMAL=0
  11     MYSQL_TYPE_TINY=1
  12     MYSQL_TYPE_SHORT=2
  13     MYSQL_TYPE_LONG=3
  14     MYSQL_TYPE_FLOAT=4
  15     MYSQL_TYPE_DOUBLE=5
  16     MYSQL_TYPE_NULL=6
  17     MYSQL_TYPE_TIMESTAMP=7
  18     MYSQL_TYPE_LONGLONG=8
  19     MYSQL_TYPE_INT24=9
  20     MYSQL_TYPE_DATE=10
  21     MYSQL_TYPE_TIME=11
  22     MYSQL_TYPE_DATETIME=12
  23     MYSQL_TYPE_YEAR=13
  24     MYSQL_TYPE_NEWDATE=14
  25     MYSQL_TYPE_VARCHAR=15
  26     MYSQL_TYPE_BIT=16
  27     MYSQL_TYPE_TIMESTAMP2=17
  28     MYSQL_TYPE_DATETIME2=18
  29     MYSQL_TYPE_TIME2=19
  30     MYSQL_TYPE_JSON=245
  31     MYSQL_TYPE_NEWDECIMAL=246
  32     MYSQL_TYPE_ENUM=247
  33     MYSQL_TYPE_SET=248
  34     MYSQL_TYPE_TINY_BLOB=249
  35     MYSQL_TYPE_MEDIUM_BLOB=250
  36     MYSQL_TYPE_LONG_BLOB=251
  37     MYSQL_TYPE_BLOB=252
  38     MYSQL_TYPE_VAR_STRING=253
  39     MYSQL_TYPE_STRING=254
  40     MYSQL_TYPE_GEOMETRY=255
  41 
  42 class binlog_events:
  43     UNKNOWN_EVENT= 0
  44     START_EVENT_V3= 1
  45     QUERY_EVENT= 2
  46     STOP_EVENT= 3
  47     ROTATE_EVENT= 4
  48     INTVAR_EVENT= 5
  49     LOAD_EVENT= 6
  50     SLAVE_EVENT= 7
  51     CREATE_FILE_EVENT= 8
  52     APPEND_BLOCK_EVENT= 9
  53     EXEC_LOAD_EVENT= 10
  54     DELETE_FILE_EVENT= 11
  55     NEW_LOAD_EVENT= 12
  56     RAND_EVENT= 13
  57     USER_VAR_EVENT= 14
  58     FORMAT_DESCRIPTION_EVENT= 15
  59     XID_EVENT= 16
  60     BEGIN_LOAD_QUERY_EVENT= 17
  61     EXECUTE_LOAD_QUERY_EVENT= 18
  62     TABLE_MAP_EVENT = 19
  63     PRE_GA_WRITE_ROWS_EVENT = 20
  64     PRE_GA_UPDATE_ROWS_EVENT = 21
  65     PRE_GA_DELETE_ROWS_EVENT = 22
  66     WRITE_ROWS_EVENT_V1 = 23
  67     UPDATE_ROWS_EVENT_V1 = 24
  68     DELETE_ROWS_EVENT_V1 = 25
  69     INCIDENT_EVENT= 26
  70     HEARTBEAT_LOG_EVENT= 27
  71     IGNORABLE_LOG_EVENT= 28
  72     ROWS_QUERY_LOG_EVENT= 29
  73     WRITE_ROWS_EVENT = 30
  74     UPDATE_ROWS_EVENT = 31
  75     DELETE_ROWS_EVENT = 32
  76     GTID_LOG_EVENT= 33
  77     ANONYMOUS_GTID_LOG_EVENT= 34
  78     PREVIOUS_GTIDS_LOG_EVENT= 35
  79 
  80 class json_type:
  81     NULL_COLUMN = 251
  82     UNSIGNED_CHAR_COLUMN = 251
  83     UNSIGNED_SHORT_COLUMN = 252
  84     UNSIGNED_INT24_COLUMN = 253
  85     UNSIGNED_INT64_COLUMN = 254
  86     UNSIGNED_CHAR_LENGTH = 1
  87     UNSIGNED_SHORT_LENGTH = 2
  88     UNSIGNED_INT24_LENGTH = 3
  89     UNSIGNED_INT64_LENGTH = 8
  90 
  91     JSONB_TYPE_SMALL_OBJECT = 0x0
  92     JSONB_TYPE_LARGE_OBJECT = 0x1
  93     JSONB_TYPE_SMALL_ARRAY = 0x2
  94     JSONB_TYPE_LARGE_ARRAY = 0x3
  95     JSONB_TYPE_LITERAL = 0x4
  96     JSONB_TYPE_INT16 = 0x5
  97     JSONB_TYPE_UINT16 = 0x6
  98     JSONB_TYPE_INT32 = 0x7
  99     JSONB_TYPE_UINT32 = 0x8
 100     JSONB_TYPE_INT64 = 0x9
 101     JSONB_TYPE_UINT64 = 0xA
 102     JSONB_TYPE_DOUBLE = 0xB
 103     JSONB_TYPE_STRING = 0xC
 104     JSONB_TYPE_OPAQUE = 0xF
 105 
 106     JSONB_LITERAL_NULL = 0x0
 107     JSONB_LITERAL_TRUE = 0x1
 108     JSONB_LITERAL_FALSE = 0x2
 109 
 110 BINLOG_FILE_HEADER = b'\xFE\x62\x69\x6E'
 111 binlog_event_header_len = 19
 112 binlog_event_fix_part = 13
 113 binlog_quer_event_stern = 4
 114 binlog_row_event_extra_headers = 2
 115 read_format_desc_event_length = 56
 116 binlog_xid_event_length = 8
 117 table_map_event_fix_length = 8
 118 fix_length = 8
 119 
 120 class _rollback:
 121     rollback_status = None
 122 
 123     database = None
 124     table = None
 125     _gtid = None
 126 
 127     _myfile = None
 128     _myfunc = None
 129 
 130 class _remote_filed:
 131     _gtid = None
 132     _gtid_status = None
 133 
 134     _thread_id = None
 135     _tid_status = None
 136     _tid_gid = None
 137     _tid_gid_pos = None
 138     _tid_gid_time = None
 139 
 140     _rollback_status = None
 141 
 142 
 143 class Echo(object):
 144     '''
 145     print binlog 
 146     '''
 147 
 148     def Version(self, binlog_ver, server_ver, create_time):
 149         print 'binlog_ver : {}   server_ver : {}   create_time : {}'.format(binlog_ver, server_ver, create_time)
 150 
 151 
 152     def TractionHeader(self, thread_id, database_name, sql_statement, timestamp,_pos):
 153 
 154         if _remote_filed._thread_id:
 155             if _remote_filed._thread_id == thread_id:
 156                 self.Gtid(timestamp,_remote_filed._tid_gid,_remote_filed._tid_gid_pos)
 157                 print '{}  GTID_NEXT : {} at pos : {}'.format(_remote_filed._tid_gid_time, _remote_filed._tid_gid, _remote_filed._tid_gid_pos)
 158                 print '{}  thread id : {}  at pos : {}  database : {}   statement : {}'.format(timestamp, thread_id, _pos,
 159                                                                                                database_name, sql_statement)
 160                 _remote_filed._tid_status = True
 161                 if _remote_filed._rollback_status:
 162                     _rollback.database = database_name
 163         elif _remote_filed._gtid:
 164             if _remote_filed._gtid_status:
 165                 print '{}  thread id : {}  at pos : {}  database : {}   statement : {}'.format(timestamp, thread_id,
 166                                                                                                _pos,
 167                                                                                                database_name,
 168                                                                                                sql_statement)
 169                 if _remote_filed._rollback_status:
 170                     _rollback.database = database_name
 171         elif _rollback.rollback_status:
 172             _rollback.database = database_name
 173         else:
 174             print '{}  thread id : {}  at pos : {}  database : {}   statement : {}'.format(timestamp, thread_id, _pos, database_name, sql_statement)
 175 
 176     def Xid(self, timestamp, xid_num,_pos):
 177         if _remote_filed._thread_id:
 178             if _remote_filed._tid_status:
 179                 _remote_filed._tid_status = None
 180                 print '{}  statement : COMMIT  xid : {}  at pos : {}'.format(timestamp, xid_num, _pos)
 181                 print ''
 182         elif _remote_filed._gtid:
 183             if _remote_filed._gtid_status:
 184                 print '{}  statement : COMMIT  xid : {}  at pos : {}'.format(timestamp, xid_num, _pos)
 185                 print ''
 186                 sys.exit()  # the filtered GTID transaction has committed, stop parsing
 187         elif _rollback.rollback_status:
 188             _rollback._myfunc.SaveGtid(xid=True)
 189         else:
 190             print '{}  statement : COMMIT  xid : {}  at pos : {}'.format(timestamp, xid_num, _pos)
 191             print ''
 192 
 193     def Tablemap(self, timestamp, tablename):
 194         if _remote_filed._thread_id:
 195             if _remote_filed._tid_status:
 196                 print '{}  tablename : {}'.format(timestamp, tablename)
 197             if _remote_filed._rollback_status:
 198                 _rollback.table = tablename
 199         elif _remote_filed._gtid:
 200             if _remote_filed._gtid_status:
 201                 print '{}  tablename : {}'.format(timestamp, tablename)
 202             if _remote_filed._rollback_status:
 203                 _rollback.table = tablename
 204         elif _rollback.rollback_status:
 205             _rollback.table = tablename
 206         else:
 207             print '{}  tablename : {}'.format(timestamp, tablename)
 208     def Gtid(self, timestamp, gtid,_pos):
 209         if _remote_filed._thread_id:
 210             _remote_filed._tid_gid,_remote_filed._tid_gid_pos,_remote_filed._tid_gid_time = gtid,_pos,timestamp
 211         elif _remote_filed._gtid:
 212             if _remote_filed._gtid == gtid:
 213                 print '{}  GTID_NEXT : {} at pos : {}'.format(timestamp, gtid, _pos)
 214                 _remote_filed._gtid_status = True
 215         elif _rollback.rollback_status:
 216             _rollback._myfunc.SaveGtid(gtid=gtid)
 217         else:
 218             print '{}  GTID_NEXT : {} at pos : {}'.format(timestamp, gtid,_pos)
 219 
 220     def TractionVlues(self, before_value=None, after_value=None, type=None):
 221         if _remote_filed._thread_id:
 222             if _remote_filed._tid_status:
 223                 if _remote_filed._rollback_status:
 224                     _rollback._myfunc.CreateSQL(before_value=before_value, after_value=after_value, event_type=type)
 225                 else:
 226                     self._tv(before_value=before_value, after_value=after_value, type=type)
 227         elif _remote_filed._gtid:
 228             if _remote_filed._gtid_status:
 229                 if _remote_filed._rollback_status:
 230                     _rollback._myfunc.CreateSQL(before_value=before_value, after_value=after_value, event_type=type)
 231                 else:
 232                     self._tv(before_value=before_value, after_value=after_value, type=type)
 233         elif _rollback.rollback_status:
 234             _rollback._myfunc.CreateSQL(before_value=before_value,after_value=after_value,event_type=type)
 235         else:
 236             self._tv(before_value=before_value,after_value=after_value,type=type)
 237 
 238     def _tv(self,before_value=None, after_value=None, type=None):
 239         if type == binlog_events.UPDATE_ROWS_EVENT:
 240             print '{: >21}{}  before_value : [{}]  after_value : [{}]'.format('', 'UPDATE_ROWS_EVENT',
 241                                                                               ','.join(['{}'.format(a) for a in
 242                                                                                         before_value]), ','.join(
 243                     ['{}'.format(a) for a in after_value]))
 244         else:
 245             if type == binlog_events.DELETE_ROWS_EVENT:
 246                 print '{: >21}{}  value : [{}] '.format('', 'DELETE_ROW_EVENT',
 247                                                         ','.join(['{}'.format(a) for a in after_value]))
 248             elif type == binlog_events.WRITE_ROWS_EVENT:
 249                 print '{: >21}{}  value : [{}] '.format('', 'WRITE_ROW_EVENT',
 250                                                         ','.join(['{}'.format(a) for a in after_value]))
 251 
 252 class PrintSql(object):
 253     def __seek(self,num):
 254         try:
 255             _rollback._myfile.seek(-num,1)
 256         except:
 257             _rollback._myfile.close()
 258             self.rmfile()
 259             sys.exit()
 260     def read(self):
 261         _num = 9
 262         _rollback._myfile.seek(0,2)
 263         while True:
 264             self.__seek(_num)
 265             _value,_type_code, = struct.unpack('QB',_rollback._myfile.read(_num))
 266             self.__seek(_num)
 267             if _type_code == 1:
 268                 self.__gtid(_value)
 269             elif _type_code == 2:
 270                 self.__statement(_value)
 271             else:
 272                 print 'Error: type_code {} '.format(_type_code)
 273     def __gtid(self,tid):
 274         self.__seek(36)
 275         _uuid = _rollback._myfile.read(36)
 276         gtid = _uuid.decode('utf8') + ':' + str(tid)
 277         print ''
 278         print '#{: >10}  GTID_NEXT : {}'.format('-',gtid)
 279         self.__seek(36)
 280     def __statement(self,length):
 281         self.__seek(length)
 282         _sql = _rollback._myfile.read(length)
 283         sql, = struct.unpack('{}s'.format(length),_sql)
 284         print '{: >10}{}'.format('',sql)
 285         self.__seek(length)
 286 
 287     def rmfile(self):
 288         os.remove('tmp_rollback')
 289 
 290 class GetRollStatement(object):
 291     def __init__(self,host,user,passwd,port=None):
 292         import pymysql,traceback
 293         self.port = port if port != None else 3306
 294         try:
 295             self.local_conn = pymysql.connect(host=host, user=user, passwd=passwd, port=self.port, db='',
 296                                          charset="utf8")
 297         except pymysql.Error,e:
 298             print traceback.format_exc()
 299         self.column_list = []
 300         self._is_pri = []
 301     def __join(self,column,value):
 302         if type(value) is types.StringType:
 303             if value == 'Null':
 304                 return ' {}={}'.format(column, value)
 305             else:
 306                 return ' {}="{}"'.format(column, value)
 307         else:
 308             return  ' {}={}'.format(column,value)
 309     def WriteEvent(self,values):
 310         sql = 'delete from {}.{} where '.format(_rollback.database,_rollback.table)
 311         if self._is_pri:
 312             sql += self.__join(self._is_pri[0][0],values[self._is_pri[0][1]])
 313         else:
 314             for i,column in enumerate(self.column_list):
 315                 sql += self.__join(column[0],values[i])
 316                 if column != self.column_list[-1]:
 317                     sql += ' and'
 318         if _remote_filed._rollback_status:
 319             print '{: >21}{}{}'.format('', '-- ', sql)
 320         else:
 321             self.__tmppack(sql,2)
 322     def DeleteEvent(self,values):
 323         sql = 'insert into {}.{}({}) values('.format(_rollback.database,_rollback.table,','.join([a[0] for a in self.column_list]))
 324         for idex,value in enumerate(values):
 325             if type(value) is types.StringType:
 326                 if value == 'Null':
 327                     sql += '{}'.format(value)
 328                 else:
 329                     sql += '"{}"'.format(value)
 330             else:
 331                 sql += '{}'.format(value)
 332             if len(values[idex:]) <= 1:
 333                 sql += ')'
 334             else:
 335                 sql += ','
 336         if _remote_filed._rollback_status:
 337             print '{: >21}{}{}'.format('', '-- ', sql)
 338         else:
 339             self.__tmppack(sql, 2)
 340     def UpateEvent(self,after_values,befor_values):
 341         _set = []
 342         _where = []
 343         if self._is_pri:
 344             _where.append(self.__join(self._is_pri[0][0],after_values[self._is_pri[0][1]]))
 345         else:
 346             for i,column in enumerate(self.column_list):
 347                 _where.append(self.__join(column[0],after_values[i]))
 348 
 349         for i,column in enumerate(self.column_list):
 350             _set.append(self.__join(column[0],befor_values[i]))
 351         sql = 'update {}.{} set{} where{}'.format(_rollback.database, _rollback.table, ','.join(_set), ' and'.join(_where))
 352         if _remote_filed._rollback_status:
 353             print '{: >21}{}{}'.format('', '-- ',sql)
 354         else:
 355             self.__tmppack(sql, 2)
 356 
 357     def GetColumnName(self):
 358         with self.local_conn.cursor() as cur:
 359             sql = 'desc {}.{};'.format(_rollback.database,_rollback.table)
 360             cur.execute(sql)
 361             result = cur.fetchall()
 362         self.column_list = [[a[0],a[3]] for a in result]
 363 
 364     def CreateSQL(self,before_value=None,after_value=None,event_type=None):
 365         self.GetColumnName()
 366         self._is_pri = [[_a[0], idex] for idex, _a in enumerate(self.column_list) if 'PRI' in _a]
 367         if event_type == binlog_events.WRITE_ROWS_EVENT:
 368             self.WriteEvent(after_value)
 369         elif event_type == binlog_events.UPDATE_ROWS_EVENT:
 370             self.UpateEvent(after_value,before_value)
 371         elif event_type == binlog_events.DELETE_ROWS_EVENT:
 372             self.DeleteEvent(after_value)
 373 
 374     def SaveGtid(self,gtid=None,xid=None):
 375         if xid:
 376             __gtid = _rollback._gtid.split(':')
 377             tid = int(__gtid[1])
 378             uuid = str(__gtid[0])
 379             self.__tmppackgtid(uuid,tid,1)
 380         elif _rollback._gtid != gtid:
 381             _rollback._gtid = gtid
 382 
 383     def __tmppackgtid(self,uuid,tid,type):
 384         s_uuid = struct.Struct('{}s'.format(len(uuid)))
 385         s_header = struct.Struct('QB')
 386         _uuid = s_uuid.pack(uuid)
 387         _header = s_header.pack(tid,type)
 388         _rollback._myfile.write(_uuid)
 389         _rollback._myfile.write(_header)
 390     def __tmppack(self,value,type):
 391         import re
 392         _value = re.sub(r"\s{2,}"," ",str(value).strip()) + ';'
 393         s_value = struct.Struct('{}s'.format(len(_value)))
 394         s_header = struct.Struct('QB')
 395         _value = s_value.pack(_value)
 396         _header = s_header.pack(len(_value),type)
 397         _rollback._myfile.write(_value)
 398         _rollback._myfile.write(_header)
 399 
 400     def _close(self):
 401         self.local_conn.close()
 402 
 403 class Read(Echo):
 404     def __init__(self,start_position=None,filename=None,pack=None):
 405         self.__packet = pack
 406         if filename:
 407             self.file_data = open(filename, 'rb')
 408             read_byte = self.read_bytes(4)
 409             if read_byte != BINLOG_FILE_HEADER:
 410                 print 'error : not a standard binlog file format'
 411                 exit()
 412         if start_position:
 413             self.file_data.seek(start_position-4,1)
 414 
 415     def read_int_be_by_size(self, size ,bytes=None):
 416         '''Read a big endian integer values based on byte number'''
 417         if bytes is None:
 418             if size == 1:
 419                 return struct.unpack('>b', self.read_bytes(size))[0]
 420             elif size == 2:
 421                 return struct.unpack('>h', self.read_bytes(size))[0]
 422             elif size == 3:
 423                 return self.read_int24_be()
 424             elif size == 4:
 425                 return struct.unpack('>i', self.read_bytes(size))[0]
 426             elif size == 5:
 427                 return self.read_int40_be()
 428             elif size == 8:
 429                 return struct.unpack('>q', self.read_bytes(size))[0]
 430         else:
 431             '''used for read new decimal'''
 432             if size == 1:
 433                 return struct.unpack('>b', bytes[0:size])[0]
 434             elif size == 2:
 435                 return struct.unpack('>h', bytes[0:size])[0]
 436             elif size == 3:
 437                 return self.read_int24_be(bytes)
 438             elif size == 4:
 439                 return struct.unpack('>i',bytes[0:size])[0]
 440 
 441     def read_int24_be(self,bytes=None):
 442         if bytes is None:
 443             a, b, c = struct.unpack('BBB', self.read_bytes(3))
 444         else:
 445             a, b, c = struct.unpack('BBB', bytes[0:3])
 446         res = (a << 16) | (b << 8) | c
 447         if res >= 0x800000:
 448             res -= 0x1000000
 449         return res
 450 
 451     def read_uint_by_size(self, size):
 452         '''Read a little endian integer values based on byte number'''
 453         if size == 1:
 454             return self.read_uint8()
 455         elif size == 2:
 456             return self.read_uint16()
 457         elif size == 3:
 458             return self.read_uint24()
 459         elif size == 4:
 460             return self.read_uint32()
 461         elif size == 5:
 462             return self.read_uint40()
 463         elif size == 6:
 464             return self.read_uint48()
 465         elif size == 7:
 466             return self.read_uint56()
 467         elif size == 8:
 468             return self.read_uint64()
 469 
 470     def read_uint24(self):
 471         a, b, c = struct.unpack("<BBB", self.read_bytes(3))
 472         return a + (b << 8) + (c << 16)
 473 
 474     def read_int24(self):
 475         a, b, c = struct.unpack("<bbb", self.read_bytes(3))
 476         return a + (b << 8) + (c << 16)
 477 
 478     def read_uint40(self):
 479         a, b = struct.unpack("<BI", self.read_bytes(5))
 480         return a + (b << 8)
 481 
 482     def read_int40_be(self):
 483         a, b = struct.unpack(">IB", self.read_bytes(5))
 484         return b + (a << 8)
 485 
 486     def read_uint48(self):
 487         a, b, c = struct.unpack("<HHH", self.read_bytes(6))
 488         return a + (b << 16) + (c << 32)
 489 
 490     def read_uint56(self):
 491         a, b, c = struct.unpack("<BHI", self.read_bytes(7))
 492         return a + (b << 8) + (c << 24)
 493 
 494     def read_bytes(self, count):
 495         try:
 496             return self.file_data.read(count) if self.__packet is None else self.__packet.read(count)
 497         except:
 498             return None
 499 
 500     def read_uint64(self):
 501         read_byte = self.read_bytes(8)
 502         result, = struct.unpack('Q', read_byte)
 503         return result
 504 
 505     def read_int64(self):
 506         read_byte = self.read_bytes(8)
 507         result, = struct.unpack('q', read_byte)
 508         return result
 509 
 510     def read_uint32(self):
 511         read_byte = self.read_bytes(4)
 512         result, = struct.unpack('I', read_byte)
 513         return result
 514 
 515     def read_int32(self):
 516         read_byte = self.read_bytes(4)
 517         result, = struct.unpack('i', read_byte)
 518         return result
 519 
 520     def read_uint16(self):
 521         read_byte = self.read_bytes(2)
 522         result, = struct.unpack('H', read_byte)
 523         return result
 524 
 525     def read_int16(self):
 526         read_byte = self.read_bytes(2)
 527         result, = struct.unpack('h', read_byte)
 528         return result
 529 
 530     def read_uint8(self):
 531         read_byte = self.read_bytes(1)
 532         result, = struct.unpack('B', read_byte)
 533         return result
 534 
 535     def read_int8(self):
 536         read_byte = self.read_bytes(1)
 537         result, = struct.unpack('b', read_byte)
 538         return result
 539 
 540     def read_format_desc_event(self):
 541         binlog_ver, = struct.unpack('H',self.read_bytes(2))
 542         server_ver, = struct.unpack('50s',self.read_bytes(50))
 543         create_time, = struct.unpack('I',self.read_bytes(4))
 544         return binlog_ver,server_ver,create_time
 545 
 546     def __add_fsp_to_time(self, time, column):
 547         """Read and add the fractional part of time
 548         (the microsecond component of the new TIMESTAMP2/DATETIME2/TIME2 formats).
 549         """
 550         microsecond,read = self.__read_fsp(column)
 551         if microsecond > 0:
 552             time = time.replace(microsecond=microsecond)
 553         return time,read
 554 
 555     def __read_fsp(self, column):
 556         read = 0
 557         if column == 1 or column == 2:
 558             read = 1
 559         elif column == 3 or column == 4:
 560             read = 2
 561         elif column == 5 or column == 6:
 562             read = 3
 563         if read > 0:
 564             microsecond = self.read_int_be_by_size(read)
 565             if column % 2:
 566                 return int(microsecond / 10),read
 567             else:
 568                 return microsecond,read
 569 
 570         return 0,0
 571 
 572 
 573     def __read_binary_slice(self, binary, start, size, data_length):
 574         """
 575         Read a part of binary data and extract a number
 576         binary: the data
 577         start: From which bit (1 to X)
 578         size: How many bits should be read
 579         data_length: data size
 580         """
 581         binary = binary >> data_length - (start + size)
 582         mask = ((1 << size) - 1)
 583         return binary & mask
 584 
 585     def __read_datetime2(self, column):
 586         """DATETIME
 587 
 588         1 bit  sign           (1= non-negative, 0= negative)
 589         17 bits year*13+month  (year 0-9999, month 0-12)
 590          5 bits day            (0-31)
 591          5 bits hour           (0-23)
 592          6 bits minute         (0-59)
 593          6 bits second         (0-59)
 594         ---------------------------
 595         40 bits = 5 bytes
 596         """
 597         data = self.read_int_be_by_size(5)
 598         year_month = self.__read_binary_slice(data, 1, 17, 40)
 599         try:
 600             t = datetime.datetime(
 601                 year=int(year_month / 13),
 602                 month=year_month % 13,
 603                 day=self.__read_binary_slice(data, 18, 5, 40),
 604                 hour=self.__read_binary_slice(data, 23, 5, 40),
 605                 minute=self.__read_binary_slice(data, 28, 6, 40),
 606                 second=self.__read_binary_slice(data, 34, 6, 40))
 607         except ValueError:
 608             return None
 609         __time,read = self.__add_fsp_to_time(t, column)
 610         return __time,read
 611 
 612     def __read_time2(self, column):
 613         """TIME encoding for nonfractional part:
 614 
 615          1 bit sign    (1= non-negative, 0= negative)
 616          1 bit unused  (reserved for future extensions)
 617         10 bits hour   (0-838)
 618          6 bits minute (0-59)
 619          6 bits second (0-59)
 620         ---------------------
 621         24 bits = 3 bytes
 622         """
 623         data = self.read_int_be_by_size(3)
 624 
 625         sign = 1 if self.__read_binary_slice(data, 0, 1, 24) else -1
 626         if sign == -1:
 627             '''
 628             negative integers are stored as 2's complement,
 629             hence take 2's complement again to get the right value.
 630             '''
 631             data = ~data + 1
 632 
 633         microseconds,read = self.__read_fsp(column)
 634         t = datetime.timedelta(
 635             hours=sign*self.__read_binary_slice(data, 2, 10, 24),
 636             minutes=self.__read_binary_slice(data, 12, 6, 24),
 637             seconds=self.__read_binary_slice(data, 18, 6, 24),
 638             microseconds= microseconds
 639         )
 640         return t,read+3
 641 
 642     def __read_date(self):
 643         time = self.read_uint24()
 644         if time == 0:  # nasty mysql 0000-00-00 dates
 645             return None
 646 
 647         year = (time & ((1 << 15) - 1) << 9) >> 9
 648         month = (time & ((1 << 4) - 1) << 5) >> 5
 649         day = (time & ((1 << 5) - 1))
 650         if year == 0 or month == 0 or day == 0:
 651             return None
 652 
 653         date = datetime.date(
 654             year=year,
 655             month=month,
 656             day=day
 657         )
 658         return date
 659 
 660     def __read_new_decimal(self, precision,decimals):
 661         """Read MySQL's new decimal format introduced in MySQL 5"""
 662         '''
 663         Each multiple of nine digits requires four bytes, and the “leftover” digits require some fraction of four bytes. 
 664         The storage required for excess digits is given by the following table.
 665         
 666         Leftover Digits     Number of Bytes
 667         0                    0
 668         1                    1
 669         2                    1
 670         3                    2
 671         4                    2
 672         5                    3
 673         6                    3
 674         7                    4
 675         8                    4
 676 
 677         '''
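        # Worked example (illustrative, not from the original post): for DECIMAL(14,4)
        # the integral part has 10 digits = one full group of 9 digits (4 bytes) plus
        # 1 leftover digit (1 byte); the fractional part has 4 leftover digits (2 bytes),
        # so the code below reads 4 + 1 + 2 = 7 bytes for the value.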
 678         digits_per_integer = 9
 679         compressed_bytes = [0, 1, 1, 2, 2, 3, 3, 4, 4, 4]
 680         integral = (precision - decimals)
 681         uncomp_integral = int(integral / digits_per_integer)
 682         uncomp_fractional = int(decimals / digits_per_integer)
 683         comp_integral = integral - (uncomp_integral * digits_per_integer)
 684         comp_fractional = decimals - (uncomp_fractional
 685                                              * digits_per_integer)
 686 
 687         _read_bytes = (uncomp_integral*4) + (uncomp_fractional*4) + compressed_bytes[comp_fractional] + compressed_bytes[comp_integral]
 688 
 689         _data = bytearray(self.read_bytes(_read_bytes))
 690         value = _data[0]
 691         if value & 0x80 != 0:
 692             res = ""
 693             mask = 0
 694         else:
 695             mask = -1
 696             res = "-"
 697         _data[0] = value ^ 0x80  # flip the sign bit; bytearray items are plain ints
 698 
 699         size = compressed_bytes[comp_integral]
 700         offset = 0
 701         if size > 0:
 702             offset += size
 703             value = self.read_int_be_by_size(size=size,bytes=_data) ^ mask
 704             res += str(value)
 705 
 706         for i in range(0, uncomp_integral):
 707             offset += 4
 708             value = struct.unpack('>i', _data[offset-4:offset])[0] ^ mask
 709             res += '%09d' % value
 710 
 711         res += "."
 712 
 713         for i in range(0, uncomp_fractional):
 714             offset += 4
 715             value = struct.unpack('>i', _data[offset-4:offset])[0] ^ mask
 716             res += '%09d' % value
 717 
 718         size = compressed_bytes[comp_fractional]
 719         if size > 0:
 720             value = self.read_int_be_by_size(size=size,bytes=_data[offset:]) ^ mask
 721             res += '%0*d' % (comp_fractional, value)
 722 
 723         return decimal.Decimal(res),_read_bytes
 724 
 725     def __is_null(self, null_bitmap, position):
 726         bit = null_bitmap[int(position / 8)]
 727         if type(bit) is str:
 728             bit = ord(bit)
 729         return bit & (1 << (position % 8))
 730 
 731     '''parsing for json'''
 732     '''################################################################'''
 733     def read_binary_json(self, length):
 734         t = self.read_uint8()
 735         return self.read_binary_json_type(t, length)
 736 
 737     def read_binary_json_type(self, t, length):
 738         large = (t in (json_type.JSONB_TYPE_LARGE_OBJECT, json_type.JSONB_TYPE_LARGE_ARRAY))
 739         if t in (json_type.JSONB_TYPE_SMALL_OBJECT, json_type.JSONB_TYPE_LARGE_OBJECT):
 740             return self.read_binary_json_object(length - 1, large)
 741         elif t in (json_type.JSONB_TYPE_SMALL_ARRAY, json_type.JSONB_TYPE_LARGE_ARRAY):
 742             return self.read_binary_json_array(length - 1, large)
 743         elif t in (json_type.JSONB_TYPE_STRING,):
 744             return self.read_length_coded_pascal_string(1)
 745         elif t in (json_type.JSONB_TYPE_LITERAL,):
 746             value = self.read_uint8()
 747             if value == json_type.JSONB_LITERAL_NULL:
 748                 return None
 749             elif value == json_type.JSONB_LITERAL_TRUE:
 750                 return True
 751             elif value == json_type.JSONB_LITERAL_FALSE:
 752                 return False
 753         elif t == json_type.JSONB_TYPE_INT16:
 754             return self.read_int16()
 755         elif t == json_type.JSONB_TYPE_UINT16:
 756             return self.read_uint16()
 757         elif t in (json_type.JSONB_TYPE_DOUBLE,):
 758             return struct.unpack('<d', self.read_bytes(8))[0]
 759         elif t == json_type.JSONB_TYPE_INT32:
 760             return self.read_int32()
 761         elif t == json_type.JSONB_TYPE_UINT32:
 762             return self.read_uint32()
 763         elif t == json_type.JSONB_TYPE_INT64:
 764             return self.read_int64()
 765         elif t == json_type.JSONB_TYPE_UINT64:
 766             return self.read_uint64()
 767 
 768         raise ValueError('Json type %d is not handled' % t)
 769 
 770     def read_binary_json_type_inlined(self, t):
 771         if t == json_type.JSONB_TYPE_LITERAL:
 772             value = self.read_uint16()
 773             if value == json_type.JSONB_LITERAL_NULL:
 774                 return None
 775             elif value == json_type.JSONB_LITERAL_TRUE:
 776                 return True
 777             elif value == json_type.JSONB_LITERAL_FALSE:
 778                 return False
 779         elif t == json_type.JSONB_TYPE_INT16:
 780             return self.read_int16()
 781         elif t == json_type.JSONB_TYPE_UINT16:
 782             return self.read_uint16()
 783         elif t == json_type.JSONB_TYPE_INT32:
 784             return self.read_int32()
 785         elif t == json_type.JSONB_TYPE_UINT32:
 786             return self.read_uint32()
 787 
 788         raise ValueError('Json type %d is not handled' % t)
 789 
 790     def read_binary_json_object(self, length, large):
 791         if large:
 792             elements = self.read_uint32()
 793             size = self.read_uint32()
 794         else:
 795             elements = self.read_uint16()
 796             size = self.read_uint16()
 797 
 798         if size > length:
 799             raise ValueError('Json length is larger than packet length')
 800 
 801         if large:
 802             key_offset_lengths = [(
 803                 self.read_uint32(),  # offset (we don't actually need that)
 804                 self.read_uint16()   # size of the key
 805                 ) for _ in range(elements)]
 806         else:
 807             key_offset_lengths = [(
 808                 self.read_uint16(),  # offset (we don't actually need that)
 809                 self.read_uint16()   # size of key
 810                 ) for _ in range(elements)]
 811 
 812         value_type_inlined_lengths = [self.read_offset_or_inline(large)
 813                                       for _ in range(elements)]
 814 
 815         keys = [self.__read_decode(x[1]) for x in key_offset_lengths]
 816 
 817         out = {}
 818         for i in range(elements):
 819             if value_type_inlined_lengths[i][1] is None:
 820                 data = value_type_inlined_lengths[i][2]
 821             else:
 822                 t = value_type_inlined_lengths[i][0]
 823                 data = self.read_binary_json_type(t, length)
 824             out[keys[i]] = data
 825 
 826         return out
 827 
 828     def read_binary_json_array(self, length, large):
 829         if large:
 830             elements = self.read_uint32()
 831             size = self.read_uint32()
 832         else:
 833             elements = self.read_uint16()
 834             size = self.read_uint16()
 835 
 836         if size > length:
 837             raise ValueError('Json length is larger than packet length')
 838 
 839         values_type_offset_inline = [self.read_offset_or_inline(large) for _ in range(elements)]
 840 
 841         def _read(x):
 842             if x[1] is None:
 843                 return x[2]
 844             return self.read_binary_json_type(x[0], length)
 845 
 846         return [_read(x) for x in values_type_offset_inline]
 847 
 848     def read_offset_or_inline(self,large):
 849         t = self.read_uint8()
 850 
 851         if t in (json_type.JSONB_TYPE_LITERAL,
 852                  json_type.JSONB_TYPE_INT16, json_type.JSONB_TYPE_UINT16):
 853             return (t, None, self.read_binary_json_type_inlined(t))
 854         if large and t in (json_type.JSONB_TYPE_INT32, json_type.JSONB_TYPE_UINT32):
 855             return (t, None, self.read_binary_json_type_inlined(t))
 856 
 857         if large:
 858             return (t, self.read_uint32(), None)
 859         return (t, self.read_uint16(), None)
 860 
 861     def read_length_coded_pascal_string(self, size):
 862         """Read a string with length coded using pascal style.
 863         The string is prefixed with its length.
 864         """
 865         length = self.read_uint_by_size(size)
 866         return self.__read_decode(length)
 867     '''###################################################'''
 868 
 869     def read_header(self):
 870         '''binlog_event_header_len = 19
 871         timestamp : 4bytes
 872         type_code : 1bytes
 873         server_id : 4bytes
 874         event_length : 4bytes
 875         next_position : 4bytes
 876         flags : 2bytes
 877         '''
 878         read_byte = self.read_bytes(19)
 879         if read_byte:
 880             result = struct.unpack('=IBIIIH',read_byte)
 881             type_code,event_length,timestamp = result[1],result[3],result[0]
 882             return type_code,event_length,time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(timestamp))
 883         else:
 884             return None,None,None
 885 
 886     def read_query_event(self,event_length=None):
 887         '''fix_part = 13:
 888                 thread_id : 4bytes
 889                 execute_seconds : 4bytes
 890                 database_length : 1bytes
 891                 error_code : 2bytes
 892                 variable_block_length : 2bytes
 893             variable_part :
 894                 variable_block_length = fix_part.variable_block_length
 895                 database_name = fix_part.database_length   
 896                 sql_statement = event_header.event_length - 19 - 13 - variable_block_length - database_length - 4
 897         '''
 898         read_byte = self.read_bytes(binlog_event_fix_part)
 899         fix_result = struct.unpack('=IIBHH', read_byte)
 900         thread_id = fix_result[0]
 901         self.read_bytes(fix_result[4])
 902         read_byte = self.read_bytes(fix_result[2])
 903         database_name, = struct.unpack('{}s'.format(fix_result[2]), read_byte)
 904         statement_length = event_length - binlog_event_fix_part - binlog_event_header_len \
 905                            - fix_result[4] - fix_result[2] - binlog_quer_event_stern
 906         read_byte = self.read_bytes(statement_length)
 907         _a, sql_statement, = struct.unpack('1s{}s'.format(statement_length - 1), read_byte)
 908         return thread_id, database_name, sql_statement
 909 
 910     def read_table_map_event(self,event_length):
 911         '''
 912         fix_part = 8
 913             table_id : 6bytes
 914             Reserved : 2bytes
 915         variable_part:
 916             database_name_length : 1bytes
 917             database_name : database_name_length bytes + 1
 918             table_name_length : 1bytes
 919             table_name : table_name_length bytes + 1
 920             columns_count : 1 byte
 921             columns_type_array : one byte per column
 922             metadata_length : 1 byte
 923             metadata : ..... (only present for variable-length fields; varchar: 2 bytes, text/blob: 1 byte, time/timestamp/datetime: 1 byte,
 924                             blob/float/decimal: 1 byte, char/enum/binary/set: 2 bytes (column type id: 1 byte, metadata: 1 byte))
 925             bit_filed : 1bytes
 926             crc : 4bytes
 927             .........
 928         :param event_length: 
 929         :return: 
 930         '''
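        # Illustrative note (not in the original post): for a VARCHAR column whose maximum
        # length is 300 bytes, the 2-byte metadata value is 300 (> 255), so metadata_dict
        # stores 2 and each row image carries a 2-byte length prefix; a 50-byte VARCHAR
        # column stores 1, i.e. a 1-byte length prefix.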
 931         self.read_bytes(table_map_event_fix_length)
 932         database_name_length, = struct.unpack('B',self.read_bytes(1))
 933         database_name,_a, = struct.unpack('{}ss'.format(database_name_length),self.read_bytes(database_name_length+1))
 934         table_name_length, = struct.unpack('B',self.read_bytes(1))
 935         table_name,_a, = struct.unpack('{}ss'.format(table_name_length),self.read_bytes(table_name_length+1))
 936         colums = self.read_uint8()
 937         a = '='
 938         for i in range(colums):
 939             a += 'B'
 940         colums_type_id_list = list(struct.unpack(a,self.read_bytes(colums)))
 941         self.read_bytes(1)
 942         metadata_dict = {}
 943         bytes = 1
 944         for idex in range(len(colums_type_id_list)):
 945             if colums_type_id_list[idex] in [column_type_dict.MYSQL_TYPE_VAR_STRING,column_type_dict.MYSQL_TYPE_VARCHAR]:
 946                 metadata = self.read_uint16()
 947                 metadata_dict[idex] = 2 if metadata > 255 else 1
 948                 bytes += 2
 949             elif colums_type_id_list[idex] in [column_type_dict.MYSQL_TYPE_BLOB,column_type_dict.MYSQL_TYPE_MEDIUM_BLOB,column_type_dict.MYSQL_TYPE_LONG_BLOB,
 950                                                column_type_dict.MYSQL_TYPE_TINY_BLOB,column_type_dict.MYSQL_TYPE_JSON]:
 951                 metadata = self.read_uint8()
 952                 metadata_dict[idex] = metadata
 953                 bytes += 1
 954             elif colums_type_id_list[idex] in [column_type_dict.MYSQL_TYPE_TIMESTAMP2 ,column_type_dict.MYSQL_TYPE_DATETIME2 ,column_type_dict.MYSQL_TYPE_TIME2]:
 955                 metadata = self.read_uint8()
 956                 metadata_dict[idex] = metadata
 957                 bytes += 1
 958             elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_NEWDECIMAL:
 959                 precision = self.read_uint8()
 960                 decimals = self.read_uint8()
 961                 metadata_dict[idex] = [precision,decimals]
 962                 bytes += 2
 963             elif colums_type_id_list[idex] in  [column_type_dict.MYSQL_TYPE_FLOAT ,column_type_dict.MYSQL_TYPE_DOUBLE]:
 964                 metadata = self.read_uint8()
 965                 metadata_dict[idex] = metadata
 966                 bytes += 1
 967             elif colums_type_id_list[idex] in [column_type_dict.MYSQL_TYPE_STRING]:
 968                 _type,metadata, = struct.unpack('=BB',self.read_bytes(2))
 969                 colums_type_id_list[idex] = _type
 970                 metadata_dict[idex] = metadata
 971                 bytes += 2
 972 
 973         if self.__packet is None:
 974             self.file_data.seek(event_length - binlog_event_header_len - table_map_event_fix_length - 5 - database_name_length
 975                        - table_name_length - colums  - bytes,1)
 976         return database_name,table_name,colums_type_id_list,metadata_dict
 977 
 978     def read_gtid_event(self,event_length=None):
 979 
 980         self.read_bytes(1)
 981         uuid = self.read_bytes(16)
 982         gtid = "%s%s%s%s-%s%s-%s%s-%s%s-%s%s%s%s%s%s" %\
 983                tuple("{0:02x}".format(ord(c)) for c in uuid)
 984         gno_id = self.read_uint64()
 985         gtid += ":{}".format(gno_id)
 986         if self.__packet is None:
 987             self.file_data.seek(event_length - 1 - 16 - 8 - binlog_event_header_len,1)
 988         return gtid
 989 
 990     def read_xid_variable(self):
 991         xid_num = self.read_uint64()
 992         return xid_num
 993 
 994     def __read_decode(self,count):
 995         _value = self.read_bytes(count)
 996         return struct.unpack('{}s'.format(count),_value)[0]
 997 
 998     def read_row_event(self,event_length,colums_type_id_list,metadata_dict,type,packet=None):
 999         '''
1000         fixed_part: 10bytes
1001             table_id: 6bytes
1002             reserved: 2bytes
1003             extra: 2bytes
1004         variable_part:
1005             columns: 1bytes
1006             variable_sized: int((n+7)/8) n=columns.value
1007             variable_sized: int((n+7)/8) (for update_rows_event only)
1008             
1009             variable_sized: int((n+7)/8)
1010             row_value : variable size
1011             
1012             crc : 4bytes
1013             
1014         For varchar columns the length prefix is 2 bytes when the maximum column length exceeds 255 bytes, otherwise 1 byte
1015         '''
1016         self.read_bytes(fix_length+binlog_row_event_extra_headers)
1017         columns = self.read_uint8()
1018         columns_length = (columns+7)/8
1019         self.read_bytes(columns_length)
1020         if type == binlog_events.UPDATE_ROWS_EVENT:
1021             self.read_bytes(columns_length)
1022             bytes = binlog_event_header_len + fix_length + binlog_row_event_extra_headers + 1 + columns_length + columns_length
1023         else:
1024             bytes = binlog_event_header_len + fix_length + binlog_row_event_extra_headers + 1 + columns_length
1025         __values = []
1026         while event_length - bytes > binlog_quer_event_stern:
1027             values = []
1028             null_bit = self.read_bytes(columns_length)
1029             bytes += columns_length
1030             for idex in range(len(colums_type_id_list)):
1031                 if self.__is_null(null_bit, idex):
1032                     values.append('Null')
1033                 elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_TINY:
1034                     try:
1035                         values.append(self.read_uint8())
1036                     except:
1037                         values.append(self.read_int8())
1038                     bytes += 1
1039                 elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_SHORT:
1040                     try:
1041                         values.append(self.read_uint16())
1042                     except:
1043                         values.append(self.read_int16())
1044                     bytes += 2
1045                 elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_INT24:
1046                     try:
1047                         values.append(self.read_uint24())
1048                     except:
1049                         values.append(self.read_int24())
1050                     bytes += 3
1051                 elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_LONG:
1052                     try:
1053                         values.append(self.read_uint32())
1054                     except:
1055                         values.append(self.read_int32())
1056                     bytes += 4
1057                 elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_LONGLONG:
1058                     try:
1059                         values.append(self.read_uint64())
1060                     except:
1061                         values.append(self.read_int64())
1062                     bytes += 8
1063 
1064                 elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_NEWDECIMAL:
1065                     _list = metadata_dict[idex]
1066                     decimals,read_bytes = self.__read_new_decimal(precision=_list[0],decimals=_list[1])
1067                     values.append(decimals)
1068                     bytes += read_bytes
1069 
1070                 elif colums_type_id_list[idex] in [column_type_dict.MYSQL_TYPE_DOUBLE ,column_type_dict.MYSQL_TYPE_FLOAT]:
1071                     _read_bytes = metadata_dict[idex]
1072                     if _read_bytes == 8:
1073                         _values, = struct.unpack('<d',self.read_bytes(_read_bytes))
1074                     elif _read_bytes == 4:
1075                         _values, = struct.unpack('<f', self.read_bytes(4))
1076                     values.append(_values)
1077                     bytes += _read_bytes
1078 
1079                 elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_TIMESTAMP2:
1080                     _time,read_bytes = self.__add_fsp_to_time(datetime.datetime.fromtimestamp(self.read_int_be_by_size(4)),metadata_dict[idex])
1081                     values.append(str(_time))
1082                     bytes += read_bytes + 4
1083                 elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_DATETIME2:
1084                     _time,read_bytes = self.__read_datetime2(metadata_dict[idex])
1085                     values.append(str(_time))
1086                     bytes += 5 + read_bytes
1087                 elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_YEAR:
1088                     _date = self.read_uint8() + 1900
1089                     values.append(_date)
1090                     bytes += 1
1091                 elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_DATE:
1092                     _time = self.__read_date()
1093                     values.append(str(_time))
1094                     bytes += 3
1095 
1096                 elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_TIME2:
1097                     _time,read_bytes = self.__read_time2(metadata_dict[idex])
1098                     bytes += read_bytes
1099                     values.append(str(_time))
1100 
1101                 elif colums_type_id_list[idex] in [column_type_dict.MYSQL_TYPE_VARCHAR ,column_type_dict.MYSQL_TYPE_VAR_STRING ,column_type_dict.MYSQL_TYPE_BLOB,
1102                                                    column_type_dict.MYSQL_TYPE_TINY_BLOB,column_type_dict.MYSQL_TYPE_LONG_BLOB,column_type_dict.MYSQL_TYPE_MEDIUM_BLOB]:
1103                     _metadata = metadata_dict[idex]
1104                     value_length = self.read_uint_by_size(_metadata)
1105                     '''
1106                     if _metadata == 1:
1107                         value_length = self.read_uint8()
1108                     elif _metadata == 2:
1109                         value_length = self.read_uint16()
1110                     elif _metadata == 3:
1111                         value_length = self.read_uint24()
1112                     elif _metadata == 4:
1113                         value_length = self.read_uint32()
1114                     '''
1115                     values.append(str(self.__read_decode(value_length)))
1116                     bytes += value_length + _metadata
1117                 elif colums_type_id_list[idex] in [column_type_dict.MYSQL_TYPE_JSON]:
1118                     _metadata = metadata_dict[idex]
1119                     value_length = self.read_uint_by_size(_metadata)
1120                     values.append(str(self.read_binary_json(value_length)))
1121                     bytes += value_length + _metadata
1122                 elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_STRING:
1123                     _metadata = metadata_dict[idex]
1124                     if _metadata <= 255:
1125                         value_length = self.read_uint8()
1126                         values.append(str(self.__read_decode(value_length)))
1127                         _read = 1
1128                     else:
1129                         value_length = self.read_uint16()
1130                         values.append(str(self.__read_decode(value_length)))
1131                         _read = 2
1132                     bytes += value_length + _read
1133                 elif colums_type_id_list[idex] == column_type_dict.MYSQL_TYPE_ENUM:
1134                     _metadata = metadata_dict[idex]
1135                     if _metadata == 1:
1136                         values.append('@'+str(self.read_uint8()))
1137                     elif _metadata == 2:
1138                         values.append('@'+str(self.read_uint16()))
1139                     bytes += _metadata
1140 
1141             if type == binlog_events.UPDATE_ROWS_EVENT:
1142                 __values.append(values)
1143             else:
1144                 super(Read,self).TractionVlues(after_value=values,type=type)
1145         if self.__packet is None:
1146             self.file_data.seek(event_length - bytes,1)
1147         return __values
1148 
1149 
1150 
1151     def write_row_event(self,event_length,colums_type_id_list,metadata_dict,type):
1152         self.read_row_event(event_length,colums_type_id_list,metadata_dict,type)
1153 
1154     def delete_row_event(self,event_length,colums_type_id_list,metadata_dict,type):
1155         self.read_row_event(event_length, colums_type_id_list, metadata_dict,type)
1156 
1157     def update_row_event(self,event_length,colums_type_id_list,metadata_dict,type):
1158         values = self.read_row_event(event_length,colums_type_id_list,metadata_dict,type)
1159         __values = [values[i:i+2] for i in xrange(0,len(values),2)]
1160         for i in range(len(__values)):
1161             super(Read,self).TractionVlues(before_value=__values[i][0],after_value=__values[i][1],type=type)
1162 
1163 
1164 
1165 class CheckEvent(Echo):
1166     def __init__(self,filename=None,start_position=None,stop_position=None,
1167                  start_datetime=None,stop_datetime=None,_thread_id=None,gtid=None,rollback=None,
1168                  user=None,host=None,passwd=None,port=None):
1169         self.cloums_type_id_list = None
1170         self.metadata_dict = None
1171         self._gtid = None
1172         self._thread_id_status = None
1173         self._func = None
1174 
1175         self.start_position = start_position
1176         self.stop_position = stop_position
1177         self.start_datetime = start_datetime
1178         self.stop_datetime = stop_datetime
1179         self._thread_id = _thread_id
1180         self.gtid = gtid
1181         if rollback:
1182             if user is None or host is None or passwd is None:
1183                 Usage()
1184                 sys.exit()
1185             _rollback.rollback_status = True
1186             _rollback._myfunc = GetRollStatement(host=host,user=user,passwd=passwd,port=(port if port else 3306))
1187             if os.path.exists('tmp_rollback'):
1188                 os.remove('tmp_rollback')
1189             _rollback._myfile = open('tmp_rollback','a+')
1190 
1191         self._pos = start_position if start_position else 4
1192         if filename is None:
1193             print 'NO SUCH FILE '
1194             Usage()
1195             sys.exit()
1196 
1197         self.readbinlog = Read(start_position=start_position,filename=filename)
1198         self.__read()
1199 
1200     def __gtid_event_filter(self,type,type_code=None,event_length=None,execute_time=None):
1201         if type_code is None and event_length is None and execute_time is None:
1202             type_code, event_length, execute_time = self.readbinlog.read_header()
1203         while True:
1204             if type == 'pos' and self._pos > self.stop_position and self.stop_position \
1205                     and type_code == binlog_events.GTID_LOG_EVENT:
1206                 break
1207             elif type == 'datetime' and self.stop_datetime and execute_time > self.stop_datetime:
1208                 break
1209             if type_code is None:
1210                 break
1211             if self._gtid is None:
1212                 if type_code == binlog_events.GTID_LOG_EVENT:
1213                     gtid = self.readbinlog.read_gtid_event(event_length)
1214                     if gtid == self.gtid:
1215                         self._gtid = gtid
1216                         self.Gtid(execute_time, gtid, self._pos)
1217                 else:
1218                     self.readbinlog.file_data.seek(event_length-binlog_event_header_len,1)
1219             else:
1220                 if type_code == binlog_events.GTID_LOG_EVENT:
1221                     break
1222                 self.__read_event(type_code,event_length,execute_time)
1223             self._pos += event_length
1224             type_code, event_length, execute_time = self.readbinlog.read_header()
1225 
1226     def __thread_id_filed(self,type,type_code=None,event_length=None,execute_time=None):
1227         if type_code is None and event_length is None and execute_time is None:
1228             type_code, event_length, execute_time = self.readbinlog.read_header()
1229         while True:
1230             if type == 'pos' and self._pos > self.stop_position and self.stop_position \
1231                     and type_code == binlog_events.GTID_LOG_EVENT:
1232                 break
1233             elif type == 'datetime' and self.stop_datetime and execute_time > self.stop_datetime:
1234                 break
1235             if type_code is None:
1236                 break
1237 
1238             if type_code == binlog_events.QUERY_EVENT and self._thread_id_status is None:
1239                 thread_id, database_name, sql_statement = self.readbinlog.read_query_event(event_length)
1240                 if thread_id == self._thread_id:
1241                     if self._gtid:
1242                         self.Gtid(execute_time, self._gtid, self._pos)
1243                     self._thread_id_status = True
1244                     self.TractionHeader(thread_id, database_name, sql_statement, execute_time, self._pos)
1245                 self.readbinlog.file_data.seek(4,1)
1246             elif self._thread_id_status and type_code != binlog_events.QUERY_EVENT and type_code != binlog_events.GTID_LOG_EVENT:
1247                 self.__read_event(type_code,event_length,execute_time)
1248             elif type_code == binlog_events.QUERY_EVENT and self._thread_id_status:
1249                 thread_id, database_name, sql_statement = self.readbinlog.read_query_event(event_length)
1250                 if thread_id != self._thread_id:
1251                     self._thread_id_status = None
1252                 else:
1253                     self.TractionHeader(thread_id, database_name, sql_statement, execute_time, self._pos)
1254                 self.readbinlog.file_data.seek(4, 1)
1255             elif type_code == binlog_events.GTID_LOG_EVENT and self._thread_id_status is None:
1256                 self._gtid = self.readbinlog.read_gtid_event(event_length)
1257             else:
1258                 self.readbinlog.file_data.seek(event_length-binlog_event_header_len,1)
1259             self._pos += event_length
1260             type_code, event_length, execute_time = self.readbinlog.read_header()
1261 
1262     def __read_event(self,type_code,event_length,execute_time):
1263         if type_code == binlog_events.FORMAT_DESCRIPTION_EVENT:
1264             binlog_ver, server_ver, create_time = self.readbinlog.read_format_desc_event()
1265             self.Version(binlog_ver, server_ver, create_time)
1266             self.readbinlog.file_data.seek(event_length - binlog_event_header_len - read_format_desc_event_length,1)
1267         elif type_code == binlog_events.QUERY_EVENT:
1268             thread_id, database_name, sql_statement = self.readbinlog.read_query_event(event_length)
1269             self.TractionHeader(thread_id, database_name, sql_statement, execute_time, self._pos)
1270             self.readbinlog.file_data.seek(4,1)
1271         elif type_code == binlog_events.XID_EVENT:
1272             xid_num = self.readbinlog.read_xid_variable()
1273             self.Xid(execute_time, xid_num, self._pos)
1274             self.readbinlog.file_data.seek(4,1)
1275         elif type_code == binlog_events.TABLE_MAP_EVENT:
1276             database_name, table_name, self.cloums_type_id_list, self.metadata_dict = self.readbinlog.read_table_map_event(
1277                 event_length)
1278             self.Tablemap(execute_time, table_name)
1279         elif type_code == binlog_events.GTID_LOG_EVENT:
1280             gtid = self.readbinlog.read_gtid_event(event_length)
1281             self.Gtid(execute_time, gtid, self._pos)
1282         elif type_code == binlog_events.WRITE_ROWS_EVENT:
1283             self.readbinlog.write_row_event(event_length, self.cloums_type_id_list, self.metadata_dict, type_code)
1284         elif type_code == binlog_events.DELETE_ROWS_EVENT:
1285             self.readbinlog.delete_row_event(event_length, self.cloums_type_id_list, self.metadata_dict, type_code)
1286         elif type_code == binlog_events.UPDATE_ROWS_EVENT:
1287             self.readbinlog.update_row_event(event_length, self.cloums_type_id_list, self.metadata_dict, type_code)
1288         else:
1289             self.readbinlog.file_data.seek(event_length - binlog_event_header_len,1)
1290 
1291     def __read_binlog(self,type,type_code=None,event_length=None,execute_time=None):
1292         if type_code is None and event_length is None and execute_time is None:
1293             type_code, event_length, execute_time = self.readbinlog.read_header()
1294         while True:
1295             if type == 'pos' and self._pos > self.stop_position and self.stop_position and \
1296                             type_code == binlog_events.GTID_LOG_EVENT:
1297                 break
1298             elif type == 'datetime' and self.stop_datetime and execute_time > self.stop_datetime:
1299                 break
1300             if type_code is None:
1301                 break
1302             self.__read_event(type_code=type_code,event_length=event_length,execute_time=execute_time)
1303             self._pos += event_length
1304             type_code, event_length, execute_time = self.readbinlog.read_header()
1305 
1306     def __read(self):
1307         if self.start_position:
1308             if self.gtid:
1309                 self.__gtid_event_filter('pos')
1310             elif self._thread_id:
1311                 self.__thread_id_filed('pos')
1312             else:
1313                 self.__read_binlog('pos')
1314         elif self.start_datetime:
1315             while True:
1316                 type_code, event_length, execute_time = self.readbinlog.read_header()
1317                 if  execute_time >= self.start_datetime:
1318                     break
1319                 self.readbinlog.file_data.seek(event_length - binlog_event_header_len,1)
1320                 self._pos += event_length
1321             if self.gtid:
1322                 self.__gtid_event_filter('datetime',type_code,event_length,execute_time)
1323             elif self._thread_id:
1324                 self.__thread_id_filed('datetime',type_code,event_length,execute_time)
1325             else:
1326                 self.__read_binlog('datetime',type_code,event_length,execute_time)
1327         else:
1328             if self.gtid:
1329                 self.__gtid_event_filter('pos')
1330             elif self._thread_id:
1331                 self.__thread_id_filed('pos')
1332             else:
1333                 self.__read_binlog('pos')
1334 
1335         self.readbinlog.file_data.close()
1336         if _rollback.rollback_status:
1337             _rollback._myfunc._close()
1338             ps = PrintSql()
1339             ps.read()
1340 
1341 class ReplicationMysql(Echo):
1342     def __init__(self,block = None,server_id = None,log_file = None,
1343                  log_pos = None,host=None,user=None,passwd=None,rollback=None,
1344                  port = None,gtid = None,_thread_id = None,stop_pos=None):
1345         import pymysql
1346         _remote_filed._gtid = gtid
1347         _remote_filed._thread_id = _thread_id
1348 
1349         self._stop_pos = stop_pos
1350         self._log_file = log_file
1351         self._log_pos = log_pos
1352         self.block = block if block != None else False
1353         self.server_id = server_id if server_id != None else 133
1354         self.port = port if port != None else 3306
1355         self.connection = pymysql.connect(host=host,
1356                                      user=user,
1357                                      password=passwd,port=self.port,
1358                                      db='',
1359                                      charset='utf8mb4',
1360                                      cursorclass=pymysql.cursors.DictCursor)
1361         if rollback:
1362             _remote_filed._rollback_status = True
1363             _rollback._myfunc = GetRollStatement(host=host,user=user,passwd=passwd,port=self.port)
1364 
1365         self.ReadPack()
1366 
1367     def __checksum_enabled(self):
1368         """Return True if binlog-checksum = CRC32. Only for MySQL > 5.6"""
1369         with self.connection.cursor() as cur:
1370             sql = 'SHOW GLOBAL VARIABLES LIKE "BINLOG_CHECKSUM";'
1371             cur.execute(sql)
1372             result = cur.fetchone()
1373 
1374         if result is None:
1375             return False
1376         if 'Value' in result and result['Value'] in (None, 'NONE'):
1377             return False
1378         return True
1379 
1380     def __set_checksum(self):
1381         with self.connection.cursor() as cur:
1382             cur.execute("set @master_binlog_checksum= @@global.binlog_checksum;")
1383 
1384     def GetFile(self):
1385         with self.connection.cursor() as cur:
1386             sql = "show master status;"
1387             cur.execute(sql)
1388             result = cur.fetchone()
1389             return result['File'],result['Position']
1390 
1391     def PackeByte(self):
1392 
1393         COM_BINLOG_DUMP = 0x12
1394 
1395         if self._log_file is None:
1396             if self._log_pos is None:
1397                 self._log_file,self._log_pos = self.GetFile()
1398             else:
1399                 self._log_file,_ = self.GetFile()
1400         elif self._log_file and self._log_pos is None:
1401             self._log_pos = 4
1402 
1403         prelude = struct.pack('<i', len(self._log_file) + 11) \
1404                   + struct.pack("!B", COM_BINLOG_DUMP)
1405 
1406         prelude += struct.pack('<I', self._log_pos)      # binlog position: 4 bytes
1407         if self.block:
1408             prelude += struct.pack('<H', 0)              # flags: 2 bytes, 0 = blocking dump
1409         else:
1410             prelude += struct.pack('<H', 1)              # 0x01 = BINLOG_DUMP_NON_BLOCK
1411 
1412         prelude += struct.pack('<I', self.server_id)
1413         prelude += self._log_file.encode()
1414         return prelude
1415 
1416 
1417     def UnPack(self,pack):
1418         #header
1419         next_log_pos = None
1420         unpack = struct.unpack('<cIcIIIH', pack.read(20))
1421         _Read = Read(pack=pack)
1422         if unpack[1] != 0:
1423             execute_time = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(unpack[1]))
1424             if isinstance(unpack[2], int):
1425                 event_type = unpack[2]
1426             else:
1427                 event_type = struct.unpack("!B", unpack[2])[0]
1428             server_id,event_length,next_log_pos = unpack[3],unpack[4],unpack[5]
1429             if event_type == binlog_events.QUERY_EVENT:
1430                 thread_id, database_name, sql_statement = _Read.read_query_event(event_length)
1431                 self.TractionHeader(thread_id, database_name, sql_statement, execute_time, self._log_pos)
1432 
1433             elif event_type == binlog_events.GTID_LOG_EVENT:
1434                 gtid = _Read.read_gtid_event()
1435                 self.Gtid(execute_time,gtid,self._log_pos)
1436 
1437             elif event_type == binlog_events.XID_EVENT:
1438                 xid = _Read.read_xid_variable()
1439                 self.Xid(execute_time,xid,self._log_pos)
1440             elif event_type == binlog_events.TABLE_MAP_EVENT:
1441                 database_name, table_name, self.cloums_type_id_list, self.metadata_dict = _Read.read_table_map_event(event_length)
1442                 self.Tablemap(execute_time, table_name)
1443             elif event_type == binlog_events.WRITE_ROWS_EVENT:
1444                 _Read.write_row_event(event_length, self.cloums_type_id_list, self.metadata_dict, event_type)
1445             elif event_type == binlog_events.DELETE_ROWS_EVENT:
1446                 _Read.delete_row_event(event_length, self.cloums_type_id_list, self.metadata_dict, event_type)
1447             elif event_type == binlog_events.UPDATE_ROWS_EVENT:
1448                 _Read.update_row_event(event_length, self.cloums_type_id_list, self.metadata_dict, event_type)
1449         if next_log_pos:
1450             self._log_pos = next_log_pos
1451 
1452     def ReadPack(self):
1453         _packet = self.PackeByte()
1454         if self.__checksum_enabled():
1455             self.__set_checksum()
1456         import pymysql
1457         if pymysql.__version__ < "0.6":
1458             self.connection.wfile.write(_packet)
1459             self.connection.wfile.flush()
1460         else:
1461             self.connection._write_bytes(_packet)
1462             self.connection._next_seq_id = 1
1463 
1464         while True:
1465             try:
1466                 if pymysql.__version__ < "0.6":
1467                     pkt = self.connection.read_packet()
1468                 else:
1469                     pkt = self.connection._read_packet()
1470 
1471                 self.UnPack(pkt)
1472             except:
1473                 self.connection.close()
1474                 break
1475             if self._stop_pos and self._log_pos > self._stop_pos:
1476                 break
1477 
1478 
1479 def Usage():
1480     __usage__ = """
1481         Usage:
1482         Options:
1483             -h [--help] : print this help message
1484             -f [--file] : the binlog file path
1485             --start-position : Start reading the binlog at position N. Applies to the
1486                                first binlog passed on the command line.
1487             --stop-position :  Stop reading the binlog at position N. Applies to the
1488                                last binlog passed on the command line.
1489             --start-datetime : Start reading the binlog at the first event having a datetime
1490                                equal or posterior to the argument; the argument must be
1491                                a date and time in the local time zone, in any format
1492                                accepted by the MySQL server for DATETIME and TIMESTAMP
1493                                types, for example: 2004-12-25 11:25:56 (you should
1494                                probably use quotes for your shell to set it properly)
1495             --stop-datetime :  Stop reading the binlog at the first event having a datetime
1496                                equal or posterior to the argument; the argument must be
1497                                a date and time in the local time zone, in any format
1498                                accepted by the MySQL server for DATETIME and TIMESTAMP
1499                                types, for example: 2004-12-25 11:25:56 (you should
1500                                probably use quotes for your shell to set it properly).
1501             -t [--thread-id] : only print events executed by this thread id
1502 
1503             -g [--gtid] : only print the transaction with the given GTID
1504 
1505             -r [--rollback] : generate rollback statements
1506                 -u [--user] : user for login if not current user
1507                 -p [--passwd] : password to use when connecting to the server
1508                 -H [--host] : connect to host, default localhost
1509                 -P [--port] : port number to use for the connection, default 3306
1510             --remote : read events from a running master as a replication slave
1511                 --log-file : replication log file, default the master's current log file
1512                 --start-position : start replication at this log position, default current position
1513                 --stop-position : stop replication at this log position
1514                 --block : keep the stream open and block waiting for new events
1515                 --server-id : server id reported to the master, default 133
1516             """
1517     print __usage__
1518 
1519 def main(argv):
1520     _argv = {}
1521     try:
1522         opts, args = getopt.getopt(argv[1:], 'hrf:t:g:H:p:P:u:', ['help', 'file=', 'start-position=','stop-position=','start-datetime=',
1523                                                           'stop-datetime=','host=','user=','passwd=','port=','thread-id=','gtid=',
1524                                                           'rollback','remote','log-file=','block','server-id='])
1525     except getopt.GetoptError, err:
1526         print str(err)
1527         Usage()
1528         sys.exit(2)
1529     for o, a in opts:
1530         if o in ('-h', '--help'):
1531             Usage()
1532             sys.exit(1)
1533         elif o in ('-f', '--file'):
1534             _argv['file'] = a
1535         elif o in ('--start-position',):
1536             _argv['start-position'] = int(a)
1537         elif o in ('--stop-position',):
1538             _argv['stop-position'] = int(a)
1539         elif o in ('--start-datetime',):
1540             _argv['start-datetime'] = a
1541         elif o in ('--stop-datetime',):
1542             _argv['stop-datetime'] = a
1543         elif o in ('-t','--thread-id'):
1544             _argv['thread-id'] = int(a)
1545         elif o in ('-g','--gtid'):
1546             _argv['gtid'] = a
1547         elif o in ('-r','--rollback'):
1548             _argv['rollback'] = True
1549         elif o in ('-u','--user'):
1550             _argv['user'] = a
1551         elif o in ('-H','--host'):
1552             _argv['host'] = a
1553         elif o in ('-p','--passwd'):
1554             _argv['passwd'] = a
1555         elif o in ('-P','--port'):
1556             _argv['port'] = int(a)
1557         elif o in ('--remote',):
1558             _argv['remote'] = True
1559         elif o in ('--log-file',):
1560             _argv['log-file'] = a
1561         elif o in ('--block',):
1562             _argv['block'] = True
1563         elif o in ('--server-id',):
1564             _argv['server-id'] = int(a)
1565         else:
1566             print 'unhandled option'
1567             sys.exit(3)
1568 
1569     if 'rollback' in _argv:
1570         if 'remote' in _argv:
1571             ReplicationMysql(user=_argv['user'],port=(_argv['port'] if 'port' in _argv else None),
1572                              passwd=_argv['passwd'],host=(_argv['host'] if 'host' in _argv else 'localhost'),
1573                              log_file=(_argv['log-file'] if 'log-file' in _argv else None),
1574                              log_pos=(_argv['start-position'] if 'start-position' in _argv else None),
1575                              stop_pos=(_argv['stop-position'] if 'stop-position' in _argv else None),
1576                              server_id=(_argv['server-id'] if 'server-id' in _argv else None),
1577                              block=(_argv['block'] if 'block' in _argv else None),
1578                              gtid=(_argv['gtid'] if 'gtid' in _argv else None),
1579                              _thread_id=(_argv['thread-id'] if 'thread-id' in _argv else None),
1580                              rollback=(_argv['rollback'] if 'rollback' in _argv else None))
1581         elif 'start-position' in _argv:
1582             CheckEvent(filename=_argv['file'],gtid=(_argv['gtid'] if 'gtid' in _argv else None),
1583                        start_position=(_argv['start-position'] if 'start-position' in _argv else None),
1584                        stop_position=(_argv['stop-position'] if 'stop-position' in _argv else None),
1585                        rollback = _argv['rollback'],user=_argv['user'],
1586                        host=(_argv['host'] if 'host' in _argv else 'localhost'),
1587                        passwd=_argv['passwd'],
1588                        port=(_argv['port'] if 'port' in _argv else None),
1589                        _thread_id=(_argv['thread-id'] if 'thread-id' in _argv else None))
1590         elif 'gtid' in _argv:
1591             CheckEvent(filename=_argv['file'],
1592                        gtid=(_argv['gtid'] if 'gtid' in _argv else None),rollback=_argv['rollback'],
1593                        user=_argv['user'],
1594                        host=(_argv['host'] if 'host' in _argv else 'localhost'),
1595                        passwd=_argv['passwd'],
1596                        port=(_argv['port'] if 'port' in _argv else None))
1597         else:
1598             CheckEvent(filename=_argv['file'],rollback=_argv['rollback'],user=_argv['user'],
1599                        host=(_argv['host'] if 'host' in _argv else 'localhost'),
1600                        passwd=_argv['passwd'],
1601                        port=(_argv['port'] if 'port' in _argv else None),
1602                        _thread_id=(_argv['thread-id'] if 'thread-id' in _argv else None))
1603     elif 'remote' in _argv:
1604         ReplicationMysql(user=_argv['user'], port=(_argv['port'] if 'port' in _argv else None),
1605                          passwd=_argv['passwd'], host=(_argv['host'] if 'host' in _argv else 'localhost'),
1606                          log_file=(_argv['log-file'] if 'log-file' in _argv else None),
1607                          log_pos=(_argv['start-position'] if 'start-position' in _argv else None),
1608                          stop_pos=(_argv['stop-position'] if 'stop-position' in _argv else None),
1609                          server_id=(_argv['server-id'] if 'server-id' in _argv else None),
1610                          block=(_argv['block'] if 'block' in _argv else None),
1611                          gtid=(_argv['gtid'] if 'gtid' in _argv else None),
1612                          _thread_id=(_argv['thread-id'] if 'thread-id' in _argv else None),
1613                          rollback=(_argv['rollback'] if 'rollback' in _argv else None))
1614 
1615     elif 'start-position' in _argv:
1616         CheckEvent(start_position=(_argv['start-position'] if _argv['start-position'] else None),
1617                         filename=_argv['file'],gtid=(_argv['gtid'] if 'gtid' in _argv else None),
1618                         stop_position=(_argv['stop-position'] if 'stop-position' in _argv else None),
1619                         _thread_id=(_argv['thread-id'] if 'thread-id' in _argv else None))
1620     elif 'start-datetime' in _argv:
1621         CheckEvent(start_datetime=(_argv['start-datetime'] if 'start-datetime' in _argv else None),
1622                         filename=_argv['file'],gtid=(_argv['gtid'] if 'gtid' in _argv else None),
1623                         stop_datetime=(_argv['stop-datetime'] if 'stop-datetime' in _argv else None),
1624                         _thread_id=(_argv['thread-id'] if 'thread-id' in _argv else None))
1625     elif 'gtid' in _argv:
1626         CheckEvent(filename=_argv['file'],
1627                         gtid=(_argv['gtid'] if 'gtid' in _argv else None),
1628                         _thread_id=(_argv['thread-id'] if 'thread-id' in _argv else None))
1629     else:
1630         CheckEvent(filename=_argv['file'],
1631                         _thread_id=(_argv['thread-id'] if 'thread-id' in _argv else None))
1632 
1633 
1634 
1635 if __name__ == "__main__":
1636     main(sys.argv)
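
A minimal usage sketch for the local-file mode follows. It only drives the main()/CheckEvent entry points defined above; the script name, binlog path, positions and thread id are placeholder values, not ones from the original post:

# from the shell, assuming the script is saved as binlog_parser.py (hypothetical name):
#   python binlog_parser.py -f /data/mysql/mysql-bin.000001 --start-position 4 --stop-position 1024
# the same thing driven from Python, reusing the script's own main():
import sys
sys.argv = ['binlog_parser.py', '-f', '/data/mysql/mysql-bin.000001',
            '--start-position', '4', '--stop-position', '1024']
main(sys.argv)

# or construct CheckEvent directly, here only filtering by an executing thread id (placeholder)
CheckEvent(filename='/data/mysql/mysql-bin.000001', _thread_id=12345)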
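
And a sketch of the remote (replication protocol) mode plus rollback generation; host, credentials, the GTID and the stop position are placeholders, and the remote mode needs the pymysql package that ReplicationMysql imports:

# stream events from a running master over the replication protocol,
# stopping once the reported log position passes 4096 (placeholder)
ReplicationMysql(host='127.0.0.1', user='repl', passwd='repl_password',
                 port=3306, server_id=134, stop_pos=4096)

# generate rollback SQL for a single transaction identified by its GTID (placeholder);
# the credentials are handed to GetRollStatement in the CheckEvent constructor above
CheckEvent(filename='/data/mysql/mysql-bin.000001', rollback=True,
           gtid='9f4ce4c7-6b10-11e7-b946-000c29b4babe:123',
           user='root', passwd='root_password', host='localhost', port=3306)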

 

Reprinted from: https://www.cnblogs.com/kelvin19840813/p/8206731.html
