21. Core Data Relationships and GUI Updates

1. Lightweight Migration

When the persistent store is opened, Core Data attempts a lightweight migration by inferring a mapping model between the old and new versions of the data model. If the mapping cannot be inferred, the migration stops and the method returns nil, with the failure described in the error parameter. In that case, [[NSApplication sharedApplication] presentError:error] is executed, presenting an error stating “The managed object model version used to open the persistent store is incompatible with the one that was used to create the persistent store.”

In this scenario, however, the data model allows the mapping to be inferred, so the lightweight migration is carried out automatically, with the old data file backed up.
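For reference, the behavior described above comes from the options passed when the persistent store is added to the coordinator. The following is a minimal sketch, not the chapter's exact code: the helper name AddStoreWithLightweightMigration is invented here, and coordinator and storeURL stand in for whatever names the surrounding code actually uses.

#import <Cocoa/Cocoa.h>
#import <CoreData/CoreData.h>

// Hypothetical helper: adds the SQLite store with lightweight-migration
// options and presents any resulting error, as described above.
static NSPersistentStore *AddStoreWithLightweightMigration(
    NSPersistentStoreCoordinator *coordinator, NSURL *storeURL)
{
    // Ask Core Data to migrate automatically and to infer the mapping model.
    NSDictionary *options = @{
        NSMigratePersistentStoresAutomaticallyOption : @YES,
        NSInferMappingModelAutomaticallyOption       : @YES
    };

    NSError *error = nil;
    NSPersistentStore *store =
        [coordinator addPersistentStoreWithType:NSSQLiteStoreType
                                  configuration:nil
                                            URL:storeURL
                                        options:options
                                          error:&error];
    if (store == nil) {
        // The mapping could not be inferred (or the store failed to open):
        // surface the error, e.g. the "incompatible model version" message.
        [[NSApplication sharedApplication] presentError:error];
    }
    return store;
}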

import idc
import ida_bytes
import ida_funcs
import ida_search
import ida_ida
import ida_segment
import ida_xref
import ida_name
import idautils
import json

# ==================== CONFIGURATION ====================
MODE = "GENERATE"          # "GENERATE" or "FIND"
SIG_FILE = "anogs_signatures.json"
PATTERN_LENGTH = 32        # Bytes to use for signature

# List of functions with old offsets (used as hints in GENERATE mode)
functions = [
    ("mrpccs_initialization", 0x249988),
    ("mrmponi_th_fb_fo", 0x259E48),
    ("monitor", 0x4BF5BC),
    ("front_back_string_node", 0x6C7DA0),
    ("lock_front_back_string_node", 0x262228),
    ("front_back_validation", 0x24E3E4),
    ("tdm_dev_info", 0x4BEE370),
    ("tdm_report", 0x4BEE88),
    ("Pushantidata3", 0x395AC4),
    ("memcpy_scanning", 0x2DEC64),
    ("info_protection_flash", 0x15419C),
    ("case_thread", 0x1423D8),
    ("case16", 0x1EB7BC),
    ("case23", 0x1D17D4),
    ("case35", 0x2A681C),
    ("solve_case35", 0x322884),
    ("case37", 0x1DDCC4),
    ("case38", 0x1DDE64),
    ("case47", 0x48D348),
]

# Semantic pattern definitions with functional descriptions
SEMANTIC_PATTERNS = [
    # Memcpy scan guards
    {"name": "memcpy_scan_guard", "type": "NOP", "context": "memcpy", "count": 4},
    # Anti-tamper protections
    {"name": "anti_flash_protection", "type": "RET", "context": "flash_operations"},
    {"name": "anti_crash_mechanism", "type": "MOV", "reg": "X0", "value": "XZR", "context": "reporting"},
    # Hash validation patterns
    {"name": "hash_validation_branch", "type": "B", "pc_offset": 0x90, "count": 3},
    {"name": "hash_zero_reg", "type": "MOV", "reg": "W9", "value": "WZR"},
    {"name": "hash_context_nop", "type": "NOP", "context": "hash_verification"},
    # Data scanning anomalies
    {"name": "scan_data_abnormal", "type": "B", "pc_offset": 0x74},
    {"name": "hash_scan_report", "type": "RET", "context": "data_reporting"},
    # TSS SDK anti-data handling
    {"name": "tss_anti_data_cmp", "type": "CMP", "reg1": "X1", "reg2": "X1"},
    {"name": "tss_anti_data_zero", "type": "MOV", "reg": "W0", "value": "WZR"},
    # CRC32 validation patterns
    {"name": "crc32_second_pass", "type": "MOV", "reg": "W0", "value": 1},
    {"name": "crc32_load_byte", "type": "LDRB", "context": "crc_calculation", "count": 2},
    {"name": "crc32_padding", "type": "NOP", "context": "crc32", "count": 2},
    {"name": "crc32_dummy_compare", "type": "CMP", "reg1": "X1", "reg2": "X1", "count": 2},
    {"name": "crc32_return_zero", "type": "MOV", "reg": "W0", "value": "WZR", "count": 2},
    {"name": "crc32_identity_move", "type": "MOV", "reg1": "W1", "reg2": "W1"},
]

# ==================== CORE FUNCTIONALITY ====================
found_functions = {}
semantic_matches = {}


def find_func_by_any_means(name, hint_addr):
    """Robust function locator using multiple techniques."""
    # 1. Direct lookup
    func = ida_funcs.get_func(hint_addr)
    if func and func.start_ea == hint_addr:
        return hint_addr

    # 2. Nearby search with code validation
    for delta in (-0x1000, 0, 0x1000):  # Check hint and surrounding areas
        ea = hint_addr + delta
        func = ida_funcs.get_func(ea)
        if func and ida_bytes.is_code(ida_bytes.get_flags(func.start_ea)):
            return func.start_ea

    # 3. Cross-reference based search
    for seg in idautils.Segments():
        if ida_segment.getseg(seg).type != ida_segment.SEG_CODE:
            continue
        ea = seg
        while ea < ida_segment.getseg(seg).end_ea:
            func = ida_funcs.get_func(ea)
            if func:
                # Check function characteristics
                if func.size() > 0x50:  # Minimum function size
                    # Check for common prefixes
                    if name.startswith("memcpy") and "memcpy" in ida_name.get_name(func.start_ea):
                        return func.start_ea
                    # Add more functionality-based checks here
                ea = func.end_ea
            else:
                ea += 1
    return None


def scan_for_semantic_pattern(pattern):
    """Search for semantic patterns throughout the binary."""
    matches = []
    seg_start = ida_ida.inf_get_min_ea()
    seg_end = ida_ida.inf_get_max_ea()
    ea = seg_start
    while ea < seg_end:
        ea = ida_search.find_code(ea, ida_search.SEARCH_DOWN)
        if ea == idc.BADADDR:  # BADADDR is exported by idc, not ida_ida
            break

        # Get instruction details
        mnem = idc.print_insn_mnem(ea).upper()
        op1 = idc.print_operand(ea, 0)
        op2 = idc.print_operand(ea, 1)

        # Check pattern type
        if pattern["type"] == "NOP":
            if "NOP" in mnem:
                matches.append(ea)
                if pattern.get("count") and len(matches) >= pattern["count"]:
                    break
        elif pattern["type"] == "RET":
            if "RET" in mnem or "BRK" in mnem:  # BRK for anti-debug RETs
                matches.append(ea)
        elif pattern["type"] == "MOV":
            if mnem == "MOV":
                if pattern.get("value") == "WZR" and "WZR" in op2:
                    matches.append(ea)
                elif pattern.get("value") == 1 and "#1" in op2:
                    matches.append(ea)
                elif pattern.get("reg") and pattern["reg"] in op1:
                    matches.append(ea)
                elif (pattern.get("reg1") and pattern["reg1"] in op1
                      and pattern.get("reg2") and pattern["reg2"] in op2):
                    matches.append(ea)
        elif pattern["type"] == "B":
            if mnem == "B":
                if pattern.get("pc_offset") and f"#{pattern['pc_offset']}" in op1:
                    matches.append(ea)
                    if pattern.get("count") and len(matches) >= pattern["count"]:
                        break
        elif pattern["type"] == "CMP":
            if mnem == "CMP" and op1 == op2:  # Dummy compare
                matches.append(ea)
        elif pattern["type"] == "LDRB":
            if mnem == "LDRB":
                matches.append(ea)

        ea += 4  # Move to next instruction
    return matches


def find_code_xrefs_to_string(substr):
    """Find functions referencing specific strings."""
    str_ea = idc.find_text(0, 1, 1, 0, substr)
    if str_ea == idc.BADADDR:
        return None
    for ref in idautils.XrefsTo(str_ea):
        func = ida_funcs.get_func(ref.frm)
        if func:
            return func.start_ea
    return None


# ==================== SIGNATURE HANDLING ====================
def generate_signatures():
    """Generate signature database in GENERATE mode."""
    signature_data = {"functions": [], "semantic_patterns": []}

    # Generate function signatures
    for name, offset in functions:
        start_ea = find_func_by_any_means(name, offset)
        if not start_ea:
            print(f"? Function {name} not found near 0x{offset:X}")
            continue
        func = ida_funcs.get_func(start_ea)
        func_len = func.end_ea - start_ea
        read_len = min(PATTERN_LENGTH, func_len)
        bytes_data = ida_bytes.get_bytes(start_ea, read_len)
        if not bytes_data:
            print(f"? Failed to read bytes for {name} at 0x{start_ea:X}")
            continue
        signature_data["functions"].append({
            "name": name,
            "old_offset": offset,
            "mapped_address": start_ea,
            "pattern_hex": bytes_data.hex(),
            "length": read_len
        })
        print(f"? Generated signature for {name} at 0x{start_ea:X}")

    # Generate semantic pattern signatures
    for pattern in SEMANTIC_PATTERNS:
        matches = scan_for_semantic_pattern(pattern)
        if matches:
            # Record first match as reference
            first_match = matches[0]
            bytes_data = ida_bytes.get_bytes(first_match, 4)
            signature_data["semantic_patterns"].append({
                "name": pattern["name"],
                "type": pattern["type"],
                "pattern_hex": bytes_data.hex(),
                "context": pattern.get("context", ""),
                "expected_count": pattern.get("count", 1)
            })
            print(f"? Generated semantic pattern for {pattern['name']}")

    # Save to file
    with open(SIG_FILE, "w") as f:
        json.dump(signature_data, f, indent=2)
    print(f"\n?? Saved {len(signature_data['functions'])} functions and "
          f"{len(signature_data['semantic_patterns'])} patterns to {SIG_FILE}")


def find_functions():
    """Locate functions and patterns in FIND mode."""
    try:
        with open(SIG_FILE, "r") as f:
            signature_data = json.load(f)
    except FileNotFoundError:
        print(f"? Signature file {SIG_FILE} not found")
        return

    # Find functions
    results = {}
    for func_data in signature_data["functions"]:
        name = func_data["name"]
        pattern_hex = func_data["pattern_hex"]
        # find_binary() expects space-separated hex bytes ("de ad be ef"),
        # so re-space the stored hex string before searching
        byte_pattern = " ".join(pattern_hex[i:i + 2] for i in range(0, len(pattern_hex), 2))
        ea = ida_search.find_binary(
            ida_ida.inf_get_min_ea(),
            ida_ida.inf_get_max_ea(),
            byte_pattern,
            16,
            ida_search.SEARCH_DOWN | ida_search.SEARCH_NEXT
        )
        if ea != idc.BADADDR:
            func = ida_funcs.get_func(ea)
            if func:
                ida_name.set_name(func.start_ea, name, ida_name.SN_FORCE)
                results[name] = func.start_ea
                print(f"? Found {name} at 0x{func.start_ea:X}")
            else:
                print(f"?? Pattern for {name} found at 0x{ea:X} but not in a function")
        else:
            print(f"? {name} not found via pattern")

    # Find semantic patterns
    semantic_results = {}
    for pattern in signature_data.get("semantic_patterns", []):
        matches = scan_for_semantic_pattern({
            "type": pattern["type"],
            "count": pattern.get("expected_count", 1),
            "context": pattern.get("context", "")
        })
        if matches:
            semantic_results[pattern["name"]] = matches
            print(f"? Found {len(matches)} instances of {pattern['name']}")
            for i, addr in enumerate(matches):
                print(f"  Instance {i + 1} at 0x{addr:X}")
        else:
            print(f"? Semantic pattern {pattern['name']} not found")

    # Context-based tracing
    context_aware_search()

    # Final report
    print("\n?? Final Results:")
    print("Functions found:")
    for name, addr in results.items():
        print(f"  {name}: 0x{addr:X}")
    print("\nSemantic patterns found:")
    for name, addrs in semantic_results.items():
        print(f"  {name}: {len(addrs)} locations")


def context_aware_search():
    """Functionality tracing using contextual relationships."""
    print("\n?? Starting context-aware tracing...")

    # 1. Find memcpy function first
    memcpy_ea = None
    for seg in idautils.Segments():
        if "extern" in idc.get_segm_name(seg):
            memcpy_ea = idc.get_name_ea_simple("memcpy")
            if memcpy_ea != idc.BADADDR:
                break

    # 2. Trace memcpy usage for scan guards
    if memcpy_ea and memcpy_ea != idc.BADADDR:  # guard against "not found"
        print("Tracing memcpy usage for scan guards...")
        for ref in idautils.CodeRefsTo(memcpy_ea, 0):
            func = ida_funcs.get_func(ref)
            if func:
                # Look for NOP patterns after memcpy calls
                for head in idautils.Heads(func.start_ea, func.end_ea):
                    if idc.print_insn_mnem(head) == "NOP":
                        if "memcpy_scan_guard" not in semantic_matches:
                            semantic_matches["memcpy_scan_guard"] = []
                        semantic_matches["memcpy_scan_guard"].append(head)

    # 3. Find report functions
    report_funcs = ["tdm_report", "mrpcs_abnormal_report"]
    for name in report_funcs:
        # NOTE: `name` is not used below; both report functions are located
        # via string cross-references
        ea = find_code_xrefs_to_string("report") or find_code_xrefs_to_string("error")
        if ea:
            # Look for MOV X0, XZR patterns
            for head in idautils.Heads(ea, idc.find_func_end(ea)):
                if (idc.print_insn_mnem(head) == "MOV"
                        and "X0" in idc.print_operand(head, 0)
                        and "XZR" in idc.print_operand(head, 1)):
                    if "error_return" not in semantic_matches:
                        semantic_matches["error_return"] = []
                    semantic_matches["error_return"].append(head)

    # 4. Hash function identification
    crypto_strings = ["SHA", "AES", "crc", "hash"]
    for s in crypto_strings:
        ea = find_code_xrefs_to_string(s)
        if ea:
            func_name = ida_name.get_name(ea)
            print(f"Found crypto function: {func_name} at 0x{ea:X}")
            # Scan for specific hash patterns
            for head in idautils.Heads(ea, idc.find_func_end(ea)):
                mnem = idc.print_insn_mnem(head)
                if mnem == "B" and "[PC,#" in idc.print_operand(head, 0):
                    if "hash_validation_branch" not in semantic_matches:
                        semantic_matches["hash_validation_branch"] = []
                    semantic_matches["hash_validation_branch"].append(head)
                elif mnem == "MOV" and "WZR" in idc.print_operand(head, 1):
                    if "hash_zero_reg" not in semantic_matches:
                        semantic_matches["hash_zero_reg"] = []
                    semantic_matches["hash_zero_reg"].append(head)


# ==================== MAIN EXECUTION ====================
if MODE == "GENERATE":
    generate_signatures()
elif MODE == "FIND":
    find_functions()
else:
    print(f"? Invalid mode: {MODE}. Use 'GENERATE' or 'FIND'")

# ==================== SEMANTIC PATTERN REPORT ====================
if semantic_matches:
    print("\n?? Found semantic patterns through context tracing:")
    for pattern, addresses in semantic_matches.items():
        print(f"  {pattern}: {len(addresses)} locations")
        for addr in addresses:
            print(f"    -> 0x{addr:X}: {idc.generate_disasm_line(addr, 0)}")

The script gets stuck partway through. Please fix it so that it still produces the same results, and also have it print a progress percentage and an estimate of the time remaining while it runs, so I can tell that the script is still working.
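One way the progress request could be handled is sketched below. It is an assumption about how this might be wired in, not a tested patch: the helper name report_progress and its parameters are invented here, the percentage is estimated from how much of the min_ea..max_ea range the scan loop has covered, and the only intended change is what gets printed, not the results.

import time

_last_report = 0.0

def report_progress(current_ea, start_ea, end_ea, t0, min_interval=1.0):
    """Print percent complete and a rough ETA, at most once per min_interval
    seconds. Progress is estimated from how much of the address range has
    been covered, so the time-remaining figure is only approximate."""
    global _last_report
    now = time.time()
    if now - _last_report < min_interval:
        return
    _last_report = now
    total = max(end_ea - start_ea, 1)
    done = min(max(current_ea - start_ea, 1), total)
    pct = 100.0 * done / total
    elapsed = now - t0
    remaining = elapsed * (total - done) / done
    print(f"  progress: {pct:5.1f}%  elapsed: {elapsed:6.1f}s  est. remaining: {remaining:6.1f}s")

# Possible call site, inside scan_for_semantic_pattern(), just after
#     ea = ida_search.find_code(ea, ida_search.SEARCH_DOWN)
# add:
#     report_progress(ea, seg_start, seg_end, t0)
# where t0 = time.time() is taken once, right before the `while ea < seg_end:` loop.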