2024-03-13 15:09:12 +08:00
|
|
|
|
import concurrent.futures
|
2024-03-07 15:08:07 +08:00
|
|
|
|
import os
|
|
|
|
|
import re
|
|
|
|
|
from log_utils import setup_logger
|
|
|
|
|
from tqdm import tqdm
|
|
|
|
|
|
|
|
|
|
import r2pipe
|
|
|
|
|
import pandas as pd
|
|
|
|
|
|
2024-03-09 15:26:16 +08:00
|
|
|
|
|
2024-03-13 15:09:12 +08:00
|
|
|
|
# NOTE(review): named like a lock but is a plain int, and it is never used
# anywhere in this file. If cross-thread serialization of csv writes is
# intended (csv_write appends from a thread pool), this should be a
# threading.Lock() held around csv_write — confirm intent before removing.
csv_lock = 0
|
|
|
|
|
|
|
|
|
|
|
2024-03-07 15:08:07 +08:00
|
|
|
|
def Opcode_to_csv(opcode_list, file_type):
    """Persist one batch of opcode rows for *file_type* and log progress.

    Delegates the actual serialization to :func:`csv_write`; the progress
    counter ``done_file_num`` and ``logger`` are module globals.
    """
    target = f'output_{file_type}.csv'
    csv_write(target, opcode_list)
    logger.info(f"done {done_file_num} files")
|
2024-03-13 15:09:12 +08:00
|
|
|
|
|
2024-03-07 15:08:07 +08:00
|
|
|
|
|
2024-03-09 15:26:16 +08:00
|
|
|
|
|
2024-03-07 15:08:07 +08:00
|
|
|
|
def csv_write(file_name, data: list):
    """Append *data* rows to ``./out/<file_name>`` as CSV.

    Rows are appended (``mode='a'``) without header or index, in chunks of
    1000 rows to bound the memory pandas uses per serialization call.

    Fix: create the ``./out`` directory if missing — previously the first
    write on a fresh checkout died with FileNotFoundError.

    :param file_name: bare file name (no directory component).
    :param data: list of row lists, e.g. ``[cls, sub_cls, size, corpus]``.
    :return: ``True`` on completion (even for empty *data*).

    Relies on the module-global ``logger``.
    """
    logger.info("*======================start write==================================*")
    # Robustness: ensure the output directory exists before appending.
    os.makedirs('./out', exist_ok=True)
    df = pd.DataFrame(data)
    chunksize = 1000
    for i in range(0, len(df), chunksize):
        df.iloc[i:i + chunksize].to_csv(f'./out/{file_name}', mode='a', header=False, index=False)
    logger.info(f"done rows {len(df)}")
    logger.info("*=================write to csv success==============================*")
    return True
|
2024-03-09 15:26:16 +08:00
|
|
|
|
|
|
|
|
|
|
2024-03-07 15:08:07 +08:00
|
|
|
|
def extract_opcode(disasm_text):
    """Return the mnemonic of a disassembled instruction.

    Extracts the first whitespace-delimited token of *disasm_text*
    (e.g. ``"mov eax, ebx"`` -> ``"mov"``), ignoring leading whitespace.
    Operand splitting was dead commented-out code and has been removed.

    :param disasm_text: one instruction's disassembly text from radare2.
    :return: the opcode string, or ``""`` when the input is empty or
        whitespace-only (no match).
    """
    # (?:\s+(.*))?$ keeps the match anchored to a single line: the operand
    # tail may not contain a newline, matching the original regex behavior.
    match = re.search(r"^\s*(\S+)(?:\s+(.*))?$", disasm_text)
    if match:
        return match.group(1)
    return ""
|
|
|
|
|
|
2024-03-09 15:26:16 +08:00
|
|
|
|
|
2024-03-13 15:09:12 +08:00
|
|
|
|
def get_graph_r2pipe(file_type, file_name):
    """Extract per-basic-block opcode sequences from one binary via radare2.

    Opens ``file_path/file_name`` with r2pipe (``-2`` silences stderr), runs
    full analysis (``aaa``), and for every auto-named function (``fcn.`` /
    ``loc.`` prefixes) collects the opcode mnemonics of all its basic blocks.

    :param file_type: sample class label, copied into each output row.
    :param file_name: file to analyze, joined onto the module-global
        ``file_path``.
    :return: list of rows ``[file_type, file_type, n_opcodes, 'op op ...']``,
        one per function; empty list on analysis failure.

    Fixes: ``quit()`` now runs in a ``finally`` so the radare2 subprocess is
    never leaked, and ``cmdj`` results are guarded against ``None``.
    Relies on module globals ``file_path`` and ``logger``.
    """
    r2pipe_open = r2pipe.open(os.path.join(file_path, file_name), flags=['-2'])
    opcode_Sequence = []
    try:
        # Full analysis pass; arch forced to x86 for the sample set.
        r2pipe_open.cmd("aaa")
        r2pipe_open.cmd('e arch=x86')
        # cmdj can return None (e.g. no functions recovered) — guard it.
        function_list = r2pipe_open.cmdj("aflj") or []
        for function in function_list:
            # Keep only radare2's auto-named functions/locations; skip
            # imports, symbols, etc.
            if function['name'][:4] not in ['fcn.', 'loc.']:
                continue
            block_list = r2pipe_open.cmdj("afbj @" + str(function['offset'])) or []
            block_opcode_Sequence = []
            for block in block_list:
                # Disassemble exactly the block's instructions at its address.
                disasm = r2pipe_open.cmdj("pdj " + str(block["ninstr"]) + " @" + str(block["addr"]))
                if disasm:
                    for op in disasm:
                        if op["type"] == "invalid" or op["opcode"] == "invalid":
                            continue
                        block_opcode_Sequence.append(extract_opcode(op["opcode"]))
            opcode_Sequence.append(
                [file_type, file_type, len(block_opcode_Sequence), ' '.join(block_opcode_Sequence)])
    except Exception as e:
        logger.error(f"Error: get function list failed in {file_name}")
        print(f"Error: get function list failed in {file_name} ,error info {e}")
    finally:
        # Always release the radare2 subprocess, success or failure.
        r2pipe_open.quit()
    return opcode_Sequence
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
if __name__ == '__main__':
    # Driver: fan out get_graph_r2pipe over up to 1000 samples and stream
    # the resulting rows into one CSV.
    file_type = 'malware'
    logger = setup_logger('logger', f'./log/opcode_{file_type}.log')
    file_path = os.path.join(f'/mnt/d/bishe/dataset/sample_{file_type}')
    print(f"max works {os.cpu_count()}")
    file_list = os.listdir(file_path)[:1000]
    done_file_num = 0
    # First row is the CSV header; note csv_write appends with header=False,
    # so it is emitted as an ordinary row on the first flush.
    done_list = [['class', 'sub-class', 'size', 'corpus']]
    # Fix: ThreadPoolExecutor's max_workers must be a positive int;
    # os.cpu_count()/2 yields a float and cpu_count() itself can be None.
    max_workers = max(1, (os.cpu_count() or 2) // 2)
    with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
        future_to_args = {
            executor.submit(get_graph_r2pipe, file_type, file_name): file_name
            for file_name in file_list
        }
        for future in tqdm(concurrent.futures.as_completed(future_to_args),
                           total=len(future_to_args),
                           desc=f'Processing {file_type}...'):
            try:
                tmp = future.result()
                if tmp:
                    done_list.extend(tmp)
                # Flush periodically so done_list does not grow unbounded.
                # NOTE(review): done_file_num counts flushes, not files, as
                # in the original — confirm whether a per-file count was
                # intended.
                if len(done_list) > 100000:
                    csv_write(f'output_{file_type}_test.csv', done_list)
                    done_file_num += 1
                    done_list.clear()
            except Exception as e:
                logger.error(f"Error: {e}")
                print(f"Error: {e}")
        # Final flush of any remaining rows (the original used an
        # equivalent for/else with no break in the loop).
        csv_write(f'output_{file_type}_test.csv', done_list)
|
2024-03-13 15:09:12 +08:00
|
|
|
|
|
|
|
|
|
|