import os
from my_utils import save_json, load_json, setup_logger, multi_thread_order, THREAD_HALF, THREAD_FULL
from bert.obtain_inst_vec import bb2vec
import multiprocessing
from tqdm import tqdm
import warnings
from datetime import datetime
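
# This script attaches precomputed basic-block feature vectors (bb2vec over each
# block's opcodes) to the ACFG JSON files of each sample: for every file under
# ./out/json/<sample_type>, it loads the matching file in 'feature/', replaces
# the block-address lists in 'acfg_list' with the actual vectors, and writes the
# merged result to 'final/'.
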
def addr2vec(base_file_path):
    """Merge the basic-block feature vectors into the ACFG JSON for one sample."""
    # Split the path into directory and file name
    file_name = str(os.path.basename(base_file_path))
    file_path = str(os.path.dirname(base_file_path))

    # Skip files whose final output has already been generated
    if os.path.exists(os.path.join(file_path, 'final', file_name)):
        return

    # Only convert when the path actually points to a file name
    if file_name:
        file_json = load_json(base_file_path)

        # Guard against the case where the base file exists but the feature file does not
        feature_json = load_json(os.path.join(file_path, 'feature', file_name)) if os.path.exists(
            os.path.join(file_path, 'feature', file_name)) else None

        if feature_json is not None:
            # Map each basic-block address to its opcode embedding
            feature_set = {}
            for item in feature_json:
                feature_set[item['addr']] = bb2vec(item['opcode'])

            # Replace the block-address lists in every ACFG with the actual feature vectors
            for item in file_json['acfg_list']:
                bb_feature_addr_list = item['block_features']
                item['block_features'] = [feature_set[key] for key in bb_feature_addr_list]

            save_json(os.path.join(file_path, 'final', file_name), file_json)
        else:
            logger.error(f'No feature file found for {file_name}')
            return

if __name__ == '__main__':
    logger = setup_logger('feature2json', './log/feature2json.log')

    sample_type = 'malware'
    # json_path = os.path.join(f'./out/json/{sample_type}')
    json_path = os.path.join(f'./out/json/{sample_type}')
    json_files = os.listdir(json_path)

    now = datetime.now()
    formatted_now = now.strftime("%Y-%m-%d %H:%M:%S")
    print("start time:", formatted_now)

    # with multiprocessing.Pool(processes=os.cpu_count()) as pool:
    #     result = list(tqdm(pool.imap_unordered(addr2vec, [os.path.join(json_path, file) for file in json_files[:1] if os.path.isfile(os.path.join(json_path, file))]),
    #                        total=len(json_files)))
    # Dispatch addr2vec over the file list (in reverse listing order) across THREAD_FULL worker threads
    multi_thread_order(addr2vec,
                       [os.path.join(json_path, file) for file in json_files[::-1] if os.path.isfile(os.path.join(json_path, file))],
                       thread_num=THREAD_FULL)

    # for json_file in json_files:
    #     addr2vec(os.path.join(json_path, json_file))