# -*- coding: utf-8 -*-
import os
import re
import threading
import time
from collections import deque
from urllib.parse import urlparse, parse_qs, urlencode, urlunparse

from mitmproxy import http, ctx
class ParameterReplacer:
    """Hands out ``[name=value]`` parameter pairs loaded from a text file.

    Intended as a mitmproxy addon helper: pairs are parsed from
    ``file_path`` into an in-memory FIFO queue, consumed one at a time via
    :meth:`get_next_data`, and any unused remainder can be written back
    with :meth:`save_remaining_data`.  All queue/file access is guarded by
    a single (non-reentrant) lock.
    """

    def __init__(self, file_path="output.txt"):
        self.file_path = file_path        # source/sink file for parameter data
        self.data_queue = deque()         # pending {'param_name', 'param_value'} dicts
        self.lock = threading.Lock()      # guards data_queue and file I/O
        self.last_load_time = 0           # epoch seconds of the last successful load
        self.load_data()

    def load_data(self):
        """Reload the queue from the data file (thread-safe entry point)."""
        ctx.log.info(f"📂 正在加载数据文件: {self.file_path}")
        with self.lock:
            self._load_data_locked()

    def _load_data_locked(self):
        """Refill ``data_queue`` from ``file_path``.

        Caller MUST already hold ``self.lock``.  Extracted so that
        :meth:`get_next_data` can reload without re-acquiring the
        non-reentrant lock (the original called ``load_data()`` while
        holding the lock, which deadlocks on ``threading.Lock``).
        """
        self.data_queue.clear()
        if not os.path.exists(self.file_path):
            ctx.log.warn(f"⚠️ 文件不存在: {self.file_path}")
            return
        try:
            with open(self.file_path, 'r', encoding='utf-8') as f:
                content = f.read()
            # Match bracketed entries of the form [name=value].
            pattern = r'\[([^=]+)=([^\]]+)\]'
            matches = re.findall(pattern, content)
            for param_name, param_value in matches:
                self.data_queue.append({
                    'param_name': param_name.strip(),
                    'param_value': param_value.strip()
                })
            ctx.log.info(f"✅ 已加载 {len(self.data_queue)} 组参数数据")
            self.last_load_time = time.time()
        except Exception as e:
            ctx.log.error(f"❌ 加载数据失败: {str(e)}")

    def get_next_data(self):
        """Pop and return the next parameter dict, or ``None`` when empty.

        When the queue is exhausted, a reload from the file is attempted
        at most once every 5 minutes; if the reload produced data, the
        first freshly loaded entry is returned (the original returned
        ``None`` even after a successful reload, wasting one call).
        """
        with self.lock:
            if not self.data_queue:
                # Retry loading the file at most every 5 minutes.
                if time.time() - self.last_load_time > 300:
                    self._load_data_locked()
                if not self.data_queue:
                    return None
            data = self.data_queue.popleft()
            ctx.log.info(f"🔧 使用参数: {data['param_name']}={data['param_value'][:30]}...")
            return data

    def save_remaining_data(self):
        """Persist unused queue entries back to ``file_path``.

        If every entry was consumed, the file is overwritten with a
        separator marker instead, signalling that it has been drained.
        """
        with self.lock:
            if not self.data_queue:
                try:
                    with open(self.file_path, 'w', encoding='utf-8') as f:
                        f.write("# ==================================================\n")
                    ctx.log.info("✅ 参数数据已全部使用,文件已清空")
                except Exception as e:
                    ctx.log.error(f"❌ 清空文件失败: {str(e)}")
                return
            try:
                with open(self.file_path, 'w', encoding='utf-8') as f:
                    for data in self.data_queue:
                        line = f"[{data['param_name']}={data['param_value']}]\n"
                        f.write(line)
                ctx.log.info(f"💾 已保存 {len(self.data_queue)} 组剩余参数数据")
            except Exception as e:
                ctx.log.error(f"❌ 保存数据失败: {str(e)}")