@@ -0,0 +1,364 @@
+# -*- coding: utf-8 -*-
+import json
+import logging
+import subprocess
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from urllib.parse import quote
+
+import httpx
+
+from odoo import fields, models
+from odoo.exceptions import UserError
+
+_logger = logging.getLogger(__name__)
+
+
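+# These models drive a local Clash-style proxy controller reachable at `localhost_ip`;
+# the endpoints used below are /api/configs, /api/proxies,
+# /api/proxies/<name>/delay and /api/proxies/GLOBAL.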
+class ClashTools(models.Model):
+    _name = 'clash.tools'
+    _description = 'Clash Tools'
+
+    name = fields.Char('Name')
+
+    localhost_ip = fields.Char('Localhost IP')
+
+    api_ip = fields.Char('API IP')
+
+    current_node = fields.Char('Current Node')
+
+    skip_node = fields.Char('Skip Node', help='Use semicolons to separate')
+
+    total_nodes = fields.Integer('Total Nodes')
+
+    use_type = fields.Selection([
+        ('web3', 'WEB3'),
+        ('depin', 'Depin'),
+    ], string='Use Type', default='')
+
+    line_ids = fields.One2many('clash.tools.line', 'clash_tools_id', string='Line')
+
+    def btn_get_all_node(self):
+        for rec in self:
+            if not rec.localhost_ip:
+                continue
+
+            # Fetch every node known to the controller first
+            url = rec.localhost_ip
+            if 'https' in url:
+                raise UserError('Local network services do not require HTTPS.')
+            if 'http' not in url:
+                url = 'http://' + url
+
+            self._set_global_proxy(url, rec)
+
+            proxies_list = self._get_all_node(url, rec)
+
+            if proxies_list:
+                rec.total_nodes = len(proxies_list)
+
+            # Clear the current lines
+            rec.line_ids.unlink()
+
+            # Add one line per node
+            for proxies in proxies_list:
+                if proxies in ("DIRECT", "REJECT", "GLOBAL"):
+                    continue
+                rec.line_ids.create({
+                    'name': proxies,
+                    'clash_tools_id': rec.id
+                })
+
+    def btn_check_all_node(self):
+        for rec in self:
+            if not rec.localhost_ip:
+                continue
+            if not rec.line_ids:
+                rec.btn_get_all_node()
+
+            url = rec.localhost_ip
+            if 'https' in url:
+                raise UserError('Local network services do not require HTTPS.')
+            if 'http' not in url:
+                url = 'http://' + url
+
+            line_count = len(rec.line_ids)
+            if line_count:
+                rec.total_nodes = line_count
+
+            with ThreadPoolExecutor(max_workers=line_count or 1) as executor:
+                # Submit one delay test per line to the thread pool
+                futures = {executor.submit(self._check_node, quote(line.name, safe=""), url): line for line in rec.line_ids}
+
+                # Handle the results as they complete
+                for future in as_completed(futures):
+                    line = futures[future]
+                    try:
+                        res = future.result()
+                        if res != 9999:
+                            line.update({
+                                'delay': res.get('delay'),
+                                'mean_delay': res.get('meanDelay'),
+                                'node_state': 'ok'
+                            })
+                        else:
+                            line.update({
+                                'delay': 9999,
+                                'mean_delay': 9999,
+                                'node_state': 'error'
+                            })
+                    except Exception as e:
+                        _logger.error("Delay check failed for %s: %s", line.name, e)
+                        line.update({
+                            'delay': -1,
+                            'mean_delay': -1,
+                            'node_state': 'error'
+                        })
+
+            result = rec._get_current_node()
+            if result:
+                rec.current_node = result
+
+    def btn_select_node(self):
+        selected_node_list = []
+        for rec in self:
+            if not rec.localhost_ip:
+                continue
+            if not rec.line_ids:
+                rec.btn_get_all_node()
+
+            if not rec.line_ids:
+                continue
+
+            url = rec.localhost_ip
+            if 'https' in url:
+                raise UserError('Local network services do not require HTTPS.')
+            if 'http' not in url:
+                url = 'http://' + url
+
+            # Get the healthy lines ordered by lowest delay
+            line_delay_min = self.line_ids.search([('clash_tools_id', '=', rec.id), ('node_state', '=', 'ok')], order='delay asc')
+
+            for line in line_delay_min:
+                # Skip any node whose name matches an entry in skip_node
+                if rec.skip_node:
+                    skip_node_list = [s for s in rec.skip_node.split(';') if s]
+                    if any(skip_node in line.name for skip_node in skip_node_list):
+                        continue
+
+                # Do not reuse a node that was already selected for another record
+                if line.name in selected_node_list:
+                    continue
+
+                # Neither skipped nor already used: select this node
+                self._use_select_node(line)
+                selected_node_list.append(line.name)
+                break
+
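+    # Switch the controller to Global mode (PATCH /api/configs) so the GLOBAL selector decides the outbound node.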
+    def _set_global_proxy(self, url, rec):
+        setting_url = url + '/api/configs'
+
+        headers = {
+            "Accept": "application/json, text/plain, */*",
+            "Accept-Encoding": "gzip, deflate",
+            "Accept-Language": "zh-CN,zh;q=0.8",
+            "Connection": "keep-alive",
+            "Content-Type": "application/json",
+            "Host": rec.localhost_ip,
+            "Origin": url,
+            "Referer": url,
+            "Sec-Gpc": "1",
+            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36"
+        }
+
+        data = {
+            "mode": "Global"
+        }
+
+        # Send the PATCH request
+        try:
+            response = httpx.patch(setting_url, headers=headers, json=data)
+            if response.status_code != 204:
+                raise UserError(f"{rec.name} Failed to set global proxy. Status code: {response.status_code}")
+        except httpx.RequestError as e:
+            _logger.error("Request failed: %s", e)
+
+    def _get_all_node(self, url, rec):
+        proxies_list_url = url + '/api/proxies'
+
+        headers = {
+            "Accept": "application/json, text/plain, */*",
+            "Accept-Encoding": "gzip, deflate",
+            "Accept-Language": "zh-CN,zh;q=0.8",
+            "Connection": "keep-alive",
+            "Host": rec.localhost_ip,
+            "Referer": url,
+            "Sec-Gpc": "1",
+            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36"
+        }
+
+        response = httpx.get(proxies_list_url, headers=headers)
+
+        result = []
+
+        proxies_list = response.json()
+        for proxies in proxies_list.get('proxies', {}):
+            result.append(proxies)
+
+        return result
+
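+    # Measure one node's latency via /api/proxies/<name>/delay; the request is shelled out to curl and the JSON body parsed from stdout.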
+    def _check_node(self, encode_proxy_name, url):
+        command = [
+            "curl",
+            "-X", "GET",
+            f"{url}/api/proxies/{encode_proxy_name}/delay?timeout=5000&url=http:%2F%2Fwww.gstatic.com%2Fgenerate_204"
+        ]
+
+        try:
+            result = subprocess.run(command, capture_output=True, text=True, check=True)
+            if 'Timeout' in result.stdout:
+                return 9999
+            if 'An error occurred in the delay test' in result.stdout:
+                return 9999
+            res = json.loads(result.stdout)
+            return res
+        except (subprocess.CalledProcessError, ValueError):
+            return 9999
+
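+    # Query /api/proxies and return the node currently selected on the GLOBAL selector.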
+    def _get_current_node(self):
+        url = self.localhost_ip
+        if 'https' in url:
+            raise UserError('Local network services do not require HTTPS.')
+        if 'http' not in url:
+            url = 'http://' + url
+        headers = {
+            "Accept": "application/json, text/plain, */*",
+            "Accept-Encoding": "gzip, deflate, br, zstd",
+            "Accept-Language": "zh-CN,zh;q=0.8",
+            "Connection": "keep-alive",
+            "Host": self.localhost_ip,
+            "Referer": url,
+            "Sec-CH-UA": '"Chromium";v="134", "Not:A-Brand";v="24", "Brave";v="134"',
+            "Sec-CH-UA-Mobile": "?0",
+            "Sec-CH-UA-Platform": '"macOS"',
+            "Sec-Fetch-Dest": "empty",
+            "Sec-Fetch-Mode": "cors",
+            "Sec-Fetch-Site": "same-origin",
+            "Sec-GPC": "1",
+            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36"
+        }
+
+        try:
+            response = httpx.get(url + '/api/proxies', headers=headers)
+
+            if response.status_code != 200 or not response.json():
+                _logger.warning("JSON data is empty or request failed")
+                return ''
+            json_data = response.json()
+            proxies = json_data.get("proxies", {})
+            proxy_global = proxies.get("GLOBAL", {})
+            now_proxy = proxy_global.get("now")
+            return now_proxy
+        except httpx.RequestError as e:
+            _logger.error("Request failed: %s", e)
+            return ''
+
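+    # Point the GLOBAL selector at the given line's node (PUT /api/proxies/GLOBAL) and refresh current_node.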
+    def _use_select_node(self, line):
+        url = line.clash_tools_id.localhost_ip
+        if 'https' in url:
+            raise UserError('Local network services do not require HTTPS.')
+        if 'http' not in url:
+            url = 'http://' + url
+
+        url = url + "/api/proxies/GLOBAL"
+        data = {"name": line.name}
+        try:
+            response = httpx.put(url, json=data)
+            if response.status_code == 204:
+                _logger.info("%s switched to proxy: %s", line.clash_tools_id.localhost_ip, line.name)
+                line.clash_tools_id.update({'current_node': line.clash_tools_id._get_current_node()})
+            else:
+                _logger.warning("Failed to switch proxy: %s - %s", response.status_code, line.name)
+        except Exception as e:
+            _logger.error("Failed to switch proxy: %s", e)
+
+
+class ClashToolsLine(models.Model):
+    _name = 'clash.tools.line'
+    _description = 'Clash Tools Line'
+    _order = 'delay ASC'
+
+    name = fields.Char('Name')
+
+    delay = fields.Integer('Delay')
+
+    mean_delay = fields.Integer('Mean Delay')
+
+    node_state = fields.Selection([
+        ('error', 'Error'),
+        ('ok', 'OK'),
+    ], string='Node State', default='')
+
+    clash_tools_id = fields.Many2one('clash.tools', string='Clash Tools')
+
+    def btn_use_this_node(self):
+        url = self.clash_tools_id.localhost_ip
+        if 'https' in url:
+            raise UserError('Local network services do not require HTTPS.')
+        if 'http' not in url:
+            url = 'http://' + url
+
+        url = url + "/api/proxies/GLOBAL"
+        data = {"name": self.name}
+        try:
+            response = httpx.put(url, json=data)
+            if response.status_code == 204:
+                _logger.info("%s switched to proxy: %s", self.clash_tools_id.localhost_ip, self.name)
+                self.clash_tools_id.update({'current_node': self.clash_tools_id._get_current_node()})
+            else:
+                _logger.warning("Failed to switch proxy: %s - %s", response.status_code, self.name)
+        except Exception as e:
+            _logger.error("Failed to switch proxy: %s", e)
+
+    def check_single_node(self):
+        url = self.clash_tools_id.localhost_ip
+        if 'https' in url:
+            raise UserError('Local network services do not require HTTPS.')
+        if 'http' not in url:
+            url = 'http://' + url
+
+        encode_proxy_name = quote(self.name, safe="")
+
+        command = [
+            "curl",
+            "-X", "GET",
+            f"{url}/api/proxies/{encode_proxy_name}/delay?timeout=5000&url=http:%2F%2Fwww.gstatic.com%2Fgenerate_204"
+        ]
+
+        try:
+            result = subprocess.run(command, capture_output=True, text=True, check=True)
+            if 'Timeout' in result.stdout or 'An error occurred in the delay test' in result.stdout:
+                res = 9999
+            else:
+                res = json.loads(result.stdout)
+        except (subprocess.CalledProcessError, ValueError):
+            res = 9999
+
+        if res != 9999:
+            self.update({
+                'delay': res.get('delay'),
+                'mean_delay': res.get('meanDelay'),
+                'node_state': 'ok'
+            })
+        else:
+            self.update({
+                'delay': res,
+                'mean_delay': res,
+                'node_state': 'error'
+            })