# clash_tools.py
# -*- coding: utf-8 -*-
import json
import logging
import subprocess
from concurrent.futures import ThreadPoolExecutor, as_completed
from urllib.parse import quote

import httpx

from odoo import fields, models
from odoo.exceptions import UserError

_logger = logging.getLogger(__name__)


class ClashTools(models.Model):
    _name = 'clash.tools'
    _description = 'Clash Tools'

    name = fields.Char('Name')
    localhost_ip = fields.Char('Localhost IP')
    api_ip = fields.Char('API IP')
    current_node = fields.Char('Current Node')
    skip_node = fields.Char('Skip Node', help='Use semicolons to separate')
    total_nodes = fields.Integer('Total Nodes')
    use_type = fields.Selection([
        ('web3', 'WEB3'),
        ('depin', 'Depin'),
    ], string='Use Type', default='')
    line_ids = fields.One2many('clash.tools.line', 'clash_tools_id', string='Line')
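
    # Button flow: btn_get_all_node rebuilds line_ids from the controller's proxy
    # list, btn_check_all_node runs a delay test for every line in parallel, and
    # btn_select_node switches the GLOBAL group to the fastest healthy node that
    # is neither skipped nor already selected by another record.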

    def btn_get_all_node(self):
        for rec in self:
            if not rec.localhost_ip:
                continue
            # Fetch every node known to the Clash controller first
            url = rec.localhost_ip
            if 'https' in url:
                raise UserError('Local network services do not require HTTPS.')
            if 'http' not in url:
                url = 'http://' + url
            self._set_global_proxy(url, rec)
            proxies_list = self._get_all_node(url, rec)
            if proxies_list:
                rec.total_nodes = len(proxies_list)
                # Drop the existing lines
                rec.line_ids.unlink()
                # Re-create one line per node, skipping the built-in groups
                for proxies in proxies_list:
                    if proxies in ("DIRECT", "REJECT", "GLOBAL"):
                        continue
                    rec.line_ids.create({
                        'name': proxies,
                        'clash_tools_id': rec.id,
                    })

    def btn_check_all_node(self):
        for rec in self:
            if not rec.localhost_ip:
                continue
            if not rec.line_ids:
                self.btn_get_all_node()
            url = rec.localhost_ip
            if 'https' in url:
                raise UserError('Local network services do not require HTTPS.')
            if 'http' not in url:
                url = 'http://' + url
            line_count = len(rec.line_ids)
            if line_count:
                rec.total_nodes = line_count
                # Run one delay test per line in a bounded thread pool
                with ThreadPoolExecutor(max_workers=min(line_count, 32)) as executor:
                    futures = {
                        executor.submit(self._check_node, quote(line.name, safe=""), url): line
                        for line in rec.line_ids
                    }
                    # Write the results back as the workers finish
                    for future in as_completed(futures):
                        line = futures[future]
                        try:
                            res = future.result()
                            if res != 9999:
                                line.update({
                                    'delay': res.get('delay'),
                                    'mean_delay': res.get('meanDelay'),
                                    'node_state': 'ok',
                                })
                            else:
                                line.update({
                                    'delay': 9999,
                                    'mean_delay': 9999,
                                    'node_state': 'error',
                                })
                        except Exception as e:
                            _logger.error("Delay test failed for %s: %s", line.name, e)
                            line.update({
                                'delay': -1,
                                'mean_delay': -1,
                                'node_state': 'error',
                            })
            result = rec._get_current_node()
            if result:
                rec.current_node = result

    def btn_select_node(self):
        selected_node_list = []
        for rec in self:
            if not rec.localhost_ip:
                continue
            if not rec.line_ids:
                self.btn_get_all_node()
            if not rec.line_ids:
                continue
            url = rec.localhost_ip
            if 'https' in url:
                raise UserError('Local network services do not require HTTPS.')
            if 'http' not in url:
                url = 'http://' + url
            # Walk the healthy lines from lowest to highest delay
            line_delay_min = self.env['clash.tools.line'].search(
                [('clash_tools_id', '=', rec.id), ('node_state', '=', 'ok')],
                order='delay asc',
            )
            skip_node_list = rec.skip_node.split(';') if rec.skip_node else []
            for line in line_delay_min:
                # Skip nodes whose name matches any configured skip entry
                if any(skip_node and skip_node in line.name for skip_node in skip_node_list):
                    continue
                # Do not reuse a node that another record has already selected
                if line.name in selected_node_list:
                    continue
                self._use_select_node(line)
                selected_node_list.append(line.name)
                break

    def _set_global_proxy(self, url, rec):
        setting_url = url + '/api/configs'
        headers = {
            "Accept": "application/json, text/plain, */*",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "zh-CN,zh;q=0.8",
            "Connection": "keep-alive",
            "Content-Type": "application/json",
            "Host": rec.localhost_ip,
            "Origin": url,
            "Referer": url,
            "Sec-Gpc": "1",
            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36",
        }
        data = {
            "mode": "Global",
        }
        # Switch the controller to global mode with a PATCH request
        try:
            response = httpx.patch(setting_url, headers=headers, json=data)
            if response.status_code != 204:
                raise UserError(f"{rec.name} Failed to set global proxy. Status code: {response.status_code}")
        except httpx.RequestError as e:
            _logger.error("Request failed: %s", e)

    def _get_all_node(self, url, rec):
        proxies_list_url = url + '/api/proxies'
        headers = {
            "Accept": "application/json, text/plain, */*",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "zh-CN,zh;q=0.8",
            "Connection": "keep-alive",
            "Host": rec.localhost_ip,
            "Referer": url,
            "Sec-Gpc": "1",
            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36",
        }
        response = httpx.get(proxies_list_url, headers=headers)
        # Only the node names are needed here
        return list(response.json().get('proxies', {}))
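
    # Abridged shape of the /api/proxies payload that this module relies on
    # (only the keys actually read in these methods are shown):
    # {
    #     "proxies": {
    #         "GLOBAL": {"now": "<currently selected node>", ...},
    #         "<node name>": {...},
    #         ...
    #     }
    # }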

    def _check_node(self, encode_proxy_name, url):
        command = [
            "curl",
            "-X", "GET",
            f"{url}/api/proxies/{encode_proxy_name}/delay?timeout=5000&url=http:%2F%2Fwww.gstatic.com%2Fgenerate_204",
        ]
        try:
            result = subprocess.run(command, capture_output=True, text=True, check=True)
            if 'Timeout' in result.stdout:
                return 9999
            if 'An error occurred in the delay test' in result.stdout:
                return 9999
            # Successful tests answer with JSON such as {"delay": ..., "meanDelay": ...}
            return json.loads(result.stdout)
        except (subprocess.CalledProcessError, json.JSONDecodeError):
            return 9999
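
    # Minimal alternative sketch, not wired into anything above: the same delay
    # test issued through httpx instead of shelling out to curl. The endpoint and
    # JSON keys mirror the curl command in _check_node; the name _check_node_httpx
    # is illustrative only.
    def _check_node_httpx(self, encode_proxy_name, url):
        test_url = f"{url}/api/proxies/{encode_proxy_name}/delay"
        params = {"timeout": 5000, "url": "http://www.gstatic.com/generate_204"}
        try:
            response = httpx.get(test_url, params=params, timeout=10)
            if response.status_code == 200:
                return response.json()
            return 9999
        except (httpx.RequestError, ValueError):
            return 9999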

    def _get_current_node(self):
        url = self.localhost_ip
        if 'https' in url:
            raise UserError('Local network services do not require HTTPS.')
        if 'http' not in url:
            url = 'http://' + url
        headers = {
            "Accept": "application/json, text/plain, */*",
            "Accept-Encoding": "gzip, deflate, br, zstd",
            "Accept-Language": "zh-CN,zh;q=0.8",
            "Connection": "keep-alive",
            "Host": self.localhost_ip,
            "Referer": url,
            "Sec-CH-UA": '"Chromium";v="134", "Not:A-Brand";v="24", "Brave";v="134"',
            "Sec-CH-UA-Mobile": "?0",
            "Sec-CH-UA-Platform": '"macOS"',
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "same-origin",
            "Sec-GPC": "1",
            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36",
        }
        try:
            response = httpx.get(url + '/api/proxies', headers=headers)
            if response.status_code != 200:
                _logger.warning("Request failed with status %s", response.status_code)
                return ''
            json_data = response.json()
            if not json_data:
                _logger.warning("JSON data is empty")
                return ''
            # GLOBAL.now holds the node the global group currently points at
            return json_data.get("proxies", {}).get("GLOBAL", {}).get("now", '')
        except httpx.RequestError as e:
            _logger.error("Request failed: %s", e)
            return ''

    def _use_select_node(self, line):
        url = line.clash_tools_id.localhost_ip
        if 'https' in url:
            raise UserError('Local network services do not require HTTPS.')
        if 'http' not in url:
            url = 'http://' + url
        url = url + "/api/proxies/GLOBAL"
        data = {"name": line.name}
        try:
            # Point the GLOBAL group at the chosen node
            response = httpx.put(url, json=data)
            if response.status_code == 204:
                _logger.info("%s switched to proxy: %s", line.clash_tools_id.localhost_ip, line.name)
                line.clash_tools_id.update({'current_node': line.clash_tools_id._get_current_node()})
            else:
                _logger.warning("Failed to switch proxy: %s - %s", response.status_code, line.name)
        except Exception as e:
            _logger.error("Failed to switch proxy: %s", e)


class ClashToolsLine(models.Model):
    _name = 'clash.tools.line'
    _description = 'Clash Tools Line'
    _order = 'delay ASC'

    name = fields.Char('Name')
    delay = fields.Integer('Delay')
    mean_delay = fields.Integer('Mean Delay')
    node_state = fields.Selection([
        ('error', 'Error'),
        ('ok', 'OK'),
    ], string='Node State', default='')
    clash_tools_id = fields.Many2one('clash.tools', string='Clash Tools')

    def btn_use_this_node(self):
        self.ensure_one()
        url = self.clash_tools_id.localhost_ip
        if 'https' in url:
            raise UserError('Local network services do not require HTTPS.')
        if 'http' not in url:
            url = 'http://' + url
        url = url + "/api/proxies/GLOBAL"
        data = {"name": self.name}
        try:
            response = httpx.put(url, json=data)
            if response.status_code == 204:
                _logger.info("%s switched to proxy: %s", self.clash_tools_id.localhost_ip, self.name)
                self.clash_tools_id.update({'current_node': self.clash_tools_id._get_current_node()})
            else:
                _logger.warning("Failed to switch proxy: %s - %s", response.status_code, self.name)
        except Exception as e:
            _logger.error("Failed to switch proxy: %s", e)

    def check_single_node(self):
        self.ensure_one()
        url = self.clash_tools_id.localhost_ip
        if 'https' in url:
            raise UserError('Local network services do not require HTTPS.')
        if 'http' not in url:
            url = 'http://' + url
        encode_proxy_name = quote(self.name, safe="")
        command = [
            "curl",
            "-X", "GET",
            f"{url}/api/proxies/{encode_proxy_name}/delay?timeout=5000&url=http:%2F%2Fwww.gstatic.com%2Fgenerate_204",
        ]
        try:
            result = subprocess.run(command, capture_output=True, text=True, check=True)
            if 'Timeout' in result.stdout or 'An error occurred in the delay test' in result.stdout:
                res = 9999
            else:
                res = json.loads(result.stdout)
        except (subprocess.CalledProcessError, json.JSONDecodeError):
            res = 9999
        if res != 9999:
            self.update({
                'delay': res.get('delay'),
                'mean_delay': res.get('meanDelay'),
                'node_state': 'ok',
            })
        else:
            self.update({
                'delay': res,
                'mean_delay': res,
                'node_state': 'error',
            })