clash_tools.py 15 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407
# -*- coding: utf-8 -*-
import json
import subprocess
from concurrent.futures import ThreadPoolExecutor, as_completed
from urllib.parse import quote

import httpx

from odoo import fields, models
from odoo.exceptions import UserError
  8. class ClashTools(models.Model):
  9. _name = 'clash.tools'
  10. _description = 'Clash Tools'
  11. name = fields.Char('Name')
  12. localhost_ip = fields.Char('Localhost IP')
  13. api_ip = fields.Char('API IP')
  14. current_node = fields.Char('Current Node')
  15. skip_node = fields.Char('Skip Node', help='Use semicolons to separate')
  16. total_nodes = fields.Integer('Total Nodes')
  17. use_type = fields.Selection([
  18. ('web3', 'WEB3'),
  19. ('depin', 'Depin'),
  20. ], string='Use Type', default='')
  21. line_ids = fields.One2many('clash.tools.line', 'clash_tools_id', string='Line')
  22. def btn_init_data(self):
  23. # 一键创建所有局域网中的 clash 连接, 因为懒
  24. data_dict = {
  25. 'web3_01': '192.168.31.201:58001',
  26. 'web3_02': '192.168.31.201:58002',
  27. 'web3_03': '192.168.31.201:58003',
  28. 'web3_04': '192.168.31.201:58004',
  29. 'web3_05': '192.168.31.201:58005',
  30. 'web3_06': '192.168.31.201:58006',
  31. 'web3_07': '192.168.31.201:58007',
  32. 'web3_08': '192.168.31.201:58008',
  33. 'web3_09': '192.168.31.201:58009',
  34. 'web3_10': '192.168.31.201:58010',
  35. 'depin_01': '192.168.31.201:32001',
  36. 'depin_02': '192.168.31.201:32002',
  37. 'depin_03': '192.168.31.201:32003',
  38. 'depin_04': '192.168.31.201:32004',
  39. 'depin_05': '192.168.31.201:32005',
  40. 'depin_06': '192.168.31.201:32006',
  41. 'depin_07': '192.168.31.201:32007',
  42. 'depin_08': '192.168.31.201:32008',
  43. 'depin_09': '192.168.31.201:32009',
  44. 'depin_10': '192.168.31.201:32010',
  45. 'depin_11': '192.168.31.201:32011',
  46. 'depin_12': '192.168.31.201:32012',
  47. }
  48. all_data_name_list = [i.name for i in self.search([])]
  49. print(all_data_name_list)
  50. for key, value in data_dict.items():
  51. if key in all_data_name_list:
  52. continue
  53. else:
  54. use_type = ''
  55. if 'depin' in key:
  56. use_type = 'depin'
  57. elif 'web3' in key:
  58. use_type = 'web3'
  59. self.create({
  60. 'name': key,
  61. 'localhost_ip': value,
  62. 'use_type': use_type
  63. })
  64. def btn_get_all_node(self):
  65. for rec in self:
  66. if not rec.localhost_ip:
  67. continue
  68. # 先获取所有节点
  69. url = rec.localhost_ip
  70. if 'https' in url:
  71. raise UserError('Local network services do not require HTTPS.')
  72. if 'http' not in url:
  73. url = 'http://' + url
  74. self._set_global_proxy(url, rec)
  75. proxies_list = self._get_all_node(url, rec)
  76. if proxies_list:
  77. rec.total_nodes = len(proxies_list)
  78. # 清空当前 line
  79. rec.line_ids.unlink()
  80. # 循环添加节点到 line
  81. for proxies in proxies_list:
  82. if proxies == "DIRECT" or proxies == "REJECT" or proxies == "GLOBAL":
  83. continue
  84. rec.line_ids.create({
  85. 'name': proxies,
  86. 'clash_tools_id': rec.id
  87. })
  88. def btn_check_all_node(self):
  89. for rec in self:
  90. if not rec.localhost_ip:
  91. continue
  92. if not rec.line_ids:
  93. self.btn_get_all_node()
  94. url = rec.localhost_ip
  95. if 'https' in url:
  96. raise UserError('Local network services do not require HTTPS.')
  97. if 'http' not in url:
  98. url = 'http://' + url
  99. line_count = len(rec.line_ids)
  100. if line_count:
  101. rec.total_nodes = line_count
  102. with ThreadPoolExecutor(max_workers=line_count) as executor:
  103. # 提交任务到线程池
  104. futures = {executor.submit(self._check_node, quote(line.name, safe=""), url): line for line in rec.line_ids}
  105. # 处理线程池返回的结果
  106. for future in as_completed(futures):
  107. line = futures[future]
  108. try:
  109. res = future.result()
  110. if res != 9999:
  111. line.update({
  112. 'delay': res.setdefault('delay'),
  113. 'mean_delay': res.setdefault('meanDelay'),
  114. 'node_state': 'ok'
  115. })
  116. else:
  117. line.update({
  118. 'delay': 9999,
  119. 'mean_delay': 9999,
  120. 'node_state': 'error'
  121. })
  122. except Exception as e:
  123. print(str(e))
  124. line.update({
  125. 'delay': -1,
  126. 'mean_delay': -1,
  127. 'node_state': 'error'
  128. })
  129. result = rec._get_current_node()
  130. if result:
  131. rec.current_node = result
  132. def btn_select_node(self):
  133. selected_node_list = []
  134. for rec in self:
  135. if not rec.localhost_ip:
  136. continue
  137. if not rec.line_ids:
  138. self.btn_get_all_node()
  139. if not rec.line_ids:
  140. continue
  141. url = rec.localhost_ip
  142. if 'https' in url:
  143. raise UserError('Local network services do not require HTTPS.')
  144. if 'http' not in url:
  145. url = 'http://' + url
  146. # 拿到 line 中, 延迟最小的节点数据
  147. line_delay_min = self.line_ids.search([('clash_tools_id', '=', rec.id), ('node_state', '=', 'ok')], order='delay asc')
  148. for line in line_delay_min:
  149. if rec.skip_node:
  150. try:
  151. skip_node_list = rec.skip_node.split(';')
  152. except:
  153. raise UserError('Please enter the node name to skip, separated by semicolons.')
  154. # 查看是否存在需要跳过的节点, 如果是, 则跳过
  155. for skip_node in skip_node_list:
  156. if skip_node in line.name:
  157. continue
  158. else:
  159. # 这里不是跳过, 然后检查一下有没使用过这个节点
  160. if line.name in selected_node_list:
  161. continue
  162. else:
  163. # 这里是既不是跳过, 有没有使用过, 就使用这个节点
  164. self._use_select_node(line)
  165. selected_node_list.append(line.name)
  166. break
  167. else:
  168. # 如果跳过节点的条件为空, 则判断是否使用过这个节点, 没有就使用
  169. if line.name in selected_node_list:
  170. continue
  171. self._use_select_node(line)
  172. selected_node_list.append(line.name)
  173. break
  174. def _set_global_proxy(self, url, rec):
  175. setting_url = url + '/api/configs'
  176. headers = {
  177. "Accept": "application/json, text/plain, */*",
  178. "Accept-Encoding": "gzip, deflate",
  179. "Accept-Language": "zh-CN,zh;q=0.8",
  180. "Connection": "keep-alive",
  181. "Content-Type": "application/json",
  182. "Host": rec.localhost_ip,
  183. "Origin": url,
  184. "Referer": url,
  185. "Sec-Gpc": "1",
  186. "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36"
  187. }
  188. data = {
  189. "mode": "Global"
  190. }
  191. # 发送PATCH请求
  192. try:
  193. response = httpx.patch(setting_url, headers=headers, json=data)
  194. if response.status_code != 204:
  195. raise UserError(f"{rec.name} Failed to set global proxy. Status code: {response.status_code}")
  196. except httpx.RequestError as e:
  197. print("Request failed:", e)
  198. def _get_all_node(self, url, rec):
  199. proxies_list_url = url + '/api/proxies'
  200. headers = {
  201. "Accept": "application/json, text/plain, */*",
  202. "Accept-Encoding": "gzip, deflate",
  203. "Accept-Language": "zh-CN,zh;q=0.8",
  204. "Connection": "keep-alive",
  205. "Host": rec.localhost_ip,
  206. "Referer": url,
  207. "Sec-Gpc": "1",
  208. "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36"
  209. }
  210. response = httpx.get(proxies_list_url, headers=headers)
  211. result = []
  212. proxies_list = response.json()
  213. for proxies in proxies_list.get('proxies'):
  214. result.append(proxies)
  215. return result
  216. def _check_node(self, encode_proxy_name, url):
  217. command = [
  218. "curl",
  219. "-X", "GET",
  220. f"{url}/api/proxies/{encode_proxy_name}/delay?timeout=5000&url=http:%2F%2Fwww.gstatic.com%2Fgenerate_204"
  221. ]
  222. try:
  223. result = subprocess.run(command, capture_output=True, text=True, check=True)
  224. if 'Timeout' in result.stdout:
  225. return 9999
  226. if 'An error occurred in the delay test' in result.stdout:
  227. return 9999
  228. res = eval(result.stdout)
  229. return res
  230. except subprocess.CalledProcessError as e:
  231. return 9999
  232. def _get_current_node(self):
  233. url = self.localhost_ip
  234. if 'https' in url:
  235. raise UserError('Local network services do not require HTTPS.')
  236. if 'http' not in url:
  237. url = 'http://' + url
  238. headers = {
  239. "Accept": "application/json, text/plain, */*",
  240. "Accept-Encoding": "gzip, deflate, br, zstd",
  241. "Accept-Language": "zh-CN,zh;q=0.8",
  242. "Connection": "keep-alive",
  243. "Host": self.localhost_ip,
  244. "Referer": url,
  245. "Sec-CH-UA": '"Chromium";v="134", "Not:A-Brand";v="24", "Brave";v="134"',
  246. "Sec-CH-UA-Mobile": "?0",
  247. "Sec-CH-UA-Platform": '"macOS"',
  248. "Sec-Fetch-Dest": "empty",
  249. "Sec-Fetch-Mode": "cors",
  250. "Sec-Fetch-Site": "same-origin",
  251. "Sec-GPC": "1",
  252. "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36"
  253. }
  254. try:
  255. response = httpx.get(url + '/api/proxies', headers=headers)
  256. if not response.json() or response.status_code != 200:
  257. print("JSON data is empty or request failed")
  258. return ''
  259. json_data = response.json()
  260. proxies = json_data.get("proxies")
  261. proxy_global = proxies.get("GLOBAL")
  262. now_proxy = proxy_global.get("now")
  263. return now_proxy
  264. except httpx.RequestError as e:
  265. print(f"Request failed: {e}")
  266. return ''
  267. def _use_select_node(self, line):
  268. url = line.clash_tools_id.localhost_ip
  269. if 'https' in url:
  270. raise UserError('Local network services do not require HTTPS.')
  271. if 'http' not in url:
  272. url = 'http://' + url
  273. url = url + "/api/proxies/GLOBAL"
  274. data = {"name": line.name}
  275. try:
  276. response = httpx.put(url, json=data)
  277. if response.status_code == 204:
  278. print(f"{line.clash_tools_id.localhost_ip} Switched to proxy: {line.name}")
  279. line.clash_tools_id.update({'current_node': line.clash_tools_id._get_current_node()})
  280. else:
  281. print(f"Failed to switch proxy: {response.status_code} - {line.name}")
  282. except Exception as e:
  283. print(f"Failed to switch proxy: {e}")
  284. class ClashToolsLine(models.Model):
  285. _name = 'clash.tools.line'
  286. _description = 'Clash Tools Line'
  287. _order = 'delay ASC'
  288. name = fields.Char('Name')
  289. delay = fields.Integer('Delay')
  290. mean_delay = fields.Integer('Mean Delay')
  291. node_state = fields.Selection([
  292. ('error', 'Error'),
  293. ('ok', 'OK'),
  294. ], string='Node State', default='')
  295. clash_tools_id = fields.Many2one('clash.tools', string='Clash Tools')
  296. def btn_use_this_node(self):
  297. url = self.clash_tools_id.localhost_ip
  298. if 'https' in url:
  299. raise UserError('Local network services do not require HTTPS.')
  300. if 'http' not in url:
  301. url = 'http://' + url
  302. url = url + "/api/proxies/GLOBAL"
  303. data = {"name": self.name}
  304. try:
  305. response = httpx.put(url, json=data)
  306. if response.status_code == 204:
  307. print(f"{self.clash_tools_id.localhost_ip} Switched to proxy: {self.name}")
  308. self.clash_tools_id.update({'current_node': self.clash_tools_id._get_current_node()})
  309. else:
  310. print(f"Failed to switch proxy: {response.status_code} - {self.name}")
  311. except Exception as e:
  312. print(f"Failed to switch proxy: {e}")
  313. def check_single_node(self):
  314. url = self.clash_tools_id.localhost_ip
  315. if 'https' in url:
  316. raise UserError('Local network services do not require HTTPS.')
  317. if 'http' not in url:
  318. url = 'http://' + url
  319. encode_proxy_name = quote(self.name, safe="")
  320. command = [
  321. "curl",
  322. "-X", "GET",
  323. f"{url}/api/proxies/{encode_proxy_name}/delay?timeout=5000&url=http:%2F%2Fwww.gstatic.com%2Fgenerate_204"
  324. ]
  325. try:
  326. result = subprocess.run(command, capture_output=True, text=True, check=True)
  327. if 'Timeout' in result.stdout:
  328. res = 9999
  329. if 'An error occurred in the delay test' in result.stdout:
  330. res = 9999
  331. res = eval(result.stdout)
  332. except subprocess.CalledProcessError as e:
  333. res = 9999
  334. if res != 9999:
  335. self.update({
  336. 'delay': res.setdefault('delay'),
  337. 'mean_delay': res.setdefault('meanDelay'),
  338. 'node_state': 'ok'
  339. })
  340. else:
  341. self.update({
  342. 'delay': res,
  343. 'mean_delay': res,
  344. 'node_state': 'error'
  345. })