# main.py
# -*- coding: utf-8 -*-
import json
import subprocess
from urllib.parse import quote

import httpx
  5. nodes = [
  6. ["192.168.31.201", ['58001', '58002', '58003', '58004', '58005', '58006', '58007', '58008', '58009',
  7. '58010'], ],
  8. ["192.168.31.201", ['32001', '32002', '32003', '32004', '32005', '32006', '32007', '32008', '32009',
  9. '32010', '32011', '32012']],
  10. ["127.0.0.1", ['17888']]
  11. ]
  12. selected_nodes = nodes[0]
  13. def patch_config(url_and_port):
  14. url = f"http://{url_and_port}"
  15. key = "/api/configs"
  16. full_url = url + key
  17. data = {"mode": "Global"}
  18. headers = {"Content-Type": "application/json"}
  19. try:
  20. response = httpx.patch(full_url, json=data, headers=headers)
  21. state = response.status_code
  22. if state == 204:
  23. pass
  24. # print(f"{url_and_port}: 切换全局代理 OK")
  25. else:
  26. raise Exception(f"请求失败: {response.status_code}")
  27. except httpx.HTTPError as exc:
  28. print(f"请求失败: {exc}")
  29. exit(1)
  30. def check_proxy(proxy_url, choose_proxy):
  31. encode_proxy_name = quote(choose_proxy, safe="")
  32. command = [
  33. "curl",
  34. "-X", "GET",
  35. f"{proxy_url}/api/proxies/{encode_proxy_name}/delay?timeout=5000&url=http:%2F%2Fwww.gstatic.com%2Fgenerate_204"
  36. ]
  37. try:
  38. result = subprocess.run(command, capture_output=True, text=True, check=True)
  39. # print("Output:", result.stdout)
  40. if 'Timeout' in result.stdout:
  41. return "Timeout"
  42. res = eval(result.stdout).get("meanDelay")
  43. return f"meanDelay: {res}"
  44. except subprocess.CalledProcessError as e:
  45. print("Error:", e.stderr)
  46. return str(e)
  47. def check_now_proxy(url_and_port):
  48. url = f"http://{url_and_port}/api/proxies"
  49. print(url)
  50. headers = {
  51. "Accept": "application/json, text/plain, */*",
  52. "Accept-Encoding": "gzip, deflate, br, zstd",
  53. "Accept-Language": "zh-CN,zh;q=0.8",
  54. "Connection": "keep-alive",
  55. "Host": url_and_port,
  56. "Referer": f"http://{url_and_port}/",
  57. "Sec-CH-UA": '"Chromium";v="134", "Not:A-Brand";v="24", "Brave";v="134"',
  58. "Sec-CH-UA-Mobile": "?0",
  59. "Sec-CH-UA-Platform": '"macOS"',
  60. "Sec-Fetch-Dest": "empty",
  61. "Sec-Fetch-Mode": "cors",
  62. "Sec-Fetch-Site": "same-origin",
  63. "Sec-GPC": "1",
  64. "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36"
  65. }
  66. try:
  67. response = httpx.get(url, headers=headers)
  68. json_data = response.json()
  69. if not json_data or response.status_code != 200:
  70. print("JSON data is empty or request failed")
  71. return
  72. proxies = json_data.get("proxies")
  73. proxy_global = proxies.get("GLOBAL")
  74. now_proxy = proxy_global.get("now")
  75. return now_proxy
  76. except httpx.RequestError as e:
  77. print(f"Request failed: {e}")
  78. return False
  79. def run():
  80. ip = selected_nodes[0]
  81. for port in selected_nodes[1]:
  82. url_and_port = f"{ip}:{port}"
  83. # 切换全局代理
  84. patch_config(url_and_port)
  85. # 获取当前代理节点
  86. now_proxy = check_now_proxy(url_and_port)
  87. # 检测当前代理节点的延迟
  88. check_result = check_proxy(url_and_port, now_proxy)
  89. message = f"{url_and_port} --- {now_proxy} --- {check_result}\n{'*' * 88}\n"
  90. print(message)
  91. def main():
  92. run()
  93. if __name__ == "__main__":
  94. main()