check_now_proxy.py

# -*- coding: utf-8 -*-
# Get the proxy currently in use and check its latency.
from urllib.parse import quote
import json
import subprocess

import httpx

# Each entry: [host, list of control-API ports to query].
nodes = [
    ["192.168.31.201", ['58001', '58002', '58003', '58004', '58005',
                        '58006', '58007', '58008', '58009', '58010']],
    ["192.168.31.201", ['32001', '32002', '32003', '32004', '32005', '32006',
                        '32007', '32008', '32009', '32010', '32011', '32012']],
    ["127.0.0.1", ['17888']],
]
selected_nodes = nodes[0]


def check_proxy(proxy_url, choose_proxy):
    # URL-encode the proxy name so names containing "/" or spaces are safe in the path.
    encode_proxy_name = quote(choose_proxy, safe="")
    command = [
        "curl",
        "-X", "GET",
        f"{proxy_url}/api/proxies/{encode_proxy_name}/delay?timeout=5000&url=http:%2F%2Fwww.gstatic.com%2Fgenerate_204"
    ]
    try:
        result = subprocess.run(command, capture_output=True, text=True, check=True)
        # print("Output:", result.stdout)
        if 'Timeout' in result.stdout:
            return "Timeout"
        # json.loads() replaces the original eval(): eval is unsafe on
        # untrusted input and fails on JSON literals such as true/false/null.
        res = json.loads(result.stdout).get("meanDelay")
        return f"meanDelay: {res}"
    except subprocess.CalledProcessError as e:
        print("Error:", e.stderr)
        return str(e)
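

# Optional: httpx is already a dependency, so the same delay check can run
# without shelling out to curl. A minimal sketch under the same assumptions
# as check_proxy() above (the /delay endpoint and the "meanDelay" field come
# from that code; this helper name is ours, not part of the original script):
def check_proxy_httpx(url_and_port, choose_proxy):
    url = f"http://{url_and_port}/api/proxies/{quote(choose_proxy, safe='')}/delay"
    params = {"timeout": 5000, "url": "http://www.gstatic.com/generate_204"}
    try:
        data = httpx.get(url, params=params).json()
        if "meanDelay" not in data:
            return "Timeout"
        return f"meanDelay: {data['meanDelay']}"
    except httpx.RequestError as e:
        return str(e)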


def check_now_proxy(client: httpx.Client, ip, port):
    url_and_port = f"{ip}:{port}"
    url = f"http://{ip}:{port}/api/proxies"
    headers = {
        "Accept": "application/json, text/plain, */*",
        "Accept-Encoding": "gzip, deflate, br, zstd",
        "Accept-Language": "zh-CN,zh;q=0.8",
        "Connection": "keep-alive",
        "Host": url_and_port,
        "Referer": f"http://{url_and_port}/",
        "Sec-CH-UA": '"Chromium";v="134", "Not:A-Brand";v="24", "Brave";v="134"',
        "Sec-CH-UA-Mobile": "?0",
        "Sec-CH-UA-Platform": '"macOS"',
        "Sec-Fetch-Dest": "empty",
        "Sec-Fetch-Mode": "cors",
        "Sec-Fetch-Site": "same-origin",
        "Sec-GPC": "1",
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36"
    }
    try:
        response = client.get(url, headers=headers)
        # Check the status code before parsing, so a non-JSON error page
        # does not raise inside response.json().
        if response.status_code != 200:
            print(f"Request failed with status {response.status_code}")
            return None
        json_data = response.json()
        if not json_data:
            print("JSON data is empty")
            return None
        proxies = json_data.get("proxies")
        proxy_global = proxies.get("GLOBAL")
        now_proxy = proxy_global.get("now")
        check_result = check_proxy(url_and_port, now_proxy)
        message = f"{url_and_port} --- {now_proxy} --- {check_result}"
        print(message)
        return now_proxy
    except httpx.RequestError as e:
        print(f"Request failed: {e}")
        return None
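

# For reference, check_now_proxy() only depends on this slice of the
# /api/proxies response (shape inferred from the fields accessed above;
# everything else in the payload is ignored):
#
#   {"proxies": {"GLOBAL": {"now": "<name of the selected node>", ...}, ...}}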


def main():
    ip = selected_nodes[0]
    now_proxy_list = []
    with httpx.Client() as client:
        for port in selected_nodes[1]:
            now_proxy = check_now_proxy(client, ip, port)
            if now_proxy:
                now_proxy_list.append(now_proxy)
    # Check whether any current proxy node is used more than once.
    if len(now_proxy_list) != len(set(now_proxy_list)):
        print("Some proxy nodes are currently in use more than once")
    return
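

# Optional sketch: report *which* nodes are duplicated instead of only the
# fact. Counter is from the standard library; this function name is ours and
# is not called anywhere by default.
def report_duplicates(now_proxy_list):
    from collections import Counter
    for name, count in Counter(now_proxy_list).items():
        if count > 1:
            print(f"{name} is selected by {count} ports")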


if __name__ == "__main__":
    main()