# check_now_proxy.py
# -*- coding: utf-8 -*-
import json
import subprocess
from urllib.parse import quote

import httpx
  5. nodes = [
  6. ["192.168.31.194", ['58001', '58002', '58003', '58004', '58005', '58006', '58007', '58008', '58009',
  7. '58010'], ],
  8. ["192.168.31.201", ['32001', '32002', '32003', '32004', '32005', '32006', '32007', '32008', '32009',
  9. '32010', '32011', '32012']],
  10. ["127.0.0.1", ['17888']]
  11. ]
  12. selected_nodes = nodes[0]
  13. def check_proxy(proxy_url, choose_proxy):
  14. encode_proxy_name = quote(choose_proxy, safe="")
  15. command = [
  16. "curl",
  17. "-X", "GET",
  18. f"{proxy_url}/api/proxies/{encode_proxy_name}/delay?timeout=5000&url=http:%2F%2Fwww.gstatic.com%2Fgenerate_204"
  19. ]
  20. try:
  21. result = subprocess.run(command, capture_output=True, text=True, check=True)
  22. # print("Output:", result.stdout)
  23. if 'Timeout' in result.stdout:
  24. return "Timeout"
  25. res = eval(result.stdout).get("meanDelay")
  26. return f"meanDelay: {res}"
  27. except subprocess.CalledProcessError as e:
  28. print("Error:", e.stderr)
  29. return str(e)
  30. def check_now_proxy(ip, port):
  31. url_and_port = f"{ip}:{port}"
  32. url = f"http://{ip}:{port}/api/proxies"
  33. headers = {
  34. "Accept": "application/json, text/plain, */*",
  35. "Accept-Encoding": "gzip, deflate, br, zstd",
  36. "Accept-Language": "zh-CN,zh;q=0.8",
  37. "Connection": "keep-alive",
  38. "Host": url_and_port,
  39. "Referer": f"http://{url_and_port}/",
  40. "Sec-CH-UA": '"Chromium";v="134", "Not:A-Brand";v="24", "Brave";v="134"',
  41. "Sec-CH-UA-Mobile": "?0",
  42. "Sec-CH-UA-Platform": '"macOS"',
  43. "Sec-Fetch-Dest": "empty",
  44. "Sec-Fetch-Mode": "cors",
  45. "Sec-Fetch-Site": "same-origin",
  46. "Sec-GPC": "1",
  47. "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36"
  48. }
  49. try:
  50. response = httpx.get(url, headers=headers)
  51. json_data = response.json()
  52. if not json_data or response.status_code != 200:
  53. print("JSON data is empty or request failed")
  54. return
  55. proxies = json_data.get("proxies")
  56. proxy_global = proxies.get("GLOBAL")
  57. now_proxy = proxy_global.get("now")
  58. check_result = check_proxy(url_and_port, now_proxy)
  59. message = f"{url_and_port} --- {now_proxy} --- {check_result}"
  60. print(message)
  61. except httpx.RequestError as e:
  62. print(f"Request failed: {e}")
  63. def main():
  64. ip = selected_nodes[0]
  65. for port in selected_nodes[1]:
  66. check_now_proxy(ip, port)
  67. if __name__ == "__main__":
  68. main()