clash_check_now_node.py

# -*- coding: utf-8 -*-
# Check all Clash nodes for duplicates.
import asyncio

import httpx
from typing import Optional, Dict, Any, List, Tuple


async def check_now_node(url_and_port: str) -> Optional[str]:
    """Switch the controller to Global mode and return the currently selected node."""
    async with httpx.AsyncClient(timeout=10.0) as client:
        try:
            # Switch to Global mode so the GLOBAL group reports the active node.
            set_url = f"http://{url_and_port}/api/configs"
            set_data = {"mode": "Global"}
            set_response = await client.patch(set_url, json=set_data)
            set_response.raise_for_status()

            # Fetch the proxy list and read the GLOBAL group's current selection.
            get_url = f"http://{url_and_port}/api/proxies"
            get_response = await client.get(get_url)
            get_response.raise_for_status()
            json_data = get_response.json()
            proxies: Dict[str, Any] = json_data.get("proxies", {})
            proxy_global: Dict[str, Any] = proxies.get("GLOBAL", {})
            now_proxy: Optional[str] = proxy_global.get("now")
            return now_proxy
        except httpx.HTTPError as exc:
            print(f"Request failed for {url_and_port}: {exc}")
            return None
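
# A minimal sketch of reading the controller's current mode before forcing
# Global, so a caller could restore it afterwards. It assumes a GET on the
# same /api/configs endpoint patched above returns a JSON body with a "mode"
# field; the helper name is ours, not part of the original script.
async def get_current_mode(url_and_port: str) -> Optional[str]:
    """Return the controller's current mode, or None on failure."""
    async with httpx.AsyncClient(timeout=10.0) as client:
        try:
            response = await client.get(f"http://{url_and_port}/api/configs")
            response.raise_for_status()
            return response.json().get("mode")
        except httpx.HTTPError:
            return None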


async def batch_check_nodes(ip: str, ports: List[str]) -> Dict[str, Optional[str]]:
    """Check all nodes concurrently and map each url:port to its current node."""
    tasks = [check_now_node(f"{ip}:{port}") for port in ports]
    results = await asyncio.gather(*tasks)
    return {
        f"{ip}:{port}": result
        for port, result in zip(ports, results)
    }
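
# asyncio.gather above fires every request at once. For larger port ranges, a
# bounded variant like this sketch caps concurrency with a semaphore; the
# function name and the default limit of 5 are illustrative choices, not from
# the original script.
async def batch_check_nodes_limited(
    ip: str, ports: List[str], limit: int = 5
) -> Dict[str, Optional[str]]:
    """Like batch_check_nodes, but at most `limit` requests run at a time."""
    semaphore = asyncio.Semaphore(limit)

    async def bounded_check(port: str) -> Optional[str]:
        async with semaphore:
            return await check_now_node(f"{ip}:{port}")

    results = await asyncio.gather(*(bounded_check(port) for port in ports))
    return {f"{ip}:{port}": result for port, result in zip(ports, results)}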


def find_duplicate_nodes(results: Dict[str, Optional[str]]) -> List[Tuple[str, str]]:
    """Find pairs of URLs whose controllers report the same node."""
    node_to_urls: Dict[str, List[str]] = {}
    for url, node in results.items():
        if node:  # Only consider successfully checked nodes.
            if node not in node_to_urls:
                node_to_urls[node] = []
            node_to_urls[node].append(url)

    # Emit every pair of URLs that share a node.
    duplicates = []
    for node, urls in node_to_urls.items():
        if len(urls) > 1:
            for i in range(len(urls)):
                for j in range(i + 1, len(urls)):
                    duplicates.append((urls[i], urls[j]))
    return duplicates
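
# The grouping and pairing above can also be written with the standard
# library; this behavior-equivalent sketch is an alternative formulation,
# not the original author's version.
from collections import defaultdict
from itertools import combinations

def find_duplicate_nodes_compact(
    results: Dict[str, Optional[str]]
) -> List[Tuple[str, str]]:
    """Same output as find_duplicate_nodes, via defaultdict and combinations."""
    node_to_urls: Dict[str, List[str]] = defaultdict(list)
    for url, node in results.items():
        if node:
            node_to_urls[node].append(url)
    return [
        pair
        for urls in node_to_urls.values()
        for pair in combinations(urls, 2)
    ]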


if __name__ == "__main__":
    ip = '192.168.31.201'
    ports = [f'{58000 + i}' for i in range(1, 11)]
    results = asyncio.run(batch_check_nodes(ip, ports))

    # Print every node's status.
    for url, node in results.items():
        print(f"{url}: {node or 'check failed'}")

    # Look for and report duplicate nodes.
    duplicates = find_duplicate_nodes(results)
    if duplicates:
        print("\nDuplicate nodes found:")
        for url1, url2 in duplicates:
            print(f"{url1} and {url2} are duplicates")
    else:
        print("\nNo duplicate nodes found")
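
# Once duplicates are known, Clash's external controller can point a port at a
# different node with PUT /proxies/<group>. This follow-up sketch assumes the
# same /api prefix used above and that GLOBAL is a selectable group; the
# function name is ours, not part of the original script.
async def select_node(url_and_port: str, node_name: str) -> bool:
    """Point the GLOBAL group at node_name; return True on success."""
    async with httpx.AsyncClient(timeout=10.0) as client:
        try:
            response = await client.put(
                f"http://{url_and_port}/api/proxies/GLOBAL",
                json={"name": node_name},
            )
            response.raise_for_status()
            return True
        except httpx.HTTPError:
            return False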