# -*- coding: utf-8 -*-
# Check the latency of all proxies on every Clash node.
import asyncio
import aiohttp

# Each sub-list is one group of Clash external-controller ports on the host;
# nodes_select picks which group to test.
nodes_select = 0
ip = "192.168.31.201"
nodes = [
    ['58001', '58002', '58003', '58004', '58005', '58006', '58007', '58008', '58009', '58010'],
    ['32001', '32002', '32003', '32004', '32005', '32006', '32007', '32008', '32009', '32010', '32011', '32012']
]
node = nodes[nodes_select]

async def check(url, session):
    # Retry up to three times before reporting the proxy as unreachable.
    for retry in range(3):
        try:
            async with session.get(url) as response:
                if response.status == 200:
                    print(f"Success: {url}")
                    return True
                else:
                    print(f"Failed: {url} (Status: {response.status})")
        except Exception as e:
            print(f"Error: {url} - {str(e)}")
    return False

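# A minimal alternative sketch: on success, Clash's delay endpoint returns a JSON
# body such as {"delay": <milliseconds>}, so the checker could report the measured
# latency instead of only the HTTP status. check_delay is an illustrative name and
# is not used elsewhere in this script.
async def check_delay(url, session):
    try:
        async with session.get(url) as response:
            if response.status == 200:
                data = await response.json()
                # "delay" is the round-trip time in milliseconds reported by Clash.
                print(f"Success: {url} ({data.get('delay')} ms)")
                return data.get('delay')
            print(f"Failed: {url} (Status: {response.status})")
    except Exception as e:
        print(f"Error: {url} - {str(e)}")
    return None
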
async def prepare_check(node_url, ports, proxy_name_list):
    async with aiohttp.ClientSession() as session:
        tasks = []
        for port in ports:
            # aiohttp requires an explicit scheme in the request URL.
            proxy_url = f"http://{node_url}:{port}"
            for proxy in proxy_name_list:
                url = f"{proxy_url}/api/proxies/{proxy}/delay?timeout=5000&url=http:%2F%2Fwww.gstatic.com%2Fgenerate_204"
                tasks.append(check(url, session))
        await asyncio.gather(*tasks)

async def load_nodes_details():
    tasks = []
    async with aiohttp.ClientSession() as session:
        for port in node:
            clash_tool_url = f"http://{ip}:{port}"
            url = f"{clash_tool_url}/api/proxies"
            try:
                async with session.get(url) as response:
                    if response.status == 200:
                        proxies = await response.json()
                        proxy_name_list = list(proxies['proxies'].keys())
                        # prepare_check expects an iterable of ports, so wrap the single port in a list.
                        tasks.append(prepare_check(ip, [port], proxy_name_list))
                    else:
                        print(f"Failed to load proxies from {url} (Status: {response.status})")
            except Exception as e:
                print(f"Error: {url} - {str(e)}")
        await asyncio.gather(*tasks)

async def main():
    await load_nodes_details()

if __name__ == "__main__":
    asyncio.run(main())
    print("All done!")