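The script below concurrently health-checks every Clash proxy across several nodes by calling the external controller's `/proxies/{name}/delay` API: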
```python
import asyncio
import aiohttp
from urllib.parse import quote


async def check(url, session):
    """Hit a proxy's delay endpoint, retrying up to 3 times before giving up."""
    for retry in range(3):
        try:
            async with session.get(url) as response:
                if response.status == 200:
                    print(f"Success: {url}")
                    return True
                else:
                    print(f"Failed: {url} (Status: {response.status})")
        except Exception as e:
            print(f"Error: {url} - {str(e)}")
    return False


async def prepare_check(node_url, ports, proxy_name_list):
    """Schedule a delay test for every (port, proxy) pair and run them concurrently."""
    # Percent-encode the probe URL instead of hard-coding the escapes.
    test_url = quote("http://www.gstatic.com/generate_204", safe="")
    async with aiohttp.ClientSession() as session:
        tasks = []
        for port in ports:
            proxy_url = f"{node_url}:{port}"
            for proxy in proxy_name_list:
                # Proxy names may contain spaces or non-ASCII characters,
                # so they are percent-encoded as well.
                url = (f"{proxy_url}/api/proxies/{quote(proxy, safe='')}/delay"
                       f"?timeout=5000&url={test_url}")
                tasks.append(check(url, session))
        await asyncio.gather(*tasks)


async def load_nodes_details(nodes):
    """Fetch each node's proxy list from its first port, then test every proxy on every port."""
    tasks = []
    async with aiohttp.ClientSession() as session:
        for node_url, ports in nodes.items():
            clash_tool_url = f"{node_url}:{ports[0]}"
            url = f"{clash_tool_url}/api/proxies"
            try:
                async with session.get(url) as response:
                    if response.status == 200:
                        proxies = await response.json()
                        proxy_name_list = list(proxies['proxies'].keys())
                        tasks.append(prepare_check(node_url, ports, proxy_name_list))
                    else:
                        print(f"Failed to load proxies from {url} (Status: {response.status})")
            except Exception as e:
                print(f"Error: {url} - {str(e)}")
        await asyncio.gather(*tasks)


async def main(nodes):
    await load_nodes_details(nodes)


if __name__ == "__main__":
    # Each Clash node maps to the list of external-controller ports it exposes.
    nodes = {
        "http://192.168.31.194": ['58001', '58002', '58003', '58004', '58005',
                                  '58006', '58007', '58008', '58009', '58010'],
        "http://192.168.31.201": ['32001', '32002', '32003', '32004', '32005',
                                  '32006', '32007', '32008', '32009', '32010',
                                  '32011', '32012'],
    }
    asyncio.run(main(nodes))
    print("All done!")
```
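As written, the booleans returned by `check` are discarded by `asyncio.gather`. If a pass/fail tally is wanted, the results can be kept instead; this is a minimal sketch assuming the `check` coroutine above, and the `summarize` helper is hypothetical rather than part of the original script:

```python
import asyncio
import aiohttp

async def summarize(urls):
    # Hypothetical helper: run the same delay checks but keep the
    # booleans that asyncio.gather returns, then print a tally.
    async with aiohttp.ClientSession() as session:
        results = await asyncio.gather(*(check(url, session) for url in urls))
    ok = sum(results)  # True counts as 1, so this is the number of healthy proxies
    print(f"{ok}/{len(results)} proxies passed the delay test")
    return results
```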