Jack 10 ay önce
ebeveyn
işleme
eb49ba845e

+ 12 - 0
manual/MyGrassio/accounts.txt

@@ -0,0 +1,12 @@
+yujiec0210@gamil.com:aaaAAA111!!!
+yujieccyj01@hotmail.com:aaaAAA111!!!
+yujieccyj02@hotmail.com:aaaAAA111!!!
+yujieccyj03@hotmail.com:aaaAAA111!!!
+yujieccyj04@hotmail.com:aaaAAA111!!!
+yujieccyj05@hotmail.com:aaaAAA111!!!
+yujieccyj06@hotmail.com:aaaAAA111!!!
+yujieccyj07@hotmail.com:aaaAAA111!!!
+yujieccyj08@hotmail.com:aaaAAA111!!!
+yujieccyj09@hotmail.com:aaaAAA111!!!
+yujieccyj10@hotmail.com:aaaAAA111!!!
+yujieccyj11@hotmail.com:aaaAAA111!!!

+ 282 - 0
manual/MyGrassio/main.py

@@ -0,0 +1,282 @@
+import asyncio
+import json
+import random
+import sys
+import time
+import traceback
+import uuid
+
+import aiohttp
+from fake_useragent import UserAgent
+from tenacity import stop_after_attempt, retry, retry_if_not_exception_type, wait_random, retry_if_exception_type
+
# Configuration: input file paths and the minimum acceptable proxy score.
ACCOUNTS_FILE_PATH = "accounts.txt"  # one "email:password" entry per line
PROXIES_FILE_PATH = "proxies.txt"    # one "host:port" entry per line
MIN_PROXY_SCORE = 50  # proxies scoring below this are rejected by handle_proxy_score()
+
+
+# 日志记录
class Logger:
    """Tiny stdout logger; each level prefixes a tag to the message."""

    @staticmethod
    def _emit(tag, message):
        # Single formatting point shared by all levels.
        print(f"[{tag}] {message}")

    @staticmethod
    def info(message):
        """Log an informational message."""
        Logger._emit("INFO", message)

    @staticmethod
    def error(message):
        """Log an error message."""
        Logger._emit("ERROR", message)

    @staticmethod
    def success(message):
        """Log a success message."""
        Logger._emit("SUCCESS", message)


# Module-level singleton used by the rest of the file.
logger = Logger()
+
+
+# 异常类
class ProxyForbiddenException(Exception):
    """Raised when the websocket endpoint answers 403 for this proxy."""


class LowProxyScoreException(Exception):
    """Raised when the proxy's score is below the configured minimum."""


class ProxyScoreNotFoundException(Exception):
    """Raised when no score entry matches this worker's public IP."""


class WebsocketClosedException(Exception):
    """Raised when the server closes the websocket connection."""
+
+
+# Grass 类,负责登录、检测代理分数和持久连接
class Grass:
    """One Grass account worker.

    Logs the account in over HTTP, opens the extension websocket (optionally
    through an HTTP proxy), validates the proxy score once, then keeps the
    connection alive by sending PING and answering PONG frames.
    """

    def __init__(self, _id: int, email: str, password: str, proxy: str = None):
        # aiohttp expects a full proxy URL, so prepend the scheme.
        self.proxy = f"http://{proxy}" if proxy else None
        self.email = email
        self.password = password
        self.user_agent = UserAgent().random
        self.proxy_score = None
        self.id = _id
        self.websocket = None  # assigned by connect(); avoids AttributeError before first connect
        self.session = aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(ssl=False))

    async def start(self):
        """Entry point: log in, run the mining loop, and always close the session."""
        try:
            user_id = await self.login()
            # The email doubles as a stable browser id for this account.
            browser_id = str(self.email)
            await self.run(browser_id, user_id)
        except Exception as e:
            logger.error(f"{self.id} | Error: {e}")
        finally:
            await self.close()

    async def run(self, browser_id: str, user_id: str):
        """Reconnect loop: connect, authenticate, check the proxy score once,
        then ping/pong forever; known transient errors trigger a reconnect."""
        while True:
            try:
                await self.connection_handler()
                await self.auth_to_extension(browser_id, user_id)

                # Only validate the proxy score on the first successful connect.
                if self.proxy_score is None:
                    await asyncio.sleep(1)
                    await self.handle_proxy_score(MIN_PROXY_SCORE)

                while True:
                    await self.send_ping()
                    await self.send_pong()
                    logger.info(f"{self.id} | Mined grass.")
                    await asyncio.sleep(19.9)
            except WebsocketClosedException as e:
                logger.info(f"Websocket closed: {e}. Retrying...")
            except ConnectionResetError as e:
                logger.info(f"Connection reset: {e}. Retrying...")
            except TypeError as e:
                logger.info(f"Type error: {e}. Retrying...")
            await asyncio.sleep(1)

    @retry(stop=stop_after_attempt(30),
           retry=(retry_if_exception_type(ConnectionError) | retry_if_not_exception_type(ProxyForbiddenException)),
           wait=wait_random(0.5, 1),
           reraise=True)
    async def connection_handler(self):
        """Open the websocket, retrying transient failures.

        A ProxyForbiddenException (403 from the relay) is not retried.
        """
        logger.info(f"{self.id} | Connecting...")
        await self.connect()
        logger.info(f"{self.id} | Connected")

    @retry(stop=stop_after_attempt(10),
           retry=retry_if_not_exception_type(LowProxyScoreException),
           before_sleep=lambda retry_state, **kwargs: logger.info(f"{retry_state.outcome.exception()}"),
           wait=wait_random(5, 7),
           reraise=True)
    async def handle_proxy_score(self, min_score: int):
        """Fetch this proxy's score and cache it.

        Raises:
            ProxyScoreNotFoundException: no score entry found (retried).
            LowProxyScoreException: score below ``min_score`` (not retried).
        """
        if (proxy_score := await self.get_proxy_score_by_device_id()) is None:
            raise ProxyScoreNotFoundException(f"{self.id} | Proxy score not found for {self.proxy}. Guess Bad proxies!")
        elif proxy_score >= min_score:
            self.proxy_score = proxy_score
            logger.success(f"{self.id} | Proxy score: {self.proxy_score}")
            return True
        else:
            raise LowProxyScoreException(f"{self.id} | Too low proxy score: {proxy_score} for {self.proxy}. Exit...")

    async def connect(self):
        """Open the websocket to the Grass relay, impersonating the browser extension."""
        uri = "wss://proxy.wynd.network:4444/"
        headers = {
            'Pragma': 'no-cache',
            'Origin': 'chrome-extension://ilehaonighjijnmpnagapkhpcdbhclfg',
            'Accept-Language': 'uk-UA,uk;q=0.9,en-US;q=0.8,en;q=0.7',
            'User-Agent': self.user_agent,
            'Upgrade': 'websocket',
            'Cache-Control': 'no-cache',
            'Connection': 'Upgrade',
            'Sec-WebSocket-Version': '13',
            'Sec-WebSocket-Extensions': 'permessage-deflate; client_max_window_bits',
        }
        try:
            self.websocket = await self.session.ws_connect(uri, proxy_headers=headers, proxy=self.proxy)
        except Exception as e:
            # A 403 from the relay means this proxy itself is rejected.
            if getattr(e, 'status', None) == 403:
                raise ProxyForbiddenException(f"Low proxy score. Can't connect. Error: {e}")
            raise

    async def send_message(self, message):
        """Send a text frame over the websocket."""
        await self.websocket.send_str(message)

    async def receive_message(self):
        """Receive one frame; raise WebsocketClosedException on CLOSED frames."""
        msg = await self.websocket.receive()
        if msg.type == aiohttp.WSMsgType.CLOSED:
            raise WebsocketClosedException(f"Websocket closed: {msg}")
        return msg.data

    async def get_connection_id(self):
        """Read the next server message and return its 'id' field."""
        msg = await self.receive_message()
        return json.loads(msg)['id']

    async def auth_to_extension(self, browser_id: str, user_id: str):
        """Answer the server's AUTH challenge, posing as extension v3.3.2."""
        connection_id = await self.get_connection_id()
        message = json.dumps(
            {
                "id": connection_id,
                "origin_action": "AUTH",
                "result": {
                    "browser_id": browser_id,
                    "user_id": user_id,
                    "user_agent": self.user_agent,
                    "timestamp": int(time.time()),
                    "device_type": "extension",
                    "version": "3.3.2"
                }
            }
        )
        await self.send_message(message)

    async def send_ping(self):
        """Send a keep-alive PING with a fresh UUID."""
        message = json.dumps(
            {"id": str(uuid.uuid4()), "version": "1.0.0", "action": "PING", "data": {}}
        )
        await self.send_message(message)

    async def send_pong(self):
        """Echo the server's message id back as a PONG."""
        connection_id = await self.get_connection_id()
        message = json.dumps(
            {"id": connection_id, "origin_action": "PONG"}
        )
        await self.send_message(message)

    async def login(self):
        """POST credentials to the Grass API and return the parsed response.

        NOTE(review): the full JSON payload is returned and run() uses it as
        ``user_id`` — confirm whether a field such as ``['data']['userId']``
        should be extracted instead.

        Raises:
            aiohttp.ClientConnectionError: on any non-200 response.
        """
        url = 'https://api.getgrass.io/login'
        json_data = {
            'password': self.password,
            'username': self.email,
        }
        headers = {
            'authority': 'api.getgrass.io',
            'accept': 'application/json, text/plain, */*',
            'accept-language': 'uk-UA,uk;q=0.9,en-US;q=0.8,en;q=0.7',
            'content-type': 'application/json',
            'origin': 'https://app.getgrass.io',
            'referer': 'https://app.getgrass.io/',
            'sec-ch-ua': '"Not_A Brand";v="8", "Chromium";v="120", "Google Chrome";v="120"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"Windows"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'same-site',
            'user-agent': self.user_agent,
        }
        # async with releases the connection back to the pool.
        async with self.session.post(url, headers=headers, json=json_data, proxy=self.proxy) as response:
            if response.status != 200:
                raise aiohttp.ClientConnectionError(f"login | {await response.text()}")
            return await response.json()

    async def get_proxy_score_by_device_id(self):
        """Return the final_score of the device whose IP matches ours, or None."""
        url = 'https://api.getgrass.io/extension/user-score'
        headers = {
            'authority': 'api.getgrass.io',
            'accept': 'application/json, text/plain, */*',
            'accept-language': 'uk-UA,uk;q=0.9,en-US;q=0.8,en;q=0.7',
            'content-type': 'application/json',
            'origin': 'https://app.getgrass.io',
            'referer': 'https://app.getgrass.io/',
            'sec-ch-ua': '"Not_A Brand";v="8", "Chromium";v="120", "Google Chrome";v="120"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"Windows"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'same-site',
            'user-agent': self.user_agent,
        }
        async with self.session.get(url, headers=headers, proxy=self.proxy) as response:
            res_json = await response.json()
        if not (isinstance(res_json, dict) and res_json.get("data", None) is not None):
            return
        devices = res_json['data']['currentDeviceData']
        self.ip = await self.get_ip()
        return next((device['final_score'] for device in devices
                     if device['device_ip'] == self.ip), None)

    async def get_ip(self):
        """Return this worker's public IP as seen through the proxy."""
        async with self.session.get('https://api.ipify.org', proxy=self.proxy) as response:
            return await response.text()

    async def close(self):
        """Close the underlying aiohttp session (safe to call more than once)."""
        if self.session:
            await self.session.close()
+
+
+# 主函数
async def main():
    """Load accounts and proxies from disk and run one Grass worker per account.

    Proxies are assigned round-robin; blank lines in either file are ignored
    (the original crashed on a trailing newline in accounts.txt because
    ``"".split(":")`` cannot unpack into two values).
    """
    with open(ACCOUNTS_FILE_PATH, 'r') as f:
        accounts = [line.strip() for line in f if line.strip()]

    with open(PROXIES_FILE_PATH, 'r') as f:
        proxies = [line.strip() for line in f if line.strip()]

    grass_instances = []
    tasks = []
    for i, account in enumerate(accounts):
        # maxsplit=1 keeps passwords that themselves contain ':' intact.
        email, password = account.split(":", 1)
        proxy = proxies[i % len(proxies)] if proxies else None
        grass = Grass(i, email, password, proxy)
        grass_instances.append(grass)
        tasks.append(grass.start())

    try:
        await asyncio.gather(*tasks)
    except Exception as e:
        logger.error(f"An error occurred: {e}")
    finally:
        # Make sure every aiohttp session is closed even on early failure.
        for grass in grass_instances:
            await grass.close()
+
+
if __name__ == "__main__":
    # The original set the Selector policy and then built a ProactorEventLoop
    # directly, silently ignoring the policy it had just installed. Install
    # the selector policy (presumably chosen for aiohttp proxy/SSL
    # compatibility — confirm) and let asyncio.run manage the loop.
    if sys.platform == 'win32':
        asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
    asyncio.run(main())

+ 12 - 0
manual/MyGrassio/proxies.txt

@@ -0,0 +1,12 @@
+192.168.31.201:59001
+192.168.31.201:59002
+192.168.31.201:59003
+192.168.31.201:59004
+192.168.31.201:59005
+192.168.31.201:59006
+192.168.31.201:59007
+192.168.31.201:59008
+192.168.31.201:59009
+192.168.31.201:59010
+192.168.31.201:59011
+192.168.31.201:59012

+ 2 - 2
manual/clash-multi/restart.sh

@@ -28,9 +28,9 @@ fi
 echo "Waiting for 3 seconds before executing switch_proxy.py..."
 sleep 3
 
-# 执行 switch_proxy.py
+# 执行 set_global_switch_proxy.py
 echo "Executing switch_proxy.py..."
-python switch_proxy.py
+python set_global_switch_proxy.py
 
 # 检查switch_proxy.py是否成功执行
 if [ $? -eq 0 ]; then

+ 1 - 1
manual/clash/merge.yaml

@@ -1,5 +1,5 @@
 proxies:
-  - {"name":"剩余流量:89.96 GB","type":"trojan","server":"cn1.cdn.xfltd-cdn.top","port":12001,"password":"d058a1cb-5dbd-4664-9d8b-ae26bc699ed4","udp":true,"sni":"cdn.alibaba.com","skip-cert-verify":true}
+  - {"name":"剩余流量:76.66 GB","type":"trojan","server":"cn1.cdn.xfltd-cdn.top","port":12001,"password":"d058a1cb-5dbd-4664-9d8b-ae26bc699ed4","udp":true,"sni":"cdn.alibaba.com","skip-cert-verify":true}
   - {"name":"套餐到期:长期有效","type":"trojan","server":"cn1.cdn.xfltd-cdn.top","port":12001,"password":"d058a1cb-5dbd-4664-9d8b-ae26bc699ed4","udp":true,"sni":"cdn.alibaba.com","skip-cert-verify":true}
   - {"name":"🇭🇰 香港 01","type":"trojan","server":"cn1.cdn.xfltd-cdn.top","port":12001,"password":"d058a1cb-5dbd-4664-9d8b-ae26bc699ed4","udp":true,"sni":"cdn.alibaba.com","skip-cert-verify":true}
   - {"name":"🇭🇰 香港 02","type":"trojan","server":"cn1.cdn.xfltd-cdn.top","port":12002,"password":"d058a1cb-5dbd-4664-9d8b-ae26bc699ed4","udp":true,"sni":"cdn.alibaba.com","skip-cert-verify":true}

+ 0 - 0
manual/clash/main.py → manual/clash/merge2config.py


+ 138 - 0
manual/clash/set_global_switch_proxy_random.py

@@ -0,0 +1,138 @@
+import random
+
+import httpx
+import time
+import json
+import uuid
+import logging
+import websockets
+from typing import Optional, List
+from websockets import WebSocketCommonProtocol
+
logging.basicConfig(level=logging.INFO)

# External-controller endpoints of the clash instances to manage,
# one entry per running instance.
BASE_URL_LIST = [
    "http://192.168.31.201:52001",
    "http://192.168.31.201:52002",
    "http://192.168.31.201:52003",
    "http://192.168.31.201:52004",
    "http://192.168.31.201:52005",
    "http://192.168.31.201:52006",
    "http://192.168.31.201:52007",
    "http://192.168.31.201:52008",
    "http://192.168.31.201:52009",
    "http://192.168.31.201:52010",
]
+
+
class ClashProxyManager:
    """Drives several local clash instances: forces Global mode on each,
    then points every instance's GLOBAL selector at a distinct, randomly
    chosen proxy."""

    def __init__(self, base_url_list):
        self.key_group = 0
        self.base_url_list = base_url_list  # controller URLs, one per instance
        self.all_proxies = []    # proxy names fetched from the first reachable instance
        self.filter_seeds = []   # optional whitelist; when non-empty, picks come from it

    def get_all_proxies(self, clash_tool_url: str) -> List[str]:
        """Return every proxy name known to the instance, or [] on failure."""
        url = f"{clash_tool_url}/api/proxies"
        try:
            response = httpx.get(url)
            response.raise_for_status()
            proxies = response.json()
            logging.info("Available proxies:")
            return list(proxies['proxies'].keys())
        except Exception as e:
            logging.error(f"Failed to get proxies: {e}")
            return []

    def switch_proxy(self, proxy_name: str, clash_api_base_url: str) -> None:
        """Point the instance's GLOBAL selector at ``proxy_name``."""
        url = f"{clash_api_base_url}/api/proxies/GLOBAL"
        data = {"name": proxy_name}
        try:
            response = httpx.put(url, json=data)
            if response.status_code == 204:
                logging.info(f"Switched to proxy: {proxy_name}")
            else:
                logging.error(f"Failed to switch proxy: {response.status_code} - {proxy_name}")
        except Exception as e:
            logging.error(f"Failed to switch proxy: {e}")

    def update_configs(self):
        """PATCH every instance's config to Global mode."""
        for base_url in self.base_url_list:
            url = base_url + "/api/configs"

            headers = {
                "accept": "application/json, text/plain, */*",
                "accept-encoding": "gzip, deflate, br, zstd",
                "accept-language": "zh-CN,zh",
                "connection": "keep-alive",
                "content-type": "application/json",
                # NOTE(review): hard-coded Host header kept as-is — confirm it
                # matches the deployment; it does not match base_url here.
                "host": "localhost:17888",
                "origin": base_url,
                "referer": base_url,
                "sec-ch-ua": '"Not A(Brand";v="8", "Chromium";v="132", "Brave";v="132"',
                "sec-ch-ua-mobile": "?0",
                "sec-ch-ua-platform": '"macOS"',
                "sec-fetch-dest": "empty",
                "sec-fetch-mode": "cors",
                "sec-fetch-site": "same-origin",
                "sec-gpc": "1",
                "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36",
            }

            data = {"mode": "Global"}

            try:
                with httpx.Client() as client:
                    response = client.patch(url, headers=headers, json=data)
                    if response.status_code == 204:
                        print(f"{url} OK")
                    else:
                        print("响应内容:", response.text)
            except httpx.RequestError as exc:
                print(f"请求失败: {exc}")

    def main(self) -> None:
        """Force Global mode everywhere, then assign each instance a distinct proxy.

        Fixes two bugs in the original loop: the inner ``while True`` never
        broke (it spun forever when filter_seeds was set, and re-switched the
        same proxy forever otherwise), and a duplicate random choice made
        ``continue`` skip an instance entirely instead of re-choosing.
        """
        self.update_configs()

        # Fetch the proxy list once from the first reachable instance;
        # every instance shares the same proxy set.
        if not self.all_proxies:
            for base_url in self.base_url_list:
                proxies = self.get_all_proxies(base_url)
                if proxies:
                    self.all_proxies = proxies
                    break

        if not self.all_proxies:
            logging.error("Failed to get all proxies")
            return

        # Candidate pool: the seed whitelist when provided, otherwise all proxies.
        candidates = self.filter_seeds or self.all_proxies

        # TODO: the original intent was to verify each proxy after switching
        # (test URL: https://api.ipify.org) and move on to the next proxy on
        # failure — not implemented yet.
        switched_agent = []
        for clash_api_base_url in self.base_url_list:
            remaining = [p for p in candidates if p not in switched_agent]
            if not remaining:
                logging.error(f"No unused proxies left for {clash_api_base_url}")
                break
            choice_proxy = random.choice(remaining)
            self.switch_proxy(choice_proxy, clash_api_base_url)
            switched_agent.append(choice_proxy)
+
+
+
if __name__ == "__main__":
    # Build the manager over the configured controller endpoints and run it.
    ClashProxyManager(BASE_URL_LIST).main()

+ 0 - 66
manual/clash/switch_global.py

@@ -1,66 +0,0 @@
-import httpx
-
-# base_url_list = [
-#     "http://192.168.31.201:58001",
-#     "http://192.168.31.201:58002",
-#     "http://192.168.31.201:58003",
-#     "http://192.168.31.201:58004",
-#     "http://192.168.31.201:58005",
-#     "http://192.168.31.201:58006",
-#     "http://192.168.31.201:58007",
-#     "http://192.168.31.201:58008",
-#     "http://192.168.31.201:58009",
-#     "http://192.168.31.201:58010",
-# ]
-base_url_list = [
-    "http://192.168.64.32:58001",
-    "http://192.168.64.32:58002",
-    "http://192.168.64.32:58003",
-    "http://192.168.64.32:58004",
-    "http://192.168.64.32:58005",
-    "http://192.168.64.32:58006",
-    "http://192.168.64.32:58007",
-    "http://192.168.64.32:58008",
-    "http://192.168.64.32:58009",
-    "http://192.168.64.32:58010",
-    "http://192.168.64.32:58011",
-    "http://192.168.64.32:58012",
-]
-
-for base_url in base_url_list:
-    key = "/api/configs"
-
-    url = base_url + key
-
-    headers = {
-        "accept": "application/json, text/plain, */*",
-        "accept-encoding": "gzip, deflate, br, zstd",
-        "accept-language": "zh-CN,zh",
-        "connection": "keep-alive",
-        "content-type": "application/json",
-        "host": "localhost:17888",
-        "origin": base_url,
-        "referer": base_url,
-        "sec-ch-ua": '"Not A(Brand";v="8", "Chromium";v="132", "Brave";v="132"',
-        "sec-ch-ua-mobile": "?0",
-        "sec-ch-ua-platform": '"macOS"',
-        "sec-fetch-dest": "empty",
-        "sec-fetch-mode": "cors",
-        "sec-fetch-site": "same-origin",
-        "sec-gpc": "1",
-        "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36",
-    }
-
-    # 请求体数据
-    data = {"mode": "Global"}  # 替换为实际的请求数据
-
-    # 使用 httpx 发送 PATCH 请求
-    try:
-        with httpx.Client() as client:
-            response = client.patch(url, headers=headers, json=data)
-            if response.status_code == 204:
-                print(f"{url} OK")
-            else:
-                print("响应内容:", response.text)
-    except httpx.RequestError as exc:
-        print(f"请求失败: {exc}")

+ 0 - 102
manual/clash/switch_proxy.py

@@ -1,102 +0,0 @@
-import httpx
-import time
-import json
-import uuid
-import logging
-import websockets
-from typing import Optional, List
-from websockets import WebSocketCommonProtocol
-
-logging.basicConfig(level=logging.INFO)
-
-
-class ClashProxyManager:
-    def __init__(self):
-        self.key_group = 0
-        self.clash_api_base_url = [
-            "http://192.168.31.201:58001/api",
-            "http://192.168.31.201:58002/api",
-            "http://192.168.31.201:58003/api",
-            "http://192.168.31.201:58004/api",
-            "http://192.168.31.201:58005/api",
-            "http://192.168.31.201:58006/api",
-            "http://192.168.31.201:58007/api",
-            "http://192.168.31.201:58008/api",
-            "http://192.168.31.201:58009/api",
-            "http://192.168.31.201:58010/api",
-        ]
-        self.proxy_keyword = [
-            ['sg', 'SG', '新加坡', '马来西亚'],
-            ['jp', '日本'],
-        ]
-        self.all_proxies = []
-        self.selected_proxies = []
-
-    def get_all_proxies(self, clash_tool_url: str) -> List[str]:
-        url = f"{clash_tool_url}/proxies"
-        try:
-            response = httpx.get(url)
-            response.raise_for_status()
-            proxies = response.json()
-            logging.info("Available proxies:")
-            # 输出读取的所有代理信息
-            # for proxy_name, proxy_info in proxies['proxies'].items():
-            #     logging.info(f"Name: {proxy_name}, Type: {proxy_info.get('type', 'Unknown')}")
-            return list(proxies['proxies'].keys())
-        except Exception as e:
-            logging.error(f"Failed to get proxies: {e}")
-            return []
-
-    def filter_proxy(self):
-        for keyword in self.proxy_keyword[self.key_group]:
-            for item in self.all_proxies:
-                if keyword.lower() in item.lower():
-                    self.selected_proxies.append(item)
-
-    def switch_proxy(self, proxy_name: str, clash_api_base_url: str) -> None:
-        url = f"{clash_api_base_url}/proxies/GLOBAL"
-        data = {"name": proxy_name}
-        try:
-            response = httpx.put(url, json=data)
-            if response.status_code == 204:
-                logging.info(f"Switched to proxy: {proxy_name}")
-            else:
-                logging.error(f"Failed to switch proxy: {response.status_code} - {proxy_name}")
-        except Exception as e:
-            logging.error(f"Failed to switch proxy: {e}")
-
-    def main(self) -> None:
-        # 读取所有代理
-        if not self.all_proxies:
-            for clash_tool_url in self.clash_api_base_url:
-                proxies = self.get_all_proxies(clash_tool_url)
-                if proxies:
-                    self.all_proxies = proxies
-                    break
-
-        if not self.all_proxies:
-            logging.error("Failed to get all proxies")
-            return
-
-        # 通过关键词过滤出需要的代理
-        self.filter_proxy()
-        if not self.selected_proxies:
-            logging.error("Failed to filter proxies")
-            return
-
-        # 遍历所有的线路api, 切换代理
-        # 两个循环, 设换不重复的代理
-        switched_agent = []
-        for clash_api_base_url in self.clash_api_base_url:
-            for select_proxy in self.selected_proxies:
-                if select_proxy in switched_agent:
-                    continue
-                else:
-                    self.switch_proxy(select_proxy, clash_api_base_url)
-                    switched_agent.append(select_proxy)
-                    break
-
-
-if __name__ == "__main__":
-    manager = ClashProxyManager()
-    manager.main()

+ 182 - 0
manual/clash/switch_proxy_group_group.py

@@ -0,0 +1,182 @@
+import httpx
+import time
+import json
+import uuid
+import logging
+import websockets
+from typing import Optional, List
+from websockets import WebSocketCommonProtocol
+
+logging.basicConfig(level=logging.INFO)
+
+BASE_URL = "http://192.168.31.194"
+
+PORT_LIST = [
+    ["58001", "59001"],
+    ["58002", "59002"],
+    ["58003", "59003"],
+    ["58004", "59004"],
+    ["58005", "59005"],
+    ["58006", "59006"],
+    ["58007", "59007"],
+    ["58008", "59008"],
+    ["58009", "59009"],
+    ["58010", "59010"],
+]
+
+TEST_URL = "https://httpbin.org/ip"
+
+
+class ClashProxyManager:
+    def __init__(self, base_url, base_port, test_url):
+        self.key_group = 0
+        self.base_url = base_url
+        self.base_port = base_port
+        self.proxy_keyword = [
+            ['sg', 'SG', '新加坡', '马来西亚'],
+            ['jp', '日本'],
+        ]
+        self.test_url = test_url
+        self.all_proxies = []
+        self.selected_proxies = []
+
+    def get_all_proxies(self, clash_tool_url: str) -> List[str]:
+        url = f"{clash_tool_url}/api/proxies"
+        try:
+            response = httpx.get(url)
+            response.raise_for_status()
+            proxies = response.json()
+            logging.info("Available proxies:")
+            # 输出读取的所有代理信息
+            # for proxy_name, proxy_info in proxies['proxies'].items():
+            #     logging.info(f"Name: {proxy_name}, Type: {proxy_info.get('type', 'Unknown')}")
+            return list(proxies['proxies'].keys())
+        except Exception as e:
+            logging.error(f"Failed to get proxies: {e}")
+            return []
+
+    def filter_proxy(self):
+        for keyword in self.proxy_keyword[self.key_group]:
+            for item in self.all_proxies:
+                if keyword.lower() in item.lower():
+                    self.selected_proxies.append(item)
+
+    def switch_proxy(self, proxy_name: str, url_and_port: str) -> None:
+        logging.info("switch proxy")
+        url = f"{url_and_port}/api/proxies/GLOBAL"
+        data = {"name": proxy_name}
+        try:
+            response = httpx.put(url, json=data)
+            if response.status_code == 204:
+                logging.info(f"Switched to proxy: {proxy_name}")
+            else:
+                logging.error(f"Failed to switch proxy: {response.status_code} - {proxy_name}")
+        except Exception as e:
+            logging.error(f"Failed to switch proxy: {e}")
+
+    def update_configs(self):
+        for base_port in self.base_port:
+            url_and_port = self.base_url + ":" + base_port[0]
+            key = "/api/configs"
+            url = url_and_port + key
+
+            headers = {
+                "accept": "application/json, text/plain, */*",
+                "accept-encoding": "gzip, deflate, br, zstd",
+                "accept-language": "zh-CN,zh",
+                "connection": "keep-alive",
+                "content-type": "application/json",
+                "origin": url_and_port,
+                "referer": url_and_port,
+                "sec-ch-ua": '"Not A(Brand";v="8", "Chromium";v="132", "Brave";v="132"',
+                "sec-ch-ua-mobile": "?0",
+                "sec-ch-ua-platform": '"macOS"',
+                "sec-fetch-dest": "empty",
+                "sec-fetch-mode": "cors",
+                "sec-fetch-site": "same-origin",
+                "sec-gpc": "1",
+                "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36",
+            }
+
+            # 请求体数据
+            data = {"mode": "Global"}  # 替换为实际的请求数据
+
+            # 使用 httpx 发送 PATCH 请求
+            try:
+                with httpx.Client() as client:
+                    response = client.patch(url, headers=headers, json=data)
+                    if response.status_code == 204:
+                        print(f"{url} OK")
+                    else:
+                        print("响应内容:", response.text)
+            except httpx.RequestError as exc:
+                print(f"请求失败: {exc}")
+
+    def check_proxy(self, proxy_url):
+        # proxy_url: 代理地址, 没有密码
+        # 测试目标地址:
+        try:
+            # 使用代理发送请求
+            response = httpx.get(
+                TEST_URL,
+                proxies={"http": proxy_url, "https": proxy_url},
+                timeout=5
+            )
+            response.raise_for_status()  # 检查请求是否成功
+            # 如果请求成功,返回 True
+            return True
+        except Exception:
+            # 如果发生任何异常,返回 False
+            return False
+
+    def main(self) -> None:
+        # 设置全局代理
+        self.update_configs()
+
+        # 读取所有代理
+        if not self.all_proxies:
+            for port_list in self.base_port:
+                base_url = self.base_url + ":" + port_list[0]
+                clash_tool_url = f"{base_url}"
+                proxies = self.get_all_proxies(clash_tool_url)
+                if proxies:
+                    self.all_proxies = proxies
+                    break
+
+        if not self.all_proxies:
+            logging.error("Failed to get all proxies")
+            return
+
+        # 通过关键词过滤出需要的代理
+        self.filter_proxy()
+        if not self.selected_proxies:
+            logging.error("Failed to filter proxies")
+            return
+
+        # 遍历所有的线路api, 切换不重复代理
+        # 切换后, 检测代理, 如果检测返回失败, 再次切换
+        used_proxy = []
+        for base_port in self.base_port:
+            url_and_port = self.base_url + ":" + base_port[0]
+            proxy_url = self.base_url + ":" + base_port[1]
+            for select_proxy in self.selected_proxies:
+                if select_proxy in used_proxy:
+                    continue
+
+                # 尝试切换代理并检测
+                self.switch_proxy(select_proxy, url_and_port)
+                if self.check_proxy(proxy_url):
+                    print(f"代理 {select_proxy} 切换成功,检测通过!")
+                    used_proxy.append(select_proxy)  # 标记为已使用
+                    break  # 成功后退出当前代理的重试循环
+                else:
+                    print(f"{url_and_port} 切换 {select_proxy} 检测失败")
+                    time.sleep(1)  # 等待一段时间后重试
+
+                # 如果当前代理成功,直接进入下一个 base_port 的处理
+                break
+
+
+if __name__ == "__main__":
+    manager = ClashProxyManager(BASE_URL, PORT_LIST, TEST_URL)
+    manager.main()