From e98793d0a7af43878cf023fb045dd945a82507cf Mon Sep 17 00:00:00 2001
From: kqlio67
Date: Wed, 6 Nov 2024 17:25:09 +0200
Subject: Update (g4f/models.py g4f/Provider/ docs/providers-and-models.md)

---
 g4f/Provider/Aura.py | 49 -------------------------------------------------
 1 file changed, 49 deletions(-)
 delete mode 100644 g4f/Provider/Aura.py

(limited to 'g4f/Provider/Aura.py')

diff --git a/g4f/Provider/Aura.py b/g4f/Provider/Aura.py
deleted file mode 100644
index e2c56754..00000000
--- a/g4f/Provider/Aura.py
+++ /dev/null
@@ -1,49 +0,0 @@
-from __future__ import annotations
-
-from aiohttp import ClientSession
-
-from ..typing import AsyncResult, Messages
-from .base_provider import AsyncGeneratorProvider
-from ..requests import get_args_from_browser
-from ..webdriver import WebDriver
-
-class Aura(AsyncGeneratorProvider):
-    url = "https://openchat.team"
-    working = False
-
-    @classmethod
-    async def create_async_generator(
-        cls,
-        model: str,
-        messages: Messages,
-        proxy: str = None,
-        temperature: float = 0.5,
-        max_tokens: int = 8192,
-        webdriver: WebDriver = None,
-        **kwargs
-    ) -> AsyncResult:
-        args = get_args_from_browser(cls.url, webdriver, proxy)
-        async with ClientSession(**args) as session:
-            new_messages = []
-            system_message = []
-            for message in messages:
-                if message["role"] == "system":
-                    system_message.append(message["content"])
-                else:
-                    new_messages.append(message)
-            data = {
-                "model": {
-                    "id": "openchat_3.6",
-                    "name": "OpenChat 3.6 (latest)",
-                    "maxLength": 24576,
-                    "tokenLimit": max_tokens
-                },
-                "messages": new_messages,
-                "key": "",
-                "prompt": "\n".join(system_message),
-                "temperature": temperature
-            }
-            async with session.post(f"{cls.url}/api/chat", json=data, proxy=proxy) as response:
-                response.raise_for_status()
-                async for chunk in response.content.iter_any():
-                    yield chunk.decode(error="ignore")
--
cgit v1.2.3