diff --git a/docker/Dockerfile b/docker/Dockerfile
index a84703aab66..0c45da207b5 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -84,4 +84,4 @@ RUN pip install --break-system-packages --upgrade pip \
ADD --chown=$G4F_USER:$G4F_USER g4f $G4F_DIR/g4f
# Expose ports
-EXPOSE 8080 1337
+EXPOSE 8080 1337 7900
diff --git a/g4f/Provider/You.py b/g4f/Provider/You.py
index 2d4f7ca5551..b91fb0d8782 100644
--- a/g4f/Provider/You.py
+++ b/g4f/Provider/You.py
@@ -10,7 +10,7 @@
from ..image import ImageResponse, ImagePreview, EXTENSIONS_MAP, to_bytes, is_accepted_format
from ..requests import StreamSession, FormData, raise_for_status, get_nodriver
from ..cookies import get_cookies
-from ..errors import MissingRequirementsError
+from ..errors import MissingRequirementsError, ResponseError
from .. import debug
class You(AsyncGeneratorProvider, ProviderModelMixin):
@@ -23,18 +23,19 @@ class You(AsyncGeneratorProvider, ProviderModelMixin):
models = [
default_model,
"gpt-4o",
+ "gpt-4o-mini",
"gpt-4-turbo",
- "gpt-4",
+ "grok-2",
"claude-3.5-sonnet",
+ "claude-3.5-haiku",
"claude-3-opus",
"claude-3-sonnet",
"claude-3-haiku",
- "claude-2",
+ "llama-3.3-70b",
"llama-3.1-70b",
"llama-3",
"gemini-1-5-flash",
"gemini-1-5-pro",
- "gemini-1-0-pro",
"databricks-dbrx-instruct",
"command-r",
"command-r-plus",
@@ -105,19 +106,14 @@ async def create_async_generator(
"conversationTurnId": str(uuid.uuid4()),
"chatId": str(uuid.uuid4()),
}
- params = {
- "userFiles": upload,
- "selectedChatMode": chat_mode,
- }
if chat_mode == "custom":
if debug.logging:
print(f"You model: {model}")
- params["selectedAiModel"] = model.replace("-", "_")
+ data["selectedAiModel"] = model.replace("-", "_")
- async with (session.post if chat_mode == "default" else session.get)(
+ async with session.get(
f"{cls.url}/api/streamingSearch",
- data=data if chat_mode == "default" else None,
- params=params if chat_mode == "default" else data,
+ params=data,
headers=headers,
cookies=cookies
) as response:
@@ -126,9 +122,13 @@ async def create_async_generator(
if line.startswith(b'event: '):
event = line[7:].decode()
elif line.startswith(b'data: '):
+ if event == "error":
+                        raise ResponseError(line[6:].decode())
if event in ["youChatUpdate", "youChatToken"]:
data = json.loads(line[6:])
if event == "youChatToken" and event in data and data[event]:
+ if data[event].startswith("#### You\'ve hit your free quota for the Model Agent. For more usage of the Model Agent, learn more at:"):
+ continue
yield data[event]
elif event == "youChatUpdate" and "t" in data and data["t"]:
if chat_mode == "create":
diff --git a/g4f/Provider/hf_space/CohereForAI.py b/g4f/Provider/hf_space/CohereForAI.py
new file mode 100644
index 00000000000..4adeef60175
--- /dev/null
+++ b/g4f/Provider/hf_space/CohereForAI.py
@@ -0,0 +1,95 @@
+from __future__ import annotations
+
+import json
+import uuid
+from aiohttp import ClientSession, FormData
+
+from ...typing import AsyncResult, Messages
+from ...requests import raise_for_status
+from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
+from ..helper import format_prompt
+from ...providers.response import JsonConversation, TitleGeneration
+
+class CohereForAI(AsyncGeneratorProvider, ProviderModelMixin):
+ url = "https://cohereforai-c4ai-command.hf.space"
+ conversation_url = f"{url}/conversation"
+
+ working = True
+
+ default_model = "command-r-plus-08-2024"
+ models = [
+ default_model,
+ "command-r-08-2024",
+ "command-r-plus",
+ "command-r",
+ "command-r7b-12-2024",
+ ]
+
+ @classmethod
+ async def create_async_generator(
+ cls, model: str, messages: Messages,
+ api_key: str = None,
+ proxy: str = None,
+ conversation: JsonConversation = None,
+ return_conversation: bool = False,
+ **kwargs
+ ) -> AsyncResult:
+ model = cls.get_model(model)
+ headers = {
+ "Origin": cls.url,
+ "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:133.0) Gecko/20100101 Firefox/133.0",
+ "Accept": "*/*",
+ "Accept-Language": "en-US,en;q=0.5",
+ "Referer": "https://cohereforai-c4ai-command.hf.space/",
+ "Sec-Fetch-Dest": "empty",
+ "Sec-Fetch-Mode": "cors",
+ "Sec-Fetch-Site": "same-origin",
+ "Priority": "u=4",
+ }
+ if api_key is not None:
+ headers["Authorization"] = f"Bearer {api_key}"
+ async with ClientSession(
+ headers=headers,
+ cookies=None if conversation is None else conversation.cookies
+ ) as session:
+ system_prompt = "\n".join([message["content"] for message in messages if message["role"] == "system"])
+ messages = [message for message in messages if message["role"] != "system"]
+ inputs = format_prompt(messages) if conversation is None else messages[-1]["content"]
+ if conversation is None or conversation.model != model or conversation.preprompt != system_prompt:
+ data = {"model": model, "preprompt": system_prompt}
+ async with session.post(cls.conversation_url, json=data, proxy=proxy) as response:
+ await raise_for_status(response)
+ conversation = JsonConversation(
+ **await response.json(),
+ **data,
+ cookies={n: c.value for n, c in response.cookies.items()}
+ )
+ if return_conversation:
+ yield conversation
+ async with session.get(f"{cls.conversation_url}/{conversation.conversationId}/__data.json?x-sveltekit-invalidated=11", proxy=proxy) as response:
+ await raise_for_status(response)
+ node = json.loads((await response.text()).splitlines()[0])["nodes"][1]
+ if node["type"] == "error":
+ raise RuntimeError(node["error"])
+ data = node["data"]
+ message_id = data[data[data[data[0]["messages"]][-1]]["id"]]
+ data = FormData()
+            # "inputs" is already set above (format_prompt() for new conversations); do not overwrite it here
+ data.add_field(
+ "data",
+ json.dumps({"inputs": inputs, "id": message_id, "is_retry": False, "is_continue": False, "web_search": False, "tools": []}),
+ content_type="application/json"
+ )
+ async with session.post(f"{cls.conversation_url}/{conversation.conversationId}", data=data, proxy=proxy) as response:
+ await raise_for_status(response)
+ async for chunk in response.content:
+ try:
+ data = json.loads(chunk)
+ except (json.JSONDecodeError) as e:
+                        raise RuntimeError(f"Failed to read response: {chunk.decode(errors='replace')}") from e
+ if data["type"] == "stream":
+ yield data["token"].replace("\u0000", "")
+ elif data["type"] == "title":
+ yield TitleGeneration(data["title"])
+ elif data["type"] == "finalAnswer":
+ break
\ No newline at end of file
diff --git a/g4f/Provider/hf_space/__init__.py b/g4f/Provider/hf_space/__init__.py
index 64be9d34ec0..98856218574 100644
--- a/g4f/Provider/hf_space/__init__.py
+++ b/g4f/Provider/hf_space/__init__.py
@@ -8,6 +8,7 @@
from .BlackForestLabsFlux1Schnell import BlackForestLabsFlux1Schnell
from .VoodoohopFlux1Schnell import VoodoohopFlux1Schnell
from .StableDiffusion35Large import StableDiffusion35Large
+from .CohereForAI import CohereForAI
from .Qwen_QVQ_72B import Qwen_QVQ_72B
from .Qwen_Qwen_2_72B_Instruct import Qwen_Qwen_2_72B_Instruct
@@ -19,7 +20,7 @@ class HuggingSpace(AsyncGeneratorProvider, ProviderModelMixin):
default_model = BlackForestLabsFlux1Dev.default_model
default_vision_model = Qwen_QVQ_72B.default_model
- providers = [BlackForestLabsFlux1Dev, BlackForestLabsFlux1Schnell, VoodoohopFlux1Schnell, StableDiffusion35Large, Qwen_QVQ_72B, Qwen_Qwen_2_72B_Instruct]
+ providers = [BlackForestLabsFlux1Dev, BlackForestLabsFlux1Schnell, VoodoohopFlux1Schnell, StableDiffusion35Large, CohereForAI, Qwen_QVQ_72B, Qwen_Qwen_2_72B_Instruct]
@classmethod
def get_parameters(cls, **kwargs) -> dict:
@@ -31,11 +32,13 @@ def get_parameters(cls, **kwargs) -> dict:
@classmethod
def get_models(cls, **kwargs) -> list[str]:
if not cls.models:
+ models = []
for provider in cls.providers:
- cls.models.extend(provider.get_models(**kwargs))
- cls.models.extend(provider.model_aliases.keys())
- cls.models = list(set(cls.models))
- cls.models.sort()
+ models.extend(provider.get_models(**kwargs))
+ models.extend(provider.model_aliases.keys())
+ models = list(set(models))
+ models.sort()
+ cls.models = models
return cls.models
@classmethod
diff --git a/g4f/Provider/needs_auth/HuggingFace.py b/g4f/Provider/needs_auth/HuggingFace.py
index c15dc767a19..9d4e353847f 100644
--- a/g4f/Provider/needs_auth/HuggingFace.py
+++ b/g4f/Provider/needs_auth/HuggingFace.py
@@ -28,15 +28,16 @@ class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
def get_models(cls) -> list[str]:
if not cls.models:
url = "https://huggingface.co/api/models?inference=warm&pipeline_tag=text-generation"
- cls.models = [model["id"] for model in requests.get(url).json()]
- cls.models.append("meta-llama/Llama-3.2-11B-Vision-Instruct")
- cls.models.append("nvidia/Llama-3.1-Nemotron-70B-Instruct-HF")
- cls.models.sort()
- if not cls.image_models:
- url = "https://huggingface.co/api/models?pipeline_tag=text-to-image"
- cls.image_models = [model["id"] for model in requests.get(url).json() if model["trendingScore"] >= 20]
- cls.image_models.sort()
- cls.models.extend(cls.image_models)
+ models = [model["id"] for model in requests.get(url).json()]
+ models.append("meta-llama/Llama-3.2-11B-Vision-Instruct")
+ models.append("nvidia/Llama-3.1-Nemotron-70B-Instruct-HF")
+ models.sort()
+ if not cls.image_models:
+ url = "https://huggingface.co/api/models?pipeline_tag=text-to-image"
+ cls.image_models = [model["id"] for model in requests.get(url).json() if model["trendingScore"] >= 20]
+ cls.image_models.sort()
+ models.extend(cls.image_models)
+ cls.models = models
return cls.models
@classmethod
diff --git a/g4f/Provider/needs_auth/__init__.py b/g4f/Provider/needs_auth/__init__.py
index 4b800ff45ad..119a118d261 100644
--- a/g4f/Provider/needs_auth/__init__.py
+++ b/g4f/Provider/needs_auth/__init__.py
@@ -25,7 +25,6 @@
from .Raycast import Raycast
from .Reka import Reka
from .Replicate import Replicate
-from .Theb import Theb
from .ThebApi import ThebApi
from .WhiteRabbitNeo import WhiteRabbitNeo
from .xAI import xAI
diff --git a/g4f/Provider/needs_auth/Theb.py b/g4f/Provider/not_working/Theb.py
similarity index 100%
rename from g4f/Provider/needs_auth/Theb.py
rename to g4f/Provider/not_working/Theb.py
diff --git a/g4f/Provider/not_working/__init__.py b/g4f/Provider/not_working/__init__.py
index a58870c20ba..9ca89cf93cc 100644
--- a/g4f/Provider/not_working/__init__.py
+++ b/g4f/Provider/not_working/__init__.py
@@ -14,4 +14,5 @@
from .MagickPen import MagickPen
from .MyShell import MyShell
from .RobocodersAPI import RobocodersAPI
+from .Theb import Theb
from .Upstage import Upstage
diff --git a/g4f/gui/client/index.html b/g4f/gui/client/index.html
index b6b17fcd833..bbf41314597 100644
--- a/g4f/gui/client/index.html
+++ b/g4f/gui/client/index.html
@@ -142,6 +142,10 @@
Settings
+
+
+
+
-
+