Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 12 additions & 9 deletions config.conf.example
Original file line number Diff line number Diff line change
Expand Up @@ -18,15 +18,18 @@ default_ai = gemini

# --- Gemini Model Configuration ---
# Choose the model to be used when Gemini is selected as the AI.
# Available models:
# - gemini-1.5-flash
# - gemini-2.0-flash
# - gemini-2.0-flash-thinking
# - gemini-2.0-flash-thinking-with-apps
# - gemini-2.5-pro
# - gemini-2.5-flash
# - gemini-3.0-pro
default_model_gemini = gemini-3.0-pro
# Available models (gemini-webapi >= 2.0.0):
# - gemini-3-pro
# - gemini-3-flash
# - gemini-3-flash-thinking
# - gemini-3-pro-plus (Plus tier)
# - gemini-3-flash-plus (Plus tier)
# - gemini-3-flash-thinking-plus (Plus tier)
# - gemini-3-pro-advanced (Advanced tier)
# - gemini-3-flash-advanced (Advanced tier)
# - gemini-3-flash-thinking-advanced (Advanced tier)
# - unspecified (use account default)
default_model_gemini = gemini-3-flash

# --- Gemini Cookies ---
# Provide your authentication cookies for Gemini here.
Expand Down
12 changes: 6 additions & 6 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,13 @@ annotated-types==0.7.0 ; python_version >= "3.10" and python_version < "4.0"
anyio==4.8.0 ; python_version >= "3.10" and python_version < "4.0"
browser-cookie3==0.20.1 ; python_version >= "3.10" and python_version < "4.0"
certifi==2024.12.14 ; python_version >= "3.10" and python_version < "4.0"
cffi==1.17.1 ; python_version >= "3.10" and python_version < "4.0"
cffi>=1.17.1 ; python_version >= "3.10" and python_version < "4.0"
click==8.1.8 ; python_version >= "3.10" and python_version < "4.0"
colorama==0.4.6 ; python_version >= "3.10" and python_version < "4.0" and sys_platform == "win32" or python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows"
curl-cffi==0.7.4 ; python_version >= "3.10" and python_version < "4.0"
curl-cffi>=0.7.4 ; python_version >= "3.10" and python_version < "4.0"
exceptiongroup==1.2.2 ; python_version >= "3.10" and python_version < "3.11"
fastapi==0.115.7 ; python_version >= "3.10" and python_version < "4.0"
gemini-webapi==1.8.3 ; python_version >= "3.10" and python_version < "4.0"
gemini-webapi>=2.0.0 ; python_version >= "3.10" and python_version < "4.0"
h11==0.14.0 ; python_version >= "3.10" and python_version < "4.0"
h2==4.1.0 ; python_version >= "3.10" and python_version < "4.0"
hpack==4.1.0 ; python_version >= "3.10" and python_version < "4.0"
Expand All @@ -21,13 +21,13 @@ loguru==0.7.3 ; python_version >= "3.10" and python_version < "4.0"
lz4==4.4.3 ; python_version >= "3.10" and python_version < "4.0"
pycparser==2.22 ; python_version >= "3.10" and python_version < "4.0"
pycryptodomex==3.21.0 ; python_version >= "3.10" and python_version < "4.0"
pydantic-core==2.27.2 ; python_version >= "3.10" and python_version < "4.0"
pydantic==2.10.6 ; python_version >= "3.10" and python_version < "4.0"
pydantic-core>=2.27.2 ; python_version >= "3.10" and python_version < "4.0"
pydantic>=2.10.6 ; python_version >= "3.10" and python_version < "4.0"
pywin32==308 ; python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows"
shadowcopy==0.0.4 ; python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows"
sniffio==1.3.1 ; python_version >= "3.10" and python_version < "4.0"
starlette==0.45.3 ; python_version >= "3.10" and python_version < "4.0"
typing-extensions==4.12.2 ; python_version >= "3.10" and python_version < "4.0"
typing-extensions>=4.12.2 ; python_version >= "3.10" and python_version < "4.0"
uvicorn==0.34.0 ; python_version >= "3.10" and python_version < "4.0"
win32-setctime==1.2.0 ; python_version >= "3.10" and python_version < "4.0" and sys_platform == "win32"
wmi==1.5.1 ; python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows"
2 changes: 1 addition & 1 deletion src/app/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ def load_config(config_file: str = "config.conf") -> configparser.ConfigParser:
if "Cookies" not in config:
config["Cookies"] = {}
if "AI" not in config:
config["AI"] = {"default_model_gemini": "gemini-3.0-pro"}
config["AI"] = {"default_model_gemini": "gemini-3-flash"}
if "Proxy" not in config:
config["Proxy"] = {"http_proxy": ""}

Expand Down
26 changes: 25 additions & 1 deletion src/app/endpoints/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,30 @@

router = APIRouter()

@router.get("/v1/gems")
async def list_gems():
    """Return the available Gemini gems as a JSON payload.

    Responds 503 when the Gemini client has not been initialized yet and
    500 when fetching the gems from the backend fails.
    """
    try:
        client = get_gemini_client()
    except GeminiClientNotInitializedError as exc:
        raise HTTPException(status_code=503, detail=str(exc))

    try:
        fetched = await client.fetch_gems()
        payload = [
            {
                "id": gem.id,
                "name": gem.name,
                "description": gem.description,
                "predefined": gem.predefined,
            }
            for gem in fetched
        ]
    except Exception as exc:
        logger.error(f"Error fetching gems: {exc}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Error fetching gems: {str(exc)}")
    return {"gems": payload}

@router.post("/translate")
async def translate_chat(request: GeminiRequest):
try:
Expand Down Expand Up @@ -85,7 +109,7 @@ async def chat_completions(request: OpenAIChatRequest):

if request.model:
try:
response = await gemini_client.generate_content(message=final_prompt, model=request.model.value, files=None)
response = await gemini_client.generate_content(message=final_prompt, model=request.model.value, files=None, gem=request.gem)
return convert_to_openai_format(response.text, request.model.value, is_stream)
except Exception as e:
logger.error(f"Error in /v1/chat/completions endpoint: {e}", exc_info=True)
Expand Down
2 changes: 1 addition & 1 deletion src/app/endpoints/gemini.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ async def gemini_generate(request: GeminiRequest):
try:
# Use the value attribute for the model (since GeminiRequest.model is an Enum)
files: Optional[List[Union[str, Path]]] = [Path(f) for f in request.files] if request.files else None
response = await gemini_client.generate_content(request.message, request.model.value, files=files)
response = await gemini_client.generate_content(request.message, request.model.value, files=files, gem=request.gem)
return {"response": response.text}
except Exception as e:
logger.error(f"Error in /gemini endpoint: {e}", exc_info=True)
Expand Down
63 changes: 58 additions & 5 deletions src/models/gemini.py
Original file line number Diff line number Diff line change
@@ -1,30 +1,83 @@
# src/models/gemini.py
import configparser
import logging
import os
from typing import Optional, List, Union
from pathlib import Path
from gemini_webapi import GeminiClient as WebGeminiClient
from app.config import CONFIG

logger = logging.getLogger("app")


class MyGeminiClient:
"""
Wrapper for the Gemini Web API client.
"""
def __init__(self, secure_1psid: str, secure_1psidts: str, proxy: str | None = None) -> None:
self.client = WebGeminiClient(secure_1psid, secure_1psidts, proxy)
self._gems_cache = None

async def init(self) -> None:
"""Initialize the Gemini client."""
"""Initialise the Gemini client and persist any rotated cookies."""
await self.client.init()
async def generate_content(self, message: str, model: str, files: Optional[List[Union[str, Path]]] = None):
await self._persist_cookies()

async def _persist_cookies(self) -> None:
"""Persist rotated cookies back to config.conf to survive restarts."""
config_path = "config.conf"
if not os.path.exists(config_path):
return
try:
cookies = self.client.cookies
psid = cookies.get("__Secure-1PSID")
psidts = cookies.get("__Secure-1PSIDTS")
if not psid:
return
cfg = configparser.ConfigParser()
cfg.read(config_path, encoding="utf-8")
if "Cookies" not in cfg:
cfg["Cookies"] = {}
cfg["Cookies"]["gemini_cookie_1psid"] = psid
if psidts:
cfg["Cookies"]["gemini_cookie_1psidts"] = psidts
with open(config_path, "w", encoding="utf-8") as f:
cfg.write(f)
logger.info("Cookies persisted to config.conf after rotation.")
except Exception as e:
logger.warning(f"Failed to persist cookies: {e}")

async def generate_content(
self,
message: str,
model: str,
files: Optional[List[Union[str, Path]]] = None,
gem: Optional[str] = None,
):
"""
Generate content using the Gemini client.
"""
return await self.client.generate_content(message, model=model, files=files)
resolved_gem = await self._resolve_gem(gem) if gem else None
return await self.client.generate_content(message, model=model, files=files, gem=resolved_gem)

async def fetch_gems(self):
"""Fetch available gems and cache them."""
self._gems_cache = await self.client.fetch_gems()
return self._gems_cache

async def _resolve_gem(self, gem_id_or_name: str):
"""Resolve a gem by ID or name."""
if self._gems_cache is None:
await self.fetch_gems()
for gem in self._gems_cache:
if gem.id == gem_id_or_name or gem.name.lower() == gem_id_or_name.lower():
return gem
return gem_id_or_name

    async def close(self) -> None:
        """Close the underlying Gemini web client and release its resources."""
        await self.client.close()

def start_chat(self, model: str):
def start_chat(self, model: str, gem: Optional[str] = None):
"""
Start a chat session with the given model.
"""
Expand Down
26 changes: 20 additions & 6 deletions src/schemas/request.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,25 +6,39 @@
class GeminiModels(str, Enum):
"""
An enumeration of the available Gemini models.
Matches model names from gemini-webapi >= 2.0.0.
"""

# Gemini 3.0 Series
PRO_3_0 = "gemini-3.0-pro"
# Gemini 3 Series
PRO_3 = "gemini-3-pro"
FLASH_3 = "gemini-3-flash"
FLASH_3_THINKING = "gemini-3-flash-thinking"

# Gemini 2.5 Series
PRO_2_5 = "gemini-2.5-pro"
FLASH_2_5 = "gemini-2.5-flash"
# Gemini 3 Plus Series
PRO_3_PLUS = "gemini-3-pro-plus"
FLASH_3_PLUS = "gemini-3-flash-plus"
FLASH_3_THINKING_PLUS = "gemini-3-flash-thinking-plus"

# Gemini 3 Advanced Series
PRO_3_ADVANCED = "gemini-3-pro-advanced"
FLASH_3_ADVANCED = "gemini-3-flash-advanced"
FLASH_3_THINKING_ADVANCED = "gemini-3-flash-thinking-advanced"

# Unspecified (use server default)
UNSPECIFIED = "unspecified"


class GeminiRequest(BaseModel):
message: str
model: GeminiModels = Field(default=GeminiModels.FLASH_2_5, description="Model to use for Gemini.")
model: GeminiModels = Field(default=GeminiModels.FLASH_3, description="Model to use for Gemini.")
files: Optional[List[str]] = []
gem: Optional[str] = Field(default=None, description="Gem ID or name to use as system prompt.")

class OpenAIChatRequest(BaseModel):
    """Request body for the OpenAI-compatible /v1/chat/completions endpoint."""
    # Conversation turns in OpenAI format, e.g. [{"role": ..., "content": ...}].
    messages: List[dict]
    # Gemini model to route to; None lets the server use its configured default.
    model: Optional[GeminiModels] = None
    # Client's streaming flag — presumably selects chunked response format; confirm against handler.
    stream: Optional[bool] = False
    gem: Optional[str] = Field(default=None, description="Gem ID or name to use as system prompt.")

class Part(BaseModel):
text: str
Expand Down