# NOTE: the three lines below were page-scrape artifacts ("1220 records",
# "52 private links" x2) that made this file invalid Python; commented out.
# 1220 records / 52 private links / 52 private links
import requests
import json
import time
import uuid
import re
import logging
import os
class ThinkBuddyClient:
    """Minimal client for the ThinkBuddy chat API.

    Authenticates by anonymously signing up a Firebase user through a proxy
    endpoint, then calls the OpenAI-compatible chat/model endpoints with the
    resulting bearer token. The token is refreshed automatically when it is
    (about to be) expired.
    """

    # (connect, read) timeouts in seconds. Streaming uses a long read timeout
    # so slow generations are not cut off between chunks. The original code
    # passed no timeout at all, so a stalled server could hang forever.
    _TIMEOUT = (10, 60)
    _STREAM_TIMEOUT = (10, 300)

    def __init__(self, api_key):
        """Store the Firebase API key; no network call happens here.

        :param api_key: Firebase web API key used by the signUp endpoint.
        """
        self.api_key = api_key
        self.id_token = None        # Firebase ID token (bearer credential)
        self.refresh_token = None   # returned by signUp; currently unused here
        self.local_id = None        # Firebase local user id
        self.expiration_time = 0    # epoch seconds at which id_token expires
        # Model-list cache, per instance. (Originally these were class-level
        # attributes, so the first fetch was silently shared across instances
        # until an instance attribute shadowed them.)
        self._model_cache = None
        self._model_cache_time = 0
        self.logger = self._setup_logger()

    def _setup_logger(self):
        """Return the shared file logger, writing to ./logs/thinkbuddy_<date>.log."""
        log_dir = "./logs/"
        os.makedirs(log_dir, exist_ok=True)
        today_str = time.strftime("%Y-%m-%d")
        log_file = os.path.join(log_dir, f"thinkbuddy_{today_str}.log")
        logger = logging.getLogger("thinkbuddy_api")
        logger.setLevel(logging.INFO)
        # logging.getLogger returns a process-wide singleton; the original
        # attached a fresh FileHandler per client instance, duplicating every
        # log record and leaking file descriptors. Attach only if a handler
        # for today's file is not already present.
        target = os.path.abspath(log_file)
        if not any(getattr(h, "baseFilename", None) == target for h in logger.handlers):
            formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
            file_handler = logging.FileHandler(log_file, mode="a", encoding="utf-8")
            file_handler.setFormatter(formatter)
            logger.addHandler(file_handler)
        return logger

    def _generate_device_id(self):
        """Return a random UUID4 string (no call sites in this file — kept for compatibility)."""
        return str(uuid.uuid4())

    def _get_headers(self):
        """Return the static browser-mimicking headers used for the Firebase signUp call."""
        headers = {
            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:134.0) Gecko/20100101 Firefox/134.0',
            'Accept': '*/*',
            'Accept-Language': 'en-US,en;q=0.5',
            'Accept-Encoding': 'gzip, deflate, br, zstd',
            'X-Client-Version': 'Firefox/JsCore/10.14.1/FirebaseCore-web',
            'X-Firebase-gmpid': '1:123807869619:web:43b278a622ed6322789ec6',
            'X-Firebase-Client': 'eyJ2ZXJzaW9uIjoyLCJoZWFydGJlYXRzIjpbeyJhZ2VudCI6ImZpcmUtY29yZS8wLjEwLjEzIGZpcmUtY29yZS1lc20yMDE3LzAuMTAuMTMgZmlyZS1qcy8gZmlyZS1mc3QvNC43LjMgZmlyZS1mc3QtZXNtMjAxNy80LjcuMyBmaXJlLWF1dGgvMS43LjkgZmlyZS1hdXRoLWVzbTIwMTcvMS43LjkgZmlyZS1qcy1hbGwtYXBwLzEwLjE0LjEiLCJkYXRlcyI6WyIyMDI1LTAxLTE2Il19XX0',
            'Content-Type': 'application/json',
            'Origin': 'https://thinkbuddy.ai',
            'DNT': '1',
            'Sec-GPC': '1',
            'Connection': 'keep-alive',
            'Sec-Fetch-Dest': 'empty',
            'Sec-Fetch-Mode': 'cors',
            'Sec-Fetch-Site': 'cross-site',
            'Priority': 'u=6',
            'Pragma': 'no-cache',
            'Cache-Control': 'no-cache',
            'TE': 'trailers',
        }
        return headers

    def _check_token(self):
        """Re-register when the token is expired or about to expire.

        A 60-second safety margin avoids sending a token that would expire
        while the request is in flight (the original refreshed only after
        actual expiry).
        """
        if time.time() > self.expiration_time - 60:
            self._register_user()

    def _register_user(self):
        """Anonymously sign up a Firebase user and store the fresh tokens.

        Retries up to 3 times on network/HTTP errors, then re-raises the
        last RequestException.
        """
        url = f'https://sour-goat-10.deno.dev/v1/accounts:signUp?key={self.api_key}'
        headers = self._get_headers()
        data = {"returnSecureToken": True}
        retries = 3
        for attempt in range(retries):
            try:
                response = requests.post(url, headers=headers, json=data,
                                         timeout=self._TIMEOUT)
                response.raise_for_status()  # raise HTTPError on 4xx/5xx
                res = response.json()
                self.id_token = res['idToken']
                self.refresh_token = res['refreshToken']
                self.local_id = res['localId']
                # 'expiresIn' is a string number of seconds of validity.
                self.expiration_time = time.time() + int(res['expiresIn'])
                self.logger.info("User registered and token updated successfully")
                return
            except requests.exceptions.RequestException as e:
                self.logger.error(f"Error registering user (attempt {attempt + 1}): {e}")
                if attempt == retries - 1:
                    raise
                time.sleep(1)  # brief pause before retrying

    def list_models(self):
        """Return the available model list, cached per instance for two hours."""
        if self._model_cache and time.time() - self._model_cache_time < 7200:
            return self._model_cache
        self._check_token()
        url = 'https://api.thinkbuddy.ai/v1/chat/models'
        headers = {
            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:134.0) Gecko/20100101 Firefox/134.0',
            'Accept': 'application/json, text/plain, */*',
            'Accept-Language': 'en-US,en;q=0.5',
            'Accept-Encoding': 'gzip, deflate, br, zstd',
            'Referer': 'https://thinkbuddy.ai/',
            'Authorization': f'Bearer {self.id_token}',
            'Origin': 'https://thinkbuddy.ai',
            'DNT': '1',
            'Sec-GPC': '1',
            'Connection': 'keep-alive',
            'Sec-Fetch-Dest': 'empty',
            'Sec-Fetch-Mode': 'cors',
            'Sec-Fetch-Site': 'same-site',
            'Pragma': 'no-cache',
            'Cache-Control': 'no-cache',
            'TE': 'trailers'
        }
        try:
            response = requests.get(url, headers=headers, timeout=self._TIMEOUT)
            response.raise_for_status()
            self._model_cache = response.json()
            self._model_cache_time = time.time()
            return self._model_cache
        except requests.exceptions.RequestException as e:
            self.logger.error(f"Error fetching models: {e}")
            raise

    def chat_completions_stream(self, model, messages, temperature=1, top_p=1, max_tokens=8192):
        """Yield raw SSE lines (bytes) from a streaming chat completion.

        On a request error, yields a single SSE-formatted error chunk instead
        of raising, so consumers can forward it downstream.
        """
        self._check_token()
        url = 'https://api.thinkbuddy.ai/v1/chat/completions'
        headers = {
            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:134.0) Gecko/20100101 Firefox/134.0',
            'Accept': '*/*',
            'Accept-Language': 'en-US,en;q=0.5',
            'Accept-Encoding': 'gzip, deflate, br, zstd',
            'Referer': 'https://thinkbuddy.ai/',
            'Content-Type': 'application/json',
            'Authorization': f'Bearer {self.id_token}',
            'Origin': 'https://thinkbuddy.ai',
            'DNT': '1',
            'Sec-GPC': '1',
            'Connection': 'keep-alive',
            'Sec-Fetch-Dest': 'empty',
            'Sec-Fetch-Mode': 'cors',
            'Sec-Fetch-Site': 'same-site',
            'Priority': 'u=0',
            'Pragma': 'no-cache',
            'Cache-Control': 'no-cache',
            'TE': 'trailers'
        }
        data = {
            "model": model,
            "messages": messages,
            "temperature": temperature,
            "top_p": top_p,
            "max_tokens": max_tokens,
            "stream": True
        }
        try:
            # "with" ensures the streamed connection is released even when the
            # consumer stops iterating early (original leaked the response).
            with requests.post(url, headers=headers, json=data, stream=True,
                               timeout=self._STREAM_TIMEOUT) as response:
                response.raise_for_status()
                for line in response.iter_lines():
                    if line:
                        yield line
        except requests.exceptions.RequestException as e:
            self.logger.error(f"Error during chat completion: {e}")
            yield f"data: {json.dumps({'error': str(e)})}\n\n".encode('utf-8')

    def chat_completions(self, model, messages, temperature=1, top_p=1, max_tokens=8192, stream=False):
        """Run a non-streaming chat completion and return the parsed JSON body.

        :raises ValueError: if stream=True (use chat_completions_stream).
        :raises requests.exceptions.RequestException: on network/HTTP errors.
        """
        if stream:
            raise ValueError("Use chat_completions_stream to stream responses")
        self._check_token()
        url = 'https://api.thinkbuddy.ai/v1/chat/completions'
        headers = {
            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:134.0) Gecko/20100101 Firefox/134.0',
            'Accept': '*/*',
            'Accept-Language': 'en-US,en;q=0.5',
            'Accept-Encoding': 'gzip, deflate, br, zstd',
            'Referer': 'https://thinkbuddy.ai/',
            'Content-Type': 'application/json',
            'Authorization': f'Bearer {self.id_token}',
            'Origin': 'https://thinkbuddy.ai',
            'DNT': '1',
            'Sec-GPC': '1',
            'Connection': 'keep-alive',
            'Sec-Fetch-Dest': 'empty',
            'Sec-Fetch-Mode': 'cors',
            'Sec-Fetch-Site': 'same-site',
            'Priority': 'u=0',
            'Pragma': 'no-cache',
            'Cache-Control': 'no-cache',
            'TE': 'trailers'
        }
        data = {
            "model": model,
            "messages": messages,
            "temperature": temperature,
            "top_p": top_p,
            "max_tokens": max_tokens,
            "stream": stream
        }
        try:
            response = requests.post(url, headers=headers, json=data, stream=False,
                                     timeout=self._TIMEOUT)
            response.raise_for_status()
            return response.json()
        except requests.exceptions.RequestException as e:
            self.logger.error(f"Error during chat completion: {e}")
            raise
if __name__ == "__main__":
    # NOTE(security): the original hardcoded this API key in source. It is
    # kept only as a backward-compatible fallback; prefer setting the
    # THINKBUDDY_API_KEY environment variable.
    api_key = os.environ.get("THINKBUDDY_API_KEY",
                             "AIzaSyDBGhSNlMqApZfTMurRSHS1bh-SKMfWImw")
    client = ThinkBuddyClient(api_key)
    print(client.chat_completions("claude-3-5-sonnet", [{"role": "user", "content": "hi"}]))
from fastapi import FastAPI, Header, HTTPException, Request, Depends
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
from typing import List, Optional
from typing_extensions import Annotated # Import Annotated from typing_extensions
import uuid
import asyncio
import json
from api import ThinkBuddyClient
import time
from datetime import datetime
import logging
import os
# FastAPI application: a thin proxy in front of the ThinkBuddy API that
# creates per-session client objects automatically.
app = FastAPI(
    title="ThinkBuddy API",
    description="ThinkBuddy API 代理服务,支持自动创建会话",
    version="1.0.0"
)

# Configure CORS wide open (all origins/methods/headers) — this service is
# meant to be called from arbitrary frontends.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# In-memory map: session id -> ThinkBuddyClient. Entries are never evicted,
# so this grows unboundedly — NOTE(review): consider a TTL/LRU cache.
user_sessions = {}
class Message(BaseModel):
    """One chat turn: a role name and its text content."""
    # role is presumably "system"/"user"/"assistant" — TODO confirm upstream.
    role: str
    content: str
class ChatRequest(BaseModel):
    """Request body for POST /v1/chat/completions (OpenAI-style subset)."""
    messages: List[Message]
    model: Optional[str] = "claude-3-5-sonnet"  # upstream model name
    temperature: Optional[float] = 1.0
    stream: Optional[bool] = True  # SSE streaming is the default
class APIKeyHeader(BaseModel):
    """API key parsed from the "Authorization: Bearer <key>" header."""
    api_key: str
async def get_api_key_header(authorization: str = Header(...)) -> APIKeyHeader:
    """Dependency: extract the Bearer API key from the Authorization header.

    Responds 400 when the header is missing a space-separated scheme/token
    pair or when the scheme is not "Bearer".
    """
    try:
        scheme, api_key = authorization.split(" ", 1)
    except ValueError:
        raise HTTPException(status_code=400, detail="Authorization header is missing or malformed")
    if scheme.lower() != "bearer":
        raise HTTPException(status_code=400, detail="Authorization header must be Bearer token")
    return APIKeyHeader(api_key=api_key)
async def get_or_create_client(session_id: str, api_key: Annotated[APIKeyHeader, Depends(get_api_key_header)]) -> ThinkBuddyClient:
    """Return the cached client for this session, creating one on first use."""
    client = user_sessions.get(session_id)
    if client is None:
        client = ThinkBuddyClient(api_key=api_key.api_key)
        # Register eagerly so the auth token is ready before the first call.
        client._register_user()
        user_sessions[session_id] = client
    return client
async def stream_response(client: ThinkBuddyClient, model: str, messages: List[Message], temperature: float, logger):
    """Relay the upstream SSE stream, normalizing chunks to OpenAI delta format.

    :param client: authenticated ThinkBuddyClient to stream from
    :param model: upstream model name
    :param messages: conversation history to forward
    :param temperature: sampling temperature
    :param logger: logger for decode/stream errors
    :returns: StreamingResponse emitting "data: {...}\n\n" SSE chunks
    """
    def generate():
        try:
            for line in client.chat_completions_stream(
                model=model,
                messages=[{"role": msg.role, "content": msg.content} for msg in messages],
                temperature=temperature
            ):
                if not line:
                    continue
                if line == b"data: [DONE]":
                    yield b"data: [DONE]\n\n"
                    break
                try:
                    json_str = line.decode('utf-8')
                    # Strip only the leading SSE "data: " prefix. The original
                    # used str.replace, which also removed every occurrence of
                    # "data: " INSIDE the JSON payload, corrupting completions
                    # whose content text happened to contain that substring.
                    if json_str.startswith('data: '):
                        json_str = json_str[len('data: '):]
                    json_data = json.loads(json_str)
                    if 'choices' in json_data:
                        for choice in json_data['choices']:
                            if 'delta' in choice and choice['delta']:
                                delta = choice['delta']
                                if 'content' in delta:
                                    payload = {"choices": [{"delta": delta, "index": 0, "finish_reason": None}]}
                                    yield f"data: {json.dumps(payload)}\n\n".encode('utf-8')
                            elif 'finish_reason' in choice:
                                payload = {"choices": [{"delta": {}, "index": 0, "finish_reason": choice['finish_reason']}]}
                                yield f"data: {json.dumps(payload)}\n\n".encode('utf-8')
                except json.JSONDecodeError as e:
                    # (Dropped the stray debug print() that duplicated this.)
                    logger.error(f"JSON Decode Error:{e}, line:{line}")
                    yield f"data: {json.dumps({'error': str(e)})}\n\n".encode('utf-8')
        except Exception as e:
            logger.error(f"Error in stream_response: {e}")
            yield f"data: {json.dumps({'error': str(e)})}\n\n".encode('utf-8')
    return StreamingResponse(generate(), media_type="text/event-stream")
@app.middleware("http")
async def log_requests(request: Request, call_next):
    """Log each request/response as one JSON line in a dated file under ./logs/."""
    start_time = time.time()
    response = await call_next(request)
    process_time = time.time() - start_time
    log_dir = "./logs/"
    os.makedirs(log_dir, exist_ok=True)
    today_str = time.strftime("%Y-%m-%d")
    log_file = os.path.join(log_dir, f"thinkbuddy_api_{today_str}.log")
    logger = logging.getLogger("request_log")
    logger.setLevel(logging.INFO)
    # Attach a handler only if one for today's file is not already attached.
    # The original added a new FileHandler on EVERY request, so each record
    # was written once per request served so far and file descriptors leaked.
    target = os.path.abspath(log_file)
    if not any(getattr(h, "baseFilename", None) == target for h in logger.handlers):
        formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
        file_handler = logging.FileHandler(log_file, mode="a", encoding="utf-8")
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)
    try:
        body = await request.body()
        body = body.decode('utf-8') if body else ""
    except Exception as e:
        body = f"Failed to parse body: {str(e)}"
    if request.url.path == '/v1/chat/completions':
        output = "Streaming response"
    else:
        try:
            # NOTE(review): responses from call_next stream their body and
            # do not expose an awaitable .body(); this call presumably raises
            # and lands in the except branch, so "output" is usually the
            # failure message. Capturing it would require re-wrapping the
            # response — left unchanged to preserve behavior.
            output = await response.body()
            output = output.decode('utf-8')
        except Exception as e:
            output = f"Failed to parse body: {str(e)}"
    log_entry = {
        "ip": request.client.host,
        "method": request.method,
        "url": str(request.url),
        "headers": dict(request.headers),
        "body": body,
        "output": output,
        "process_time": f"{process_time:.4f}s"
    }
    logger.info(json.dumps(log_entry, ensure_ascii=False))
    return response
@app.post("/v1/chat/completions")
async def chat_completion(
    request: ChatRequest,
    api_key: Annotated[APIKeyHeader, Depends(get_api_key_header)],
    x_session_id: Optional[str] = Header(None)
):
    """Proxy a chat-completion call; creates a fresh session id when none is sent."""
    session_id = x_session_id if x_session_id else str(uuid.uuid4())
    try:
        # Reuse (or lazily build) the per-session ThinkBuddy client.
        client = await get_or_create_client(session_id, api_key)
        if not request.stream:
            # Plain JSON response path.
            return client.chat_completions(
                model=request.model,
                messages=[{"role": m.role, "content": m.content} for m in request.messages],
                temperature=request.temperature,
                stream=False
            )
        # SSE streaming path.
        return await stream_response(
            client=client,
            model=request.model,
            messages=request.messages,
            temperature=request.temperature,
            logger=client.logger
        )
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Error calling ThinkBuddy API: {str(e)}"
        )
@app.get("/v1/models")
async def list_models(
    api_key: Annotated[APIKeyHeader, Depends(get_api_key_header)],
    x_session_id: Optional[str] = Header(None)
):
    """Return the list of models available upstream (cached per session client)."""
    session_id = x_session_id if x_session_id else str(uuid.uuid4())
    try:
        client = await get_or_create_client(session_id, api_key)
        models = client.list_models()
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Error fetching models: {str(e)}"
        )
    return models
@app.get("/")
async def root():
    """Health-check endpoint: confirms the proxy is up."""
    return {"status": "ok", "message": "ThinkBuddy API 代理服务正在运行"}