Compare commits
2 Commits
46937fbc10
...
2552017ea7
| Author | SHA1 | Date |
|---|---|---|
|
|
2552017ea7 | |
|
|
1bf3aaf154 |
|
|
@ -14,6 +14,21 @@ class AgentFactory:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def create_model(self, protocol_name: str, api_key: str, url: str | None, model_id: str):
|
def create_model(self, protocol_name: str, api_key: str, url: str | None, model_id: str):
|
||||||
|
"""
|
||||||
|
创建agent的模型对象
|
||||||
|
|
||||||
|
Args:
|
||||||
|
protocol_name: 协议名称,如openai,gemini等,应当为_model_mapping的键
|
||||||
|
api_key: api调用令牌
|
||||||
|
url: api调用的url
|
||||||
|
model_id: 模型名
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
应当返回一个可以作为Agent类model参数的对象
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
当protocol_name在_models_mapping中不存在,抛出ValueError错误
|
||||||
|
"""
|
||||||
if protocol_name not in self._models_mapping:
|
if protocol_name not in self._models_mapping:
|
||||||
raise ValueError(f"不支持的协议类型: {protocol_name}")
|
raise ValueError(f"不支持的协议类型: {protocol_name}")
|
||||||
model_class, provider_class = self._models_mapping[protocol_name]
|
model_class, provider_class = self._models_mapping[protocol_name]
|
||||||
|
|
|
||||||
|
|
@ -1,4 +0,0 @@
|
||||||
from pretor.adapter.model_adapter.model_provider.base_provider import Provider, ProviderArgs
|
|
||||||
from pretor.adapter.model_adapter.model_provider.openai_provider import OpenAIProvider
|
|
||||||
from pretor.adapter.model_adapter.model_provider.gemini_provider import GeminiProvider
|
|
||||||
from pretor.adapter.model_adapter.model_provider.claude_provider import ClaudeProvider
|
|
||||||
|
|
@ -1,33 +0,0 @@
|
||||||
from abc import ABC, abstractmethod
|
|
||||||
from pydantic import BaseModel
|
|
||||||
from typing import List
|
|
||||||
|
|
||||||
class Provider(BaseModel):
|
|
||||||
provider_title: str
|
|
||||||
provider_url: str
|
|
||||||
provider_apikey: str
|
|
||||||
provider_models: List[str]
|
|
||||||
provider_type: str
|
|
||||||
|
|
||||||
class ProviderArgs(BaseModel):
|
|
||||||
provider_title: str
|
|
||||||
provider_url: str
|
|
||||||
provider_apikey: str
|
|
||||||
|
|
||||||
class BaseProvider(ABC):
|
|
||||||
@staticmethod
|
|
||||||
@abstractmethod
|
|
||||||
async def create_model(provider_args: ProviderArgs) -> Provider:
|
|
||||||
pass
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
@abstractmethod
|
|
||||||
async def _load_models(provider_args: ProviderArgs) -> List[str]:
|
|
||||||
pass
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
@abstractmethod
|
|
||||||
def _return_provider(provider_args: ProviderArgs, provider_models: List[str]) -> Provider:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1,57 +1,40 @@
|
||||||
from pretor.adapter.model_adapter._agent_factory import AgentFactory
|
|
||||||
from pretor.adapter.model_adapter.model_provider import Provider, ProviderArgs, OpenAIProvider,GeminiProvider, ClaudeProvider
|
|
||||||
from pydantic_ai import Agent
|
|
||||||
import httpx
|
|
||||||
from pretor.utils.error import ModelNotExistError, ProviderNotExistError
|
|
||||||
from loguru import logger
|
|
||||||
from typing import Dict
|
|
||||||
|
|
||||||
class ProviderManager:
    """Keeps a registry of model providers and builds pydantic-ai Agents from them."""

    def __init__(self):
        # Protocol name -> provider adapter class.
        self._provider_mapper = {"openai": OpenAIProvider, "gemini": GeminiProvider, "claude": ClaudeProvider}
        self._agent_factory = AgentFactory()
        # provider_title -> Provider; filled by add_provider().
        self.provider_register = {}

    async def add_provider(self, provider_type: str, provider_title: str, provider_url: str, provider_apikey: str) -> None:
        """Register a provider adapter.

        Args:
            provider_type: Provider API type; only "openai", "gemini" and "claude" are supported.
            provider_title: Alias for the provider, used as the registry key.
            provider_url: Base URL of the provider API.
            provider_apikey: API key for the provider.
        """
        provider_args: ProviderArgs = ProviderArgs(provider_title=provider_title, provider_url=provider_url, provider_apikey=provider_apikey)
        try:
            # Membership test directly on the dict (no need for .keys()).
            if provider_type not in self._provider_mapper:
                logger.warning(f"Provider type {provider_type} is not supported.")
                return None
            provider_class = self._provider_mapper[provider_type]
            provider: Provider = await provider_class.create_model(provider_args)
            self.provider_register[provider_title] = provider
            logger.info(f"已添加适配器{provider_title}")
        except httpx.RequestError as e:
            logger.warning(f"[{provider_args.provider_title}] 网络请求异常: {e}")
        except Exception as e:
            logger.warning(f"[{provider_args.provider_title}] 解析模型列表时发生错误: {e}")

    def create_agent(self, agent_name: str,
                     system_prompt: str,
                     provider_title: str,
                     model_id: str,
                     output_type: ResponseModel,
                     deps_type: DepsModel) -> Agent:
        """Turn a registered provider into a pydantic-ai Agent.

        NOTE(review): ResponseModel and DepsModel are referenced in this signature
        but no import for them is visible — confirm they are imported, otherwise
        this module fails at import time with NameError.

        Args:
            agent_name: Name given to the instantiated agent.
            system_prompt: System prompt handed to the LLM.
            provider_title: Registry key of the provider to use.
            model_id: Model id passed to the agent factory.
            output_type: Structured output type the LLM should produce.
            deps_type: Dependency type injected into the agent.

        Returns:
            A pydantic-ai Agent carrying the provider's apikey/url and the model_id.

        Raises:
            ProviderNotExistError: provider_title is not in provider_register.
            ModelNotExistError: model_id is not in the provider's model list.
        """
        if provider_title not in self.provider_register:
            raise ProviderNotExistError("提供商不存在")
        provider = self.provider_register[provider_title]
        if model_id not in provider.provider_models:
            raise ModelNotExistError("模型不存在")
        model = self._agent_factory.create_model(provider.provider_type,
                                                 provider.provider_apikey,
                                                 provider.provider_url,
                                                 model_id)
        agent = Agent(model=model,
                      name=agent_name,
                      system_prompt=system_prompt,
                      output_type=output_type,
                      deps_type=deps_type)
        return agent

    def get_provider_list(self) -> Dict[str, Provider]:
        """Return the provider registry (provider_title -> Provider)."""
        return self.provider_register
|
||||||
|
|
@ -0,0 +1,30 @@
|
||||||
|
from fastapi import APIRouter, Request
|
||||||
|
from pydantic import BaseModel
|
||||||
|
from pretor.utils.access import Accessor
|
||||||
|
|
||||||
|
auth_router = APIRouter(prefix="/api/v1/auth", tags=["auth"])


class UserRegister(BaseModel):
    # Credentials supplied by a new user; password is hashed before storage.
    user_name: str
    password: str


@auth_router.post("/register")
async def create_user(user_register: UserRegister, request: Request):
    """Register a new user, storing only the hashed password."""
    postgres_database = request.app.state.postgres_database
    hashed_password = Accessor.hash_password(user_register.password)
    user = await postgres_database.auth_database.add_user.remote(user_register.user_name, hashed_password)
    return {"message": "success", "user_id": user.user_id}


class UserLogin(BaseModel):
    user_name: str
    password: str


@auth_router.post("/login")
async def login_user(user_login: UserLogin, request: Request):
    """Log a user in and return an access token.

    NOTE(review): this reaches through `postgres_database.auth_database` on what
    elsewhere looks like a Ray actor handle — confirm nested-attribute remote
    calls actually resolve on this handle.
    """
    postgres_database = request.app.state.postgres_database
    # Fix: the .remote() call returns an awaitable reference; without `await` an
    # unresolved ref (not the User row) would be handed to Accessor below.
    user = await postgres_database.auth_database.login_user.remote(user_login.user_name)
    # Password verification happens inside login_hashed_password; the database
    # layer already raises UserNotExistError for unknown names, so the original
    # dead `if user.user_name != ...: pass` check has been removed.
    token = Accessor.login_hashed_password(user, user_login.password)
    return {"message":"success", "token":token}
||||||
|
|
||||||
|
|
@ -0,0 +1 @@
|
||||||
|
from .frontend import client_router
|
||||||
|
|
@ -0,0 +1,20 @@
|
||||||
|
import datetime
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
from ulid import ULID
|
||||||
|
from typing import Dict, List
|
||||||
|
from pretor.core.workflow.workflow import PretorWorkflow
|
||||||
|
import asyncio
|
||||||
|
|
||||||
|
class PretorEvent(BaseModel):
    """A single inbound user event plus the runtime objects attached to it."""

    # Fix: PretorWorkflow and asyncio.Queue are not pydantic-validatable types;
    # without arbitrary_types_allowed the model class fails to build at import.
    model_config = {"arbitrary_types_allowed": True}

    event_id: str = Field(default_factory=lambda: str(ULID()), description="事件的唯一标识符")
    platform: str = Field(description="消息来源的平台")
    user_id: str = Field(description="用户id")
    user_name: str = Field(description="用户名")
    create_time: str = Field(default_factory=lambda: str(datetime.datetime.now(datetime.timezone.utc).isoformat()),
                             description="事件创建时间")
    message: str = Field(description="用户发来的消息")
    attachment: Dict[str, str] | None = Field(default=None, description="附件")
    # ------- runtime-only fields, populated by GlobalStateMachine.add_event -------
    workflow: PretorWorkflow | None = Field(default=None, description="工作流")
    pending_queue: asyncio.Queue[str] | None = Field(default=None, description="待处理队列")
    receive_queue: asyncio.Queue[str] | None = Field(default=None, description="待接收队列")
||||||
|
|
@ -0,0 +1,45 @@
|
||||||
|
from fastapi import APIRouter, Request, Depends, HTTPException, status, WebSocket, WebSocketDisconnect
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from pretor.utils.access import Accessor, TokenData
|
||||||
|
from pretor.api.platform.event import PretorEvent
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
client_router = APIRouter(prefix="/api/v1/adapter/client", tags=["client"])


class Message(BaseModel):
    message: str


@client_router.post("")
async def create_message(message: Message,
                         request: Request,
                         token_date: TokenData = Depends(Accessor.get_current_user)):
    """Accept a client message, hand it to the supervisory node, and route the reply."""
    logger.info(f"收到消息,来源:客户端,消息内容:{message.message}")
    event = PretorEvent(platform="client",
                        user_id=str(token_date.user_id),
                        user_name=token_date.user_name,
                        message=message.message)
    supervisory_node = request.app.state.supervisory_node
    # Renamed from `message` to avoid shadowing the request-body parameter.
    reply = await supervisory_node.working.remote(event)
    if reply == "任务已创建":
        global_state_machine = request.app.state.global_state_machine
        # Fix: GlobalStateMachine defines add_event(), not add(); the original
        # `.add.remote(event)` would fail on the actor. Fire-and-forget is fine here.
        global_state_machine.add_event.remote(event)
        return {"message": event.event_id}
    elif reply == "未知相应类型":
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="模型回复错误")
    else:
        return {"message": reply}


@client_router.websocket("/ws/{event_id}")
async def websocket_endpoint(websocket: WebSocket, event_id: str):
    """Bridge an event's pending/receive queues over a websocket until disconnect."""
    await websocket.accept()
    global_state_machine = websocket.app.state.global_state_machine
    try:
        while True:
            # Fix: global_state_machine is a Ray actor handle, so its methods must
            # be invoked via .remote(); the returned ObjectRef is awaitable.
            await websocket.send_text(await global_state_machine.get_pending.remote(event_id))
            response = await websocket.receive_text()
            await global_state_machine.put_received.remote(event_id, response)
    except WebSocketDisconnect:
        # Client went away; nothing to clean up here.
        pass
||||||
|
|
@ -0,0 +1,34 @@
|
||||||
|
from fastapi import APIRouter, Request, Depends
|
||||||
|
from pydantic import BaseModel
|
||||||
|
from typing import Literal
|
||||||
|
from pretor.utils.access import TokenData, Accessor
|
||||||
|
from typing import Dict
|
||||||
|
from pretor.core.global_state_machine.model_provider.base_provider import Provider
|
||||||
|
|
||||||
|
|
||||||
|
provider_router = APIRouter(prefix="/api/v1/provider", tags=["provider"])


class ProviderRegister(BaseModel):
    # Request payload for registering a model provider.
    provider_type: Literal["openai", "gemini", "claude"]
    provider_title: str
    provider_url: str
    provider_apikey: str


@provider_router.post("")
async def create_provider(provider_register: ProviderRegister,
                          request: Request,
                          token_data: TokenData = Depends(Accessor.get_current_user)) -> None:
    """Register a new model provider owned by the authenticated user."""
    global_state_machine = request.app.state.global_state_machine
    await global_state_machine.add_provider.remote(provider_type=provider_register.provider_type,
                                                   provider_title=provider_register.provider_title,
                                                   provider_url=provider_register.provider_url,
                                                   provider_apikey=provider_register.provider_apikey,
                                                   provider_owner=token_data.user_id)


@provider_router.get("/list")
async def get_provider_list(request: Request,
                            _: TokenData = Depends(Accessor.get_current_user)) -> Dict[str, Dict[str, Provider]]:
    """Return all registered providers wrapped under a "provider_list" key.

    Fix: the original annotation was Dict[str, Provider], but the handler returns
    {"provider_list": <Dict[str, Provider]>}; FastAPI validates the response
    against the annotation, so the old annotation would reject every response.
    """
    global_state_machine = request.app.state.global_state_machine
    provider_list: Dict[str, Provider] = await global_state_machine.get_provider_list.remote()
    return {"provider_list": provider_list}
||||||
|
|
@ -0,0 +1,26 @@
|
||||||
|
import ray
|
||||||
|
from typing import Dict
|
||||||
|
from fastapi import FastAPI,WebSocket
|
||||||
|
from pretor.core.database.postgres import PostgresDatabase
|
||||||
|
from pretor.core.global_state_machine.global_state_machine import GlobalStateMachine
|
||||||
|
from pretor.core.individual.supervisory_node.supervisory_node import SupervisoryNode
|
||||||
|
|
||||||
|
|
||||||
|
@ray.remote
class PretorGateway:
    """FastAPI gateway actor that wires the shared services into app.state."""

    # event_id -> live websocket connection (filled at runtime).
    gateway: Dict[str, WebSocket]

    def __init__(self,
                 postgres_database: PostgresDatabase,
                 global_state_machine: GlobalStateMachine,
                 supervisory_node: SupervisoryNode,):
        # Fix: the original assigned self.app = FastAPI() twice; once is enough.
        self.app = FastAPI()
        self.gateway = {}

        self.app.state.postgres_database = postgres_database
        self.app.state.global_state_machine = global_state_machine
        # Fix: route handlers read request.app.state.supervisory_node; the original
        # stored the actor under `state.supervisory`, so every handler would
        # AttributeError at request time.
        self.app.state.supervisory_node = supervisory_node

    async def server_run(self):
        # TODO: start an ASGI server (e.g. uvicorn) for self.app.
        pass
||||||
|
|
||||||
|
|
@ -1,6 +1,7 @@
|
||||||
from sqlalchemy.exc import IntegrityError, OperationalError
|
from sqlalchemy.exc import IntegrityError, OperationalError
|
||||||
from pydantic import ValidationError
|
from pydantic import ValidationError
|
||||||
from loguru import logger
|
from loguru import logger
|
||||||
|
from pretor.utils.error import UserNotExistError
|
||||||
|
|
||||||
def database_exception(func):
|
def database_exception(func):
|
||||||
async def wrapper(*args, **kwargs):
|
async def wrapper(*args, **kwargs):
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,54 @@
|
||||||
|
from sqlmodel import SQLModel, Field, select
|
||||||
|
from typing import Optional, List
|
||||||
|
import json
|
||||||
|
|
||||||
|
class WorkflowRecord(SQLModel, table=True):
    """Persisted workflow snapshot, stored as a JSON string."""
    id: Optional[int] = Field(default=None, primary_key=True)
    workflow_id: str = Field(index=True)
    workflow_data_json: str


class MemoryRecord(SQLModel, table=True):
    """One memory entry with its embedding vector."""
    id: Optional[int] = Field(default=None, primary_key=True)
    memory_text: str
    # NOTE(review): List[float] has no native SQL type and `type_: "VECTOR"` via
    # sa_column_kwargs is unusual — confirm this column actually maps; an explicit
    # sa_column with a pgvector Vector type is the standard approach.
    embedding: List[float] = Field(sa_column_kwargs={"type_": "VECTOR"})  # Requires pgvector extension setup in DB


class MemoryRAG:
    """Workflow persistence plus a (simplified) embedding memory store."""

    def __init__(self, async_session_maker):
        self.async_session_maker = async_session_maker

    async def save_workflow(self, workflow_id: str, workflow_data: dict):
        """Serialize and persist one workflow; returns the refreshed record."""
        async with self.async_session_maker() as session:
            record = WorkflowRecord(
                workflow_id=workflow_id,
                workflow_data_json=json.dumps(workflow_data)
            )
            session.add(record)
            await session.commit()
            await session.refresh(record)
            return record

    async def get_workflow(self, workflow_id: str):
        """Load a workflow by id; returns the parsed dict, or None if absent."""
        async with self.async_session_maker() as session:
            statement = select(WorkflowRecord).where(WorkflowRecord.workflow_id == workflow_id)
            results = await session.execute(statement)
            record = results.scalar_one_or_none()
            if record:
                return json.loads(record.workflow_data_json)
            return None

    async def add_memory(self, memory_text: str, embedding: List[float]):
        """Persist one memory entry; returns the refreshed record."""
        async with self.async_session_maker() as session:
            record = MemoryRecord(memory_text=memory_text, embedding=embedding)
            session.add(record)
            await session.commit()
            await session.refresh(record)
            return record

    async def retrieve_memory(self, query_embedding: List[float], limit: int = 5):
        """Fetch up to `limit` memories.

        A real pgvector query would ORDER BY embedding distance, e.g.
        select(MemoryRecord).order_by(MemoryRecord.embedding.l2_distance(query_embedding)).limit(limit);
        this simplified version just takes the first rows.
        """
        async with self.async_session_maker() as session:
            statement = select(MemoryRecord).limit(limit)
            results = await session.execute(statement)
            # Fix: execute() yields Row tuples; .scalars() unwraps them to
            # MemoryRecord instances, matching what add_memory returns.
            return results.scalars().all()
||||||
|
|
@ -0,0 +1,28 @@
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from pretor.core.database.table import Provider
|
||||||
|
from sqlmodel import select
|
||||||
|
from pretor.core.database.database_exception import database_exception
|
||||||
|
from pretor.core.global_state_machine.model_provider import Provider
|
||||||
|
|
||||||
|
class ProviderDatabase:
    """CRUD access to the provider table.

    NOTE(review): this module imports `Provider` twice (from core.database.table
    and from global_state_machine.model_provider); the second import shadows the
    first, so select(Provider) below queries the pydantic model, not the table —
    confirm which Provider is intended and alias one of the imports.
    """

    def __init__(self, async_session_maker):
        self.async_session_maker = async_session_maker

    @database_exception
    async def get_provider(self) -> List[Provider]:
        """Load every stored provider row and repackage as Provider models."""
        # Fix: the sessionmaker must be *called* to produce a session; the
        # original entered the factory object itself as a context manager.
        async with self.async_session_maker() as session:
            statement = select(Provider)
            # Fix: the original `await session.exec(statement).all()` awaited the
            # wrong expression — .all() must be applied to the awaited result.
            results = (await session.exec(statement)).all()
            providers = [Provider(provider_title=provider.provider_title,
                                  provider_url=provider.provider_url,
                                  provider_apikey=provider.provider_apikey,
                                  provider_models=provider.provider_models,
                                  provider_type=provider.provider_type) for provider in results]
            return providers

    @database_exception
    async def add_provider(self, **kwargs) -> None:
        """Insert a new provider row built from the keyword arguments."""
        async with self.async_session_maker() as session:
            provider = Provider(**kwargs)
            # Fix: Session.add() is synchronous (the original awaited it), and the
            # insert is lost without an explicit commit.
            session.add(provider)
            await session.commit()
||||||
|
|
@ -0,0 +1,54 @@
|
||||||
|
from pretor.core.database.table import User
|
||||||
|
from sqlmodel import select
|
||||||
|
from pretor.utils.error import UserNotExistError, UserPasswordError
|
||||||
|
from pretor.core.database.database_exception import database_exception
|
||||||
|
|
||||||
|
class AuthDatabase:
    """User CRUD and login lookups; every method is wrapped by database_exception."""

    def __init__(self, async_session_maker):
        self.async_session_maker = async_session_maker

    @database_exception
    async def add_user(self, user_name: str, hashed_password: str) -> User:
        """Insert a new user row and return it refreshed from the database."""
        user = User(user_name=user_name, hashed_password=hashed_password)
        # Fix: the sessionmaker must be called to produce a session; the original
        # entered the factory itself as a context manager (the other methods in
        # this class already call it).
        async with self.async_session_maker() as session:
            session.add(user)
            await session.commit()
            await session.refresh(user)
            return user

    @database_exception
    async def change_password(self, user_name, old_password, new_password) -> User:
        """Replace a user's stored password after verifying the old one.

        NOTE(review): old_password is compared directly against the stored
        hashed_password, so callers must pass the already-hashed value — confirm.

        Raises:
            UserNotExistError: no user with this user_name.
            UserPasswordError: old_password does not match the stored value.
        """
        async with self.async_session_maker() as session:
            statement = select(User).where(User.user_name == user_name)
            results = await session.exec(statement)
            user = results.scalar_one_or_none()
            if user is None:
                raise UserNotExistError()
            if old_password != user.hashed_password:
                raise UserPasswordError()
            user.hashed_password = new_password
            session.add(user)
            await session.commit()
            await session.refresh(user)
            return user

    @database_exception
    async def delete_user(self, user_name: str) -> None:
        """Delete the user row; raises UserNotExistError if absent."""
        async with self.async_session_maker() as session:
            statement = select(User).where(User.user_name == user_name)
            results = await session.exec(statement)
            user = results.scalar_one_or_none()
            if user is None:
                raise UserNotExistError()
            session.delete(user)
            await session.commit()

    @database_exception
    async def login_user(self, user_name: str) -> User:
        """Fetch the user row for a login attempt.

        Fix: the original annotated the return as str, but the method returns the
        full User object (callers hand it to Accessor.login_hashed_password).

        Raises:
            UserNotExistError: no user with this user_name.
        """
        async with self.async_session_maker() as session:
            statement = select(User).where(User.user_name == user_name)
            results = await session.exec(statement)
            user = results.scalar_one_or_none()
            if user is None:
                raise UserNotExistError()
            return user
||||||
|
|
@ -1,11 +1,12 @@
|
||||||
|
import os
|
||||||
|
|
||||||
import ray
|
import ray
|
||||||
from pretor.core.database.table import User
|
|
||||||
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
|
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
|
||||||
from sqlalchemy.orm import sessionmaker
|
from sqlalchemy.orm import sessionmaker
|
||||||
from sqlmodel import SQLModel, select
|
from sqlmodel import SQLModel
|
||||||
from pretor.utils.error import UserNotExistError, UserPasswordError
|
|
||||||
import os
|
from pretor.core.database.module.user import AuthDatabase
|
||||||
from pretor.core.database.database_exception import database_exception
|
from pretor.core.database.module.provider import ProviderDatabase
|
||||||
|
|
||||||
@ray.remote
|
@ray.remote
|
||||||
class PostgresDatabase:
|
class PostgresDatabase:
|
||||||
|
|
@ -19,53 +20,9 @@ class PostgresDatabase:
|
||||||
self.async_engine = create_async_engine(database_url, echo=True)
|
self.async_engine = create_async_engine(database_url, echo=True)
|
||||||
self.async_session_maker = sessionmaker(self.async_engine, class_=AsyncSession, expire_on_commit=False)
|
self.async_session_maker = sessionmaker(self.async_engine, class_=AsyncSession, expire_on_commit=False)
|
||||||
|
|
||||||
|
self.auth_database = AuthDatabase(self.async_session_maker)
|
||||||
|
self.provider_database = ProviderDatabase(self.async_session_maker)
|
||||||
|
|
||||||
async def init_db(self) -> None:
|
async def init_db(self) -> None:
|
||||||
async with self.async_engine.begin() as conn:
|
async with self.async_engine.begin() as conn:
|
||||||
await conn.run_sync(SQLModel.metadata.create_all)
|
await conn.run_sync(SQLModel.metadata.create_all)
|
||||||
|
|
||||||
@database_exception
|
|
||||||
async def add_user(self, user_name: str, hashed_password: str) -> User:
|
|
||||||
user = User(user_name=user_name, hashed_password=hashed_password)
|
|
||||||
async with self.async_session_maker as session:
|
|
||||||
session.add(user)
|
|
||||||
await session.commit()
|
|
||||||
await session.refresh(user)
|
|
||||||
return user
|
|
||||||
|
|
||||||
@database_exception
|
|
||||||
async def change_password(self, user_name, old_password, new_password) -> User:
|
|
||||||
async with self.async_session_maker() as session:
|
|
||||||
statement = select(User).where(User.user_name == user_name)
|
|
||||||
results = await session.exec(statement)
|
|
||||||
user = results.scalar_one_or_none()
|
|
||||||
if user is None:
|
|
||||||
raise UserNotExistError()
|
|
||||||
if old_password != user.hashed_password:
|
|
||||||
raise UserPasswordError()
|
|
||||||
user.hashed_password = new_password
|
|
||||||
session.add(user)
|
|
||||||
await session.commit()
|
|
||||||
await session.refresh(user)
|
|
||||||
return user
|
|
||||||
|
|
||||||
@database_exception
|
|
||||||
async def delete_user(self, user_name: str) -> None:
|
|
||||||
async with self.async_session_maker() as session:
|
|
||||||
statement = select(User).where(User.user_name == user_name)
|
|
||||||
results = await session.exec(statement)
|
|
||||||
user = results.scalar_one_or_none()
|
|
||||||
if user is None:
|
|
||||||
raise UserNotExistError()
|
|
||||||
session.delete(user)
|
|
||||||
await session.commit()
|
|
||||||
|
|
||||||
@database_exception
|
|
||||||
async def get_user_password(self, user_name: str) -> str:
|
|
||||||
async with self.async_session_maker() as session:
|
|
||||||
statement = select(User).where(User.user_name == user_name)
|
|
||||||
results = await session.exec(statement)
|
|
||||||
user = results.scalar_one_or_none()
|
|
||||||
if user is None:
|
|
||||||
raise UserNotExistError()
|
|
||||||
return user.hashed_password
|
|
||||||
|
|
||||||
|
|
@ -1 +1,2 @@
|
||||||
from pretor.core.database.table.user import User
|
from pretor.core.database.table.user import User
|
||||||
|
from pretor.core.database.table.provider import Provider
|
||||||
|
|
@ -0,0 +1,11 @@
|
||||||
|
from sqlmodel import SQLModel, Field
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
class Provider(SQLModel):
    """Persisted provider registration record.

    NOTE(review): declared without table=True, so __tablename__ has no effect and
    no table is created for this model — confirm whether table=True was intended
    (a List[str] column would then need an explicit JSON/ARRAY sa_column).
    """

    __tablename__ = "provider"

    # Alias chosen by the user; doubles as the primary key.
    provider_title: str = Field(primary_key=True)
    provider_url: str
    provider_apikey: str
    provider_models: List[str]
    provider_type: str
    provider_owner: int
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
from sqlmodel import SQLModel, Field, Column, String
|
from sqlmodel import SQLModel, Field
|
||||||
|
|
||||||
|
|
||||||
class User(SQLModel):
|
class User(SQLModel):
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,115 @@
|
||||||
|
import ray
|
||||||
|
from pretor.core.global_state_machine.provider_manager import ProviderManager
|
||||||
|
from pretor.core.global_state_machine.model_provider import Provider, ProviderArgs
|
||||||
|
import httpx
|
||||||
|
from loguru import logger
|
||||||
|
from typing import Dict, Literal
|
||||||
|
from pretor.core.database.postgres import PostgresDatabase
|
||||||
|
from pretor.api.platform.event import PretorEvent
|
||||||
|
import asyncio
|
||||||
|
|
||||||
|
from pretor.core.workflow.workflow import PretorWorkflow
|
||||||
|
|
||||||
|
|
||||||
|
@ray.remote
class GlobalStateMachine:
    """Ray actor holding the live event table and the global provider registry."""

    def __init__(self, postgres_database: PostgresDatabase):
        # Fix: keys are PretorEvent.event_id values, which are ULID *strings* —
        # the original annotation said Dict[int, PretorEvent].
        self.event_dict: Dict[str, PretorEvent] = {}
        self.global_provider_manager = ProviderManager(postgres_database)
        self.postgres_database = postgres_database

    # --- event_dict operations -------------------------------------------------

    def add_event(self, event: PretorEvent) -> None:
        """Attach fresh queues to the event and index it by event_id."""
        event.pending_queue = asyncio.Queue()
        event.receive_queue = asyncio.Queue()
        self.event_dict[event.event_id] = event

    def delete_event(self, event_id: str) -> None:
        del self.event_dict[event_id]

    def get_event(self, event_id: str) -> PretorEvent:
        # Fix: the original looked up the *literal string* "event_id", so every
        # call returned None regardless of the argument.
        return self.event_dict.get(event_id, None)

    def update_attachment(self, event_id: str, attachment: Dict[str, str]) -> None:
        self.event_dict[event_id].attachment = attachment

    def update_workflow(self, event_id: str, workflow: PretorWorkflow) -> None:
        self.event_dict[event_id].workflow = workflow

    async def put_pending(self, event_id, item) -> None:
        await self.event_dict[event_id].pending_queue.put(item)

    async def get_pending(self, event_id) -> str:
        return await self.event_dict[event_id].pending_queue.get()

    async def put_received(self, event_id, item) -> None:
        await self.event_dict[event_id].receive_queue.put(item)

    async def get_receive_queue(self, event_id) -> str:
        return await self.event_dict[event_id].receive_queue.get()

    # --- global_provider_manager operations ------------------------------------

    async def add_provider(self, provider_type: Literal["openai", "gemini", "claude"],
                           provider_title: str,
                           provider_url: str,
                           provider_apikey: str,
                           provider_owner: int) -> None:
        """Register a provider adapter and persist it (provider_manager method).

        Args:
            provider_type: Provider API type; only openai, gemini and claude.
            provider_title: Alias for the provider, used as the registry key.
            provider_url: Provider base URL.
            provider_apikey: Provider API key.
            provider_owner: user_id of the owning user.
        """
        provider_args: ProviderArgs = ProviderArgs(provider_title=provider_title,
                                                   provider_url=provider_url,
                                                   provider_apikey=provider_apikey,
                                                   provider_owner=provider_owner)
        try:
            # NOTE(review): this reads `provider_mapper` and calls `create_model`;
            # other versions of ProviderManager/BaseProvider in this change use
            # `_provider_mapper` and `create_provider` — confirm the names match,
            # otherwise the broad `except Exception` below silently hides the
            # AttributeError as a "parse" warning.
            if provider_type not in self.global_provider_manager.provider_mapper.keys():
                logger.warning(f"Provider type {provider_type} is not supported.")
                return None
            provider_class = self.global_provider_manager.provider_mapper.get(provider_type)
            provider: Provider = await provider_class.create_model(provider_args)

            provider.provider_owner = provider_owner

            self.global_provider_manager.provider_register[provider_title] = provider

            await self.postgres_database.provider_database.add_provider.remote(provider_title=provider.provider_title,
                                                                               provider_url=provider.provider_url,
                                                                               provider_apikey=provider.provider_apikey,
                                                                               provider_models=provider.provider_models,
                                                                               provider_type=provider.provider_type,
                                                                               provider_owner=provider.provider_owner)

            logger.info(f"已添加适配器{provider_title}")
        except httpx.RequestError as e:
            logger.warning(f"[{provider_args.provider_title}] 网络请求异常: {e}")
        except Exception as e:
            logger.warning(f"[{provider_args.provider_title}] 解析模型列表时发生错误: {e}")

    def get_provider_list(self) -> Dict[str, Provider]:
        """Return the provider registry dict (provider_manager method)."""
        return self.global_provider_manager.provider_register

    def get_provider(self, provider_title) -> Provider:
        """Return the Provider registered under provider_title, or None if absent."""
        return self.global_provider_manager.provider_register.get(provider_title)
||||||
|
|
@ -0,0 +1,4 @@
|
||||||
|
from pretor.core.global_state_machine.model_provider.base_provider import Provider, ProviderArgs
|
||||||
|
from pretor.core.global_state_machine.model_provider.openai_provider import OpenAIProvider
|
||||||
|
from pretor.core.global_state_machine.model_provider.gemini_provider import GeminiProvider
|
||||||
|
from pretor.core.global_state_machine.model_provider.claude_provider import ClaudeProvider
|
||||||
|
|
@ -0,0 +1,81 @@
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from pydantic import BaseModel
|
||||||
|
from typing import List
|
||||||
|
from enum import Enum
|
||||||
|
class ProviderStatus(str, Enum):
    """Health flag for a registered provider.

    Fix: the original bases were (Enum, str); the mixin data type must precede
    Enum, and Python raises TypeError at class-creation time for the old order.
    """
    UP = "up"
    DOWN = "down"
||||||
|
|
||||||
|
class Provider(BaseModel):
|
||||||
|
provider_title: str
|
||||||
|
provider_url: str
|
||||||
|
provider_apikey: str
|
||||||
|
provider_models: List[str]
|
||||||
|
provider_type: str
|
||||||
|
provider_owner: int | None = None
|
||||||
|
provider_status: ProviderStatus = ProviderStatus.UP
|
||||||
|
|
||||||
|
class ProviderArgs(BaseModel):
|
||||||
|
provider_title: str
|
||||||
|
provider_url: str
|
||||||
|
provider_apikey: str
|
||||||
|
provider_owner: int
|
||||||
|
|
||||||
|
class BaseProvider(ABC):
|
||||||
|
@staticmethod
|
||||||
|
@abstractmethod
|
||||||
|
async def create_provider(provider_args: ProviderArgs) -> Provider:
|
||||||
|
"""
|
||||||
|
创建一个供应商,传入provider_args参数,打包为一个Provider对象
|
||||||
|
|
||||||
|
Args:
|
||||||
|
provider_args: 参数包,包含以下几个参数
|
||||||
|
provider_title: 供应商的别名
|
||||||
|
provider_url: 供应商的url
|
||||||
|
provider_apikey:供应商的apikey
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
返回一个Provider对象,由provider_manager管理
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
@abstractmethod
|
||||||
|
async def _load_models(provider_args: ProviderArgs) -> List[str]:
|
||||||
|
"""
|
||||||
|
加载模型列表
|
||||||
|
base_provider的字类应当按照供应商的api标准,向供应商的接口发送http请求从而或者供应商所提供的模型列表
|
||||||
|
|
||||||
|
Args:
|
||||||
|
provider_args: 参数包,包含以下几个参数
|
||||||
|
provider_title: 供应商的别名
|
||||||
|
provider_url: 供应商的url
|
||||||
|
provider_apikey:供应商的apikey
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
返回一个列表,为http请求获取的模型列表
|
||||||
|
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
@abstractmethod
|
||||||
|
def _return_provider(provider_args: ProviderArgs, provider_models: List[str]) -> Provider:
|
||||||
|
"""
|
||||||
|
包装Provider对象并返回
|
||||||
|
将provider_args和_load_models获取的方法包装为provider对象
|
||||||
|
|
||||||
|
Args:
|
||||||
|
provider_args: 参数包,包含以下几个参数
|
||||||
|
provider_title: 供应商的别名
|
||||||
|
provider_url: 供应商的url
|
||||||
|
provider_apikey:供应商的apikey
|
||||||
|
|
||||||
|
provider_models: 模型列表,为该供应商包含的模型列表
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
返回一个Provider对象
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1,10 +1,10 @@
|
||||||
from pretor.adapter.model_adapter.model_provider.base_provider import BaseProvider, Provider, ProviderArgs
|
from pretor.core.global_state_machine.model_provider.base_provider import BaseProvider, Provider, ProviderArgs
|
||||||
import httpx
|
import httpx
|
||||||
from typing import List
|
from typing import List
|
||||||
|
|
||||||
class ClaudeProvider(BaseProvider):
|
class ClaudeProvider(BaseProvider):
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def create_model(provider_args: ProviderArgs) -> Provider:
|
async def create_provider(provider_args: ProviderArgs) -> Provider:
|
||||||
provider_models: List[str] = await ClaudeProvider._load_models(provider_args)
|
provider_models: List[str] = await ClaudeProvider._load_models(provider_args)
|
||||||
provider: Provider = ClaudeProvider._return_provider(provider_args, provider_models)
|
provider: Provider = ClaudeProvider._return_provider(provider_args, provider_models)
|
||||||
return provider
|
return provider
|
||||||
|
|
@ -33,7 +33,7 @@ class ClaudeProvider(BaseProvider):
|
||||||
return ["claude-3-5-sonnet-20240620", "claude-3-opus-20240229", "claude-3-haiku-20240307"]
|
return ["claude-3-5-sonnet-20240620", "claude-3-opus-20240229", "claude-3-haiku-20240307"]
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"[{provider_args.provider_title}] 获取 Claude 模型列表错误: {e}")
|
print(f"[{provider_args.provider_title}] 获取 Claude 模型列表错误: {e}")
|
||||||
return ["claude-3-5-sonnet-20240620"]
|
return []
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _return_provider(provider_args: ProviderArgs, provider_models: List[str]) -> Provider:
|
def _return_provider(provider_args: ProviderArgs, provider_models: List[str]) -> Provider:
|
||||||
|
|
@ -1,10 +1,10 @@
|
||||||
from pretor.adapter.model_adapter.model_provider.base_provider import BaseProvider, Provider, ProviderArgs
|
from pretor.core.global_state_machine.model_provider.base_provider import BaseProvider, Provider, ProviderArgs
|
||||||
import httpx
|
import httpx
|
||||||
from typing import List
|
from typing import List
|
||||||
|
|
||||||
class GeminiProvider(BaseProvider):
|
class GeminiProvider(BaseProvider):
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def create_model(provider_args: ProviderArgs) -> Provider:
|
async def create_provider(provider_args: ProviderArgs) -> Provider:
|
||||||
provider_models: List[str] = await GeminiProvider._load_models(provider_args)
|
provider_models: List[str] = await GeminiProvider._load_models(provider_args)
|
||||||
provider: Provider = GeminiProvider._return_provider(provider_args, provider_models)
|
provider: Provider = GeminiProvider._return_provider(provider_args, provider_models)
|
||||||
return provider
|
return provider
|
||||||
|
|
@ -1,10 +1,10 @@
|
||||||
from pretor.adapter.model_adapter.model_provider.base_provider import BaseProvider, Provider, ProviderArgs
|
from pretor.core.global_state_machine.model_provider.base_provider import BaseProvider, Provider, ProviderArgs
|
||||||
import httpx
|
import httpx
|
||||||
from typing import List
|
from typing import List
|
||||||
|
|
||||||
class OpenAIProvider(BaseProvider):
|
class OpenAIProvider(BaseProvider):
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def create_model(provider_args: ProviderArgs) -> Provider:
|
async def create_provider(provider_args: ProviderArgs) -> Provider:
|
||||||
provider_models: List[str] = await OpenAIProvider._load_models(provider_args)
|
provider_models: List[str] = await OpenAIProvider._load_models(provider_args)
|
||||||
provider: Provider = OpenAIProvider._return_provider(provider_args, provider_models)
|
provider: Provider = OpenAIProvider._return_provider(provider_args, provider_models)
|
||||||
return provider
|
return provider
|
||||||
|
|
@ -0,0 +1,25 @@
|
||||||
|
from pretor.core.global_state_machine.model_provider import Provider, OpenAIProvider,GeminiProvider, ClaudeProvider
|
||||||
|
from typing import Dict, Type
|
||||||
|
|
||||||
|
class ProviderManager:
|
||||||
|
"""
|
||||||
|
模型供应商管理器 (ProviderManager)。
|
||||||
|
负责维护不同的 LLM 协议适配器,提供从配置注册到 Agent 实例化的全生命周期管理。
|
||||||
|
"""
|
||||||
|
# --- 类属性显式标注 (IDE 友好) ---
|
||||||
|
provider_mapper: Dict[str, Type[Provider]]
|
||||||
|
"""协议映射表:键为协议名(如 'openai'),值为对应的 Provider 类。"""
|
||||||
|
|
||||||
|
provider_register: Dict[str, Provider]
|
||||||
|
"""供应商注册表:键为用户自定义别名,值为已实例化的 Provider 对象。"""
|
||||||
|
def __init__(self, postgres):
|
||||||
|
self.provider_mapper = {"openai": OpenAIProvider,
|
||||||
|
"gemini": GeminiProvider,
|
||||||
|
"claude": ClaudeProvider}
|
||||||
|
self.provider_register = {}
|
||||||
|
self._load_provider_register(postgres)
|
||||||
|
|
||||||
|
def _load_provider_register(self, postgres) -> None:
|
||||||
|
providers = postgres.provider_database.get_provider.remote()
|
||||||
|
for provider in providers:
|
||||||
|
self.provider_register[provider.title] = provider
|
||||||
|
|
@ -0,0 +1,41 @@
|
||||||
|
import ray
|
||||||
|
from pydantic_ai import Agent
|
||||||
|
from pretor.core.workflow.workflow import PretorWorkflow, WorkStep, WorkerGroup
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
@ray.remote
|
||||||
|
class ConsciousnessNode:
|
||||||
|
def __init__(self, agent: Agent):
|
||||||
|
self.agent = agent
|
||||||
|
|
||||||
|
async def generate_workflow(self, template: dict, task_description: str) -> PretorWorkflow:
|
||||||
|
prompt = f"Given the template {template} and task '{task_description}', generate a list of actionable steps."
|
||||||
|
# Simulated parsing logic: in a real implementation we would parse structured output from the agent
|
||||||
|
# response = await self.agent.run(prompt)
|
||||||
|
|
||||||
|
wg = WorkerGroup(
|
||||||
|
name="default_squad",
|
||||||
|
primary_individual={"coder": 1},
|
||||||
|
composite_individual={}
|
||||||
|
)
|
||||||
|
|
||||||
|
steps = [
|
||||||
|
WorkStep(step=1, node="consciousness_node", action="analyze", desc="Analyze task details: " + task_description),
|
||||||
|
WorkStep(step=2, node="control_node", action="execute", desc="Execute default execution logic", input=["1"])
|
||||||
|
]
|
||||||
|
|
||||||
|
return PretorWorkflow(
|
||||||
|
title=f"Workflow for {task_description[:10]}",
|
||||||
|
workgroup_list=[wg],
|
||||||
|
work_link=steps
|
||||||
|
)
|
||||||
|
|
||||||
|
async def check_task(self, task_status: dict) -> bool:
|
||||||
|
if task_status.get("status") == "completed":
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def process_complex_transaction(self, transaction_data: dict) -> dict:
|
||||||
|
prompt = f"Process the following complex transaction data and extract key entities: {transaction_data}"
|
||||||
|
result = await self.agent.run(prompt)
|
||||||
|
return {"processed": True, "analysis": result.data}
|
||||||
|
|
@ -0,0 +1,20 @@
|
||||||
|
from pydantic import Field
|
||||||
|
from pretor.core.workflow.workflow import PretorWorkflow
|
||||||
|
from pretor.utils.agent_model import ResponseModel, DepsModel
|
||||||
|
|
||||||
|
class ConsciousnessNodeResponse(ResponseModel):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class ForControlNode(ConsciousnessNodeResponse):
|
||||||
|
|
||||||
|
|
||||||
|
class ForSystem(ConsciousnessNodeResponse):
|
||||||
|
workflow: PretorWorkflow
|
||||||
|
|
||||||
|
class ForSupervisoryNode(ConsciousnessNodeResponse):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class SupervisoryNodeDeps(DepsModel):
|
||||||
|
platform: str
|
||||||
|
user_name: str
|
||||||
|
time: str
|
||||||
|
|
@ -0,0 +1,33 @@
|
||||||
|
import ray
|
||||||
|
from pydantic_ai import Agent
|
||||||
|
from pretor.core.workflow.workflow import WorkStep
|
||||||
|
|
||||||
|
@ray.remote
|
||||||
|
class ControlNode:
|
||||||
|
def __init__(self, agent: Agent):
|
||||||
|
self.agent = agent
|
||||||
|
|
||||||
|
async def execute_step(self, step: WorkStep) -> WorkStep:
|
||||||
|
if step.action == "dispatch_model":
|
||||||
|
# The WorkStep schema from workflow manager may pass target info differently, assuming `input` here or simple `desc`
|
||||||
|
result = await self.dispatch_model({}, f"Execute task: {step.desc}")
|
||||||
|
step.output = result
|
||||||
|
elif step.action == "dispatch_tool":
|
||||||
|
# Simulating parsing of tool and args from `desc` or `input`
|
||||||
|
result = await self.dispatch_tool("simulated_tool", {"desc": step.desc})
|
||||||
|
step.output = str(result)
|
||||||
|
else:
|
||||||
|
result = await self.agent.run(f"Execute generic step: {step.model_dump()}")
|
||||||
|
step.output = result.data
|
||||||
|
|
||||||
|
step.status = "completed"
|
||||||
|
return step
|
||||||
|
|
||||||
|
async def dispatch_model(self, model_info: dict, prompt: str) -> str:
|
||||||
|
# In a real system, we'd select a smaller/specific model based on model_info
|
||||||
|
result = await self.agent.run(prompt)
|
||||||
|
return result.data
|
||||||
|
|
||||||
|
async def dispatch_tool(self, tool_name: str, tool_args: dict) -> dict:
|
||||||
|
# Simulated tool dispatch
|
||||||
|
return {"tool": tool_name, "status": "executed", "args": tool_args}
|
||||||
|
|
@ -0,0 +1,16 @@
|
||||||
|
from pydantic import Field
|
||||||
|
from pretor.utils.agent_model import ResponseModel, DepsModel
|
||||||
|
|
||||||
|
class SupervisoryNodeResponse(ResponseModel):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class ForUser(SupervisoryNodeResponse):
|
||||||
|
context: str = Field(...,description="对用户的回复,应当使用和蔼的语气进行回复")
|
||||||
|
|
||||||
|
class ForConsciousnessNode(SupervisoryNodeResponse):
|
||||||
|
workflow_template: str = Field(..., description="选择的工作流模板,应当为对应模板的name字段")
|
||||||
|
|
||||||
|
class SupervisoryNodeDeps(DepsModel):
|
||||||
|
platform: str
|
||||||
|
user_name: str
|
||||||
|
time: str
|
||||||
|
|
@ -0,0 +1,38 @@
|
||||||
|
import datetime
|
||||||
|
import ray
|
||||||
|
from typing import Union
|
||||||
|
from pretor.api.platform.event import PretorEvent
|
||||||
|
|
||||||
|
from pretor.core.individual.supervisory_node.response import ForConsciousnessNode, ForUser, SupervisoryNodeDeps
|
||||||
|
from pydantic_ai import RunContext
|
||||||
|
|
||||||
|
@ray.remote
|
||||||
|
class SupervisoryNode:
|
||||||
|
def __init__(self, provider_manager: ProviderManager, provider_title: str, model_id: str) -> None:
|
||||||
|
system_prompt: str = ""
|
||||||
|
output_type = Union[ForConsciousnessNode, ForUser]
|
||||||
|
self.agent = provider_manager.create_agent(agent_name="supervisory",
|
||||||
|
system_prompt=system_prompt,
|
||||||
|
provider_title=provider_title,
|
||||||
|
model_id=model_id,
|
||||||
|
output_type=output_type,
|
||||||
|
deps_type=SupervisoryNodeDeps)
|
||||||
|
|
||||||
|
@self.agent.system_prompt
|
||||||
|
async def dynamic_prompt(ctx: RunContext[SupervisoryNodeDeps]):
|
||||||
|
return f"Context: Platform={ctx.deps.platform}, User={ctx.deps.user_name}, Time={ctx.deps.time}"
|
||||||
|
|
||||||
|
async def working(self, event: PretorEvent):
|
||||||
|
deps = SupervisoryNodeDeps(platform=event.platform,
|
||||||
|
user_name=event.user_name,
|
||||||
|
time=datetime.datetime.now())
|
||||||
|
result = await self.agent.run(event.message,deps=deps)
|
||||||
|
if isinstance(result.data, ForConsciousnessNode):
|
||||||
|
return "任务已创建"
|
||||||
|
elif isinstance(result.date, ForUser):
|
||||||
|
return result.data.content
|
||||||
|
else:
|
||||||
|
return "未知响应类型"
|
||||||
|
|
||||||
|
async def router(self, ):
|
||||||
|
pass
|
||||||
|
|
@ -1,30 +0,0 @@
|
||||||
import asyncio
|
|
||||||
import ray
|
|
||||||
from pretor.core.pipeline.pipeline_router import PipelineRouter
|
|
||||||
from pretor.core.workflow_manager.workflow import PretorWorkflow
|
|
||||||
from loguru import logger
|
|
||||||
|
|
||||||
@ray.remote
|
|
||||||
class PretorPipeline:
|
|
||||||
def __init__(self):
|
|
||||||
self.pipeline = asyncio.Queue()
|
|
||||||
self.running =True
|
|
||||||
self.worker_group = []
|
|
||||||
async def running(self):
|
|
||||||
for i in range(10):
|
|
||||||
self.worker_group.append(await self.worker())
|
|
||||||
|
|
||||||
async def worker(self):
|
|
||||||
while True:
|
|
||||||
workflow = await self.pipeline.get()
|
|
||||||
try:
|
|
||||||
logger.info(f"{workflow.title}开始运行")
|
|
||||||
for work_item in workflow.work_link:
|
|
||||||
await PipelineRouter.router(workflow, work_item)
|
|
||||||
except:
|
|
||||||
logger.error(f"{workflow.title}遭受致命错误,已结束")
|
|
||||||
continue
|
|
||||||
|
|
||||||
async def submit_workflow(self, workflow: PretorWorkflow):
|
|
||||||
await self.pipeline.put(workflow)
|
|
||||||
logger.info(f"任务已进入受理队列,当前排队数: {self.pipeline.qsize()}")
|
|
||||||
|
|
@ -1,6 +0,0 @@
|
||||||
import asyncio
|
|
||||||
|
|
||||||
class PipelineRouter:
|
|
||||||
@staticmethod
|
|
||||||
async def router(workflow):
|
|
||||||
pass
|
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
from abc import ABC,abstractmethod
|
from abc import ABC,abstractmethod
|
||||||
from pretor.core.workflow_manager.workflow import PretorWorkflow
|
from pretor.core.workflow.workflow import PretorWorkflow
|
||||||
|
|
||||||
|
|
||||||
class RunnableObject(ABC):
|
class RunnableObject(ABC):
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,7 @@
|
||||||
from typing import List, Optional, Union, Literal, Dict, Any
|
from typing import List, Optional, Union, Literal, Dict, Any
|
||||||
from pydantic import BaseModel, Field, model_validator
|
from pydantic import BaseModel, Field, model_validator
|
||||||
from ulid import ULID
|
|
||||||
from pretor.core.protocol.demand_protocol import DemandProtocol
|
from pretor.core.protocol.demand_protocol import DemandProtocol
|
||||||
|
|
||||||
NodeType = Literal[
|
NodeType = Literal[
|
||||||
"consciousness_node", "control_node", "supervisory_node",
|
"consciousness_node", "control_node", "supervisory_node",
|
||||||
"composite_individual", "primary_individual"
|
"composite_individual", "primary_individual"
|
||||||
|
|
@ -16,8 +16,8 @@ class WorkStep(BaseModel):
|
||||||
node: NodeType = Field(..., description="负责执行的节点类型")
|
node: NodeType = Field(..., description="负责执行的节点类型")
|
||||||
action: str = Field(..., description="执行的原子动作")
|
action: str = Field(..., description="执行的原子动作")
|
||||||
desc: str = Field(..., description="动作细节的自然语言描述,包含人工规范指导")
|
desc: str = Field(..., description="动作细节的自然语言描述,包含人工规范指导")
|
||||||
input: Optional[Union[str, List[str]]] = Field(default=None, description="前置依赖输出")
|
inputs: Optional[Union[str, List[str]]] = Field(default=None, description="前置依赖输出")
|
||||||
output: Optional[str] = Field(default=None, description="当前步骤产出物变量名")
|
outputs: Optional[str] = Field(default=None, description="当前步骤产出物变量名")
|
||||||
logic_gate: Optional[LogicGate] = Field(default=None, description="逻辑跳转控制")
|
logic_gate: Optional[LogicGate] = Field(default=None, description="逻辑跳转控制")
|
||||||
status: Literal["waiting", "running", "completed", "failed"] = Field(
|
status: Literal["waiting", "running", "completed", "failed"] = Field(
|
||||||
default="waiting",
|
default="waiting",
|
||||||
|
|
@ -43,7 +43,7 @@ class PretorWorkflow(BaseModel):
|
||||||
workgroup_list: List[WorkerGroup] = Field(..., description="工作组资源编排列表")
|
workgroup_list: List[WorkerGroup] = Field(..., description="工作组资源编排列表")
|
||||||
work_link: List[WorkStep] = Field(..., description="工作链逻辑定义")
|
work_link: List[WorkStep] = Field(..., description="工作链逻辑定义")
|
||||||
# ---------------- 以下为系统级管控字段,LLM 无需关心 ---------------- #
|
# ---------------- 以下为系统级管控字段,LLM 无需关心 ---------------- #
|
||||||
trace_id: str = Field(default_factory=lambda: str(ULID()), description="系统自动生成的追溯ID")
|
trace_id: str | None = Field(description="系统自动生成的追溯ID")
|
||||||
version: str = Field(default="v1.0", description="系统协议版本号")
|
version: str = Field(default="v1.0", description="系统协议版本号")
|
||||||
command: Optional[str] = Field(default=None, description="触发此工作流的原始命令")
|
command: Optional[str] = Field(default=None, description="触发此工作流的原始命令")
|
||||||
output: Dict[str, Any] = Field(default_factory=dict, description="工作流最终产出结果")
|
output: Dict[str, Any] = Field(default_factory=dict, description="工作流最终产出结果")
|
||||||
|
|
@ -1,6 +1,6 @@
|
||||||
import json
|
import json
|
||||||
from pretor.core.workflow_manager.workflow_template_generator.workflow_template_generator import WorkflowTemplateGenerator
|
from pretor.core.workflow.workflow_template_generator.workflow_template_generator import WorkflowTemplateGenerator
|
||||||
from pretor.core.workflow_manager.workflow import PretorWorkflow
|
from pretor.core.workflow.workflow import PretorWorkflow
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from loguru import logger
|
from loguru import logger
|
||||||
|
|
||||||
|
|
@ -11,7 +11,6 @@ class WorkflowManager:
|
||||||
self.template_path = Path("pretor/workflow_template")
|
self.template_path = Path("pretor/workflow_template")
|
||||||
self._load_workflow_template()
|
self._load_workflow_template()
|
||||||
|
|
||||||
|
|
||||||
def _load_workflow_template(self) -> None:
|
def _load_workflow_template(self) -> None:
|
||||||
for workflow_template_file in self.template_path.glob("*_workflow_template.json"):
|
for workflow_template_file in self.template_path.glob("*_workflow_template.json"):
|
||||||
with workflow_template_file.open("r",encoding="utf-8") as f:
|
with workflow_template_file.open("r",encoding="utf-8") as f:
|
||||||
|
|
@ -0,0 +1,150 @@
|
||||||
|
import ray
|
||||||
|
import asyncio
|
||||||
|
from pretor.core.workflow.workflow import PretorWorkflow, WorkStep
|
||||||
|
from loguru import logger
|
||||||
|
from typing import Optional, Dict, Union, Any, List
|
||||||
|
|
||||||
|
class WorkflowEngine:
|
||||||
|
def __init__(self, workflow: PretorWorkflow):
|
||||||
|
self.workflow: PretorWorkflow = workflow
|
||||||
|
# 局部上下文记忆(黑板):用于存放上一个步骤的 output,作为下一个步骤的 input
|
||||||
|
self.context_memory: Dict[str, Any] = {}
|
||||||
|
|
||||||
|
def _get_step_by_id(self, step_id: int) -> Optional[WorkStep]:
|
||||||
|
"""根据序号获取当前步骤的定义"""
|
||||||
|
for step in self.workflow.work_link:
|
||||||
|
if step.step == step_id:
|
||||||
|
return step
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _prepare_inputs(self, inputs: Optional[Union[str, List[str]]]) -> Any:
|
||||||
|
"""从上下文中提取当前步骤所需的入参"""
|
||||||
|
if not inputs:
|
||||||
|
return None
|
||||||
|
|
||||||
|
if isinstance(inputs, str):
|
||||||
|
# 如果 input 是单一变量名,直接返回该变量的值
|
||||||
|
return self.context_memory.get(inputs)
|
||||||
|
|
||||||
|
if isinstance(inputs, list):
|
||||||
|
# 如果 input 是列表,返回包含这些变量名及其值的字典
|
||||||
|
return {k: self.context_memory.get(k) for k in inputs}
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
async def run(self):
|
||||||
|
logger.info(f"🚀 工作流引擎启动: {self.workflow.title} [Trace ID: {self.workflow.trace_id}]")
|
||||||
|
max_step = len(self.workflow.work_link)
|
||||||
|
|
||||||
|
# 核心调度循环:只要 step 在合法范围内,就一直执行
|
||||||
|
while 1 <= self.workflow.status.step <= max_step:
|
||||||
|
current_step_id = self.workflow.status.step
|
||||||
|
current_step = self._get_step_by_id(current_step_id)
|
||||||
|
|
||||||
|
if not current_step:
|
||||||
|
logger.error(f"严重错误:找不到步骤 {current_step_id},工作流强制终止。")
|
||||||
|
self.workflow.status.status = "failed"
|
||||||
|
break
|
||||||
|
|
||||||
|
logger.info(f"▶️ 开始执行 Step {current_step_id}: [{current_step.node}] -> {current_step.action}")
|
||||||
|
current_step.status = "running"
|
||||||
|
|
||||||
|
try:
|
||||||
|
# 1. 准备依赖数据
|
||||||
|
step_input_data = self._prepare_inputs(current_step.inputs)
|
||||||
|
|
||||||
|
# 2. 派发给具体的 Ray 节点 (这也是整个架构的连接点)
|
||||||
|
# 这个方法会通过 Ray 去调用真正的模型或工具,并等待结果
|
||||||
|
step_result, is_success = await self._dispatch_to_node(current_step, step_input_data)
|
||||||
|
|
||||||
|
if is_success:
|
||||||
|
# 3. 记录产出物到全局黑板
|
||||||
|
if current_step.outputs:
|
||||||
|
self.context_memory[current_step.outputs] = step_result
|
||||||
|
logger.debug(f"Step {current_step_id} 产出已保存至变量: '{current_step.outputs}'")
|
||||||
|
|
||||||
|
current_step.status = "completed"
|
||||||
|
else:
|
||||||
|
logger.warning(f"Step {current_step_id} 执行遇到业务失败/驳回。")
|
||||||
|
current_step.status = "failed"
|
||||||
|
|
||||||
|
# 4. 根据执行成功与否处理逻辑门跳转
|
||||||
|
self._handle_logic_gate(current_step, is_success)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
# 捕获系统级崩溃 (例如 Ray Actor 断联、网络异常)
|
||||||
|
logger.error(f"❌ Step {current_step_id} 发生系统级未捕获异常: {e}", exc_info=True)
|
||||||
|
current_step.status = "failed"
|
||||||
|
self.workflow.status.status = "failed"
|
||||||
|
# 发生未预期的崩溃,通常不再走业务 logic_gate,而是直接中断
|
||||||
|
break
|
||||||
|
|
||||||
|
logger.info(f"✅ 工作流 {self.workflow.title} 运行结束。")
|
||||||
|
self.workflow.output = self.context_memory
|
||||||
|
|
||||||
|
async def _dispatch_to_node(self, step: WorkStep, input_data: Any) -> tuple[Any, bool]:
|
||||||
|
"""
|
||||||
|
【重要集成点】这里是你将引擎与具体的 Actor (Consciousness, Control, Worker) 桥接的地方。
|
||||||
|
你需要在这里利用 Ray 来调用其他 Actor。
|
||||||
|
"""
|
||||||
|
logger.debug(f"正在向 {step.node} 节点发送动作 {step.action}...")
|
||||||
|
|
||||||
|
# 伪代码示例:你可以根据节点类型,获取对应的 Actor 句柄并调用
|
||||||
|
# if step.node == "control_node":
|
||||||
|
# result = await global_control_actor.execute.remote(step.action, step.desc, input_data)
|
||||||
|
# return result, True
|
||||||
|
# elif step.node == "primary_individual":
|
||||||
|
# # 可能是个 1B 写代码模型
|
||||||
|
# worker = get_worker_actor(step.action)
|
||||||
|
# result = await worker.run.remote(step.desc, input_data)
|
||||||
|
# return result, True
|
||||||
|
await asyncio.sleep(1)
|
||||||
|
simulated_result = f"这是 {step.action} 动作产生的模拟结果"
|
||||||
|
is_success = True
|
||||||
|
return simulated_result, is_success
|
||||||
|
|
||||||
|
def _handle_logic_gate(self, step: WorkStep, is_success: bool):
|
||||||
|
"""处理逻辑门跳转,修改状态机指针"""
|
||||||
|
gate = step.logic_gate
|
||||||
|
|
||||||
|
if is_success:
|
||||||
|
if gate and gate.if_pass == "exit":
|
||||||
|
logger.info("命中 if_pass='exit',工作流被主动要求结束。")
|
||||||
|
self.workflow.status.step = 999999 # 设置一个越界值来终结 while 循环
|
||||||
|
else:
|
||||||
|
self.workflow.status.step += 1 # 默认成功则步数 +1,继续下一步
|
||||||
|
else:
|
||||||
|
if gate and gate.if_fail:
|
||||||
|
if gate.if_fail.startswith("jump_to_step_"):
|
||||||
|
target_step = int(gate.if_fail.split("_")[-1])
|
||||||
|
logger.warning(f"触发逻辑门分支!从 Step {step.step} 跳转至 Step {target_step}")
|
||||||
|
self.workflow.status.step = target_step
|
||||||
|
else:
|
||||||
|
logger.error(f"未知的 if_fail 格式: {gate.if_fail},终止工作流。")
|
||||||
|
self.workflow.status.step = 999999
|
||||||
|
else:
|
||||||
|
logger.error(f"步骤 {step.step} 失败且未配置 if_fail 兜底方案,工作流异常终止。")
|
||||||
|
self.workflow.status.step = 999999
|
||||||
|
|
||||||
|
@ray.remote
|
||||||
|
class WorkflowRunningEngine:
|
||||||
|
def __init__(self):
|
||||||
|
self.runner_engine = {}
|
||||||
|
self.workflow_queue: asyncio.Queue[PretorWorkflow] = None
|
||||||
|
|
||||||
|
async def run(self):
|
||||||
|
self.runner_engine = {
|
||||||
|
f"runner_{i}": asyncio.create_task(self.runner(i))
|
||||||
|
for i in range(10)
|
||||||
|
}
|
||||||
|
self.workflow_queue = asyncio.Queue()
|
||||||
|
|
||||||
|
async def runner(self,i: int):
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
workflow = await self.workflow_queue.get()
|
||||||
|
logger.info(f"WorkflowRunningEngine: runner_{i}接收工作流{workflow.trace_id}:{workflow.title}")
|
||||||
|
workflow_engine = WorkflowEngine(workflow)
|
||||||
|
await workflow_engine.run()
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from pretor.core.workflow_manager.workflow_template_generator.workflow_template import WorkflowTemplate
|
from pretor.core.workflow.workflow_template_generator.workflow_template import WorkflowTemplate
|
||||||
|
|
||||||
class WorkflowTemplateGenerator:
|
class WorkflowTemplateGenerator:
|
||||||
@staticmethod
|
@staticmethod
|
||||||
|
|
@ -1,5 +0,0 @@
|
||||||
from pydantic_ai import Agent
|
|
||||||
|
|
||||||
class ControlNode:
|
|
||||||
def __init__(self):
|
|
||||||
pass
|
|
||||||
|
|
@ -0,0 +1,32 @@
|
||||||
|
import docker
|
||||||
|
from typing import Dict, Any
|
||||||
|
|
||||||
|
class DockerSandboxTool:
|
||||||
|
def __init__(self):
|
||||||
|
try:
|
||||||
|
self.client = docker.from_env()
|
||||||
|
except Exception as e:
|
||||||
|
self.client = None
|
||||||
|
print(f"Failed to initialize Docker client: {e}")
|
||||||
|
|
||||||
|
def run_code(self, code: str, image: str = "python:3.9-slim") -> Dict[str, Any]:
|
||||||
|
if not self.client:
|
||||||
|
return {"error": "Docker client not initialized"}
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Simple python code runner in a container
|
||||||
|
container = self.client.containers.run(
|
||||||
|
image,
|
||||||
|
command=["python", "-c", code],
|
||||||
|
remove=True,
|
||||||
|
detach=False,
|
||||||
|
stdout=True,
|
||||||
|
stderr=True
|
||||||
|
)
|
||||||
|
# Depending on python version, container returns bytes directly
|
||||||
|
output = container.decode("utf-8") if isinstance(container, bytes) else container
|
||||||
|
return {"status": "success", "output": output}
|
||||||
|
except docker.errors.ContainerError as e:
|
||||||
|
return {"status": "error", "output": e.stderr.decode("utf-8")}
|
||||||
|
except Exception as e:
|
||||||
|
return {"status": "error", "error": str(e)}
|
||||||
|
|
@ -0,0 +1,17 @@
|
||||||
|
from typing import List, Dict, Any
|
||||||
|
from sqlmodel import select
|
||||||
|
# Assuming MemoryRecord is accessible or passed, simulating direct pgvector call
|
||||||
|
|
||||||
|
class RAGTool:
|
||||||
|
def __init__(self, async_session_maker):
|
||||||
|
self.async_session_maker = async_session_maker
|
||||||
|
|
||||||
|
async def get_embedding(self, query: str) -> List[float]:
|
||||||
|
# Simulated embedding logic; in reality, this would call an embedding API
|
||||||
|
return [0.1] * 1536
|
||||||
|
|
||||||
|
async def retrieve(self, query: str, limit: int = 5) -> List[Dict[str, Any]]:
|
||||||
|
embedding = await self.get_embedding(query)
|
||||||
|
# We simulate the retrieve_memory call logic from MemoryRAG here
|
||||||
|
# Normally you would inject MemoryRAG or a repository, doing a simplistic return here
|
||||||
|
return [{"query": query, "simulated_results": f"Found results for {query} with vector {embedding[:2]}..."}]
|
||||||
|
|
@ -0,0 +1,20 @@
|
||||||
|
import httpx
|
||||||
|
from typing import Dict, Any
|
||||||
|
|
||||||
|
class WebCrawlerTool:
|
||||||
|
def __init__(self, timeout: int = 10):
|
||||||
|
self.timeout = timeout
|
||||||
|
|
||||||
|
async def crawl(self, url: str) -> Dict[str, Any]:
|
||||||
|
try:
|
||||||
|
async with httpx.AsyncClient(timeout=self.timeout) as client:
|
||||||
|
response = await client.get(url)
|
||||||
|
response.raise_for_status()
|
||||||
|
# Basic text extraction can happen here (e.g., stripping HTML tags manually or with a library later)
|
||||||
|
return {
|
||||||
|
"url": url,
|
||||||
|
"status_code": response.status_code,
|
||||||
|
"content_preview": response.text[:500]
|
||||||
|
}
|
||||||
|
except Exception as e:
|
||||||
|
return {"url": url, "error": str(e)}
|
||||||
|
|
@ -0,0 +1,92 @@
|
||||||
|
import jwt
|
||||||
|
import os
|
||||||
|
from datetime import datetime, timedelta, timezone
|
||||||
|
from typing import Optional
|
||||||
|
from fastapi import HTTPException, status, Request
|
||||||
|
from pydantic import BaseModel, ValidationError
|
||||||
|
from pretor.core.database.table.user import User
|
||||||
|
from passlib.context import CryptContext
|
||||||
|
|
||||||
|
class TokenData(BaseModel):
|
||||||
|
user_id: str
|
||||||
|
username: Optional[str] = None
|
||||||
|
exp: Optional[int] = None
|
||||||
|
|
||||||
|
|
||||||
|
SECRET_KEY = os.getenv("SECRET_KEY", "your-fallback-secret-for-dev")
|
||||||
|
ALGORITHM = "HS256"
|
||||||
|
ACCESS_TOKEN_EXPIRE_MINUTES = 60 * 24 # 默认有效期 1 天
|
||||||
|
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
||||||
|
|
||||||
|
|
||||||
|
class Accessor:
    """JWT-based authentication helpers: token issuing/validation and password hashing."""

    @staticmethod
    def _decode_token(token: str) -> TokenData:
        """Decode and validate a JWT, returning its payload as TokenData.

        Raises:
            HTTPException: 401 when the token is expired, malformed, or its
                payload does not match the TokenData schema.
        """
        try:
            payload = jwt.decode(
                token,
                SECRET_KEY,
                algorithms=[ALGORITHM],
            )
            return TokenData(**payload)
        except jwt.ExpiredSignatureError:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Token 已过期",
            )
        except (jwt.InvalidTokenError, ValidationError):
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="无效的认证凭证",
            )

    @staticmethod
    def _create_access_token(data: dict) -> str:
        """Build a signed JWT from ``data`` with an ``exp`` claim appended.

        Args:
            data: claims to embed in the token; not mutated (copied first).

        Returns:
            The encoded JWT string.
        """
        to_encode = data.copy()  # avoid mutating the caller's dict
        expire = datetime.now(timezone.utc) + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
        to_encode.update({"exp": int(expire.timestamp())})
        encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
        return encoded_jwt

    @staticmethod
    def _verify_password(plain_password: str, hashed_password: str) -> bool:
        """Check a plaintext password against its stored bcrypt hash."""
        return pwd_context.verify(plain_password, hashed_password)

    @staticmethod
    def get_current_user(request: Request) -> TokenData:
        """Extract and validate the Bearer token from the Authorization header.

        Raises:
            HTTPException: 401 when the header is missing or not a Bearer
                scheme, or when the token fails validation (_decode_token).
        """
        auth_header = request.headers.get("Authorization")
        if not auth_header or not auth_header.startswith("Bearer "):
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="未提供认证头部",
            )
        # Fix: split only once so a token value containing a space is not
        # truncated (the previous split(" ")[1] kept only the first fragment).
        token = auth_header.split(" ", 1)[1]
        return Accessor._decode_token(token)

    @staticmethod
    def login_hashed_password(user: User, password: str) -> str:
        """Verify ``password`` for ``user`` and return a fresh access token.

        Raises:
            HTTPException: 401 when the user is missing or the password is wrong.
        """
        # NOTE(review): returning different details for "user missing" vs
        # "wrong password" enables username enumeration; consider unifying the
        # two messages (kept as-is here to preserve observable behavior).
        if not user:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="用户不存在",
            )
        if not Accessor._verify_password(password, user.hashed_password):
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="用户名或密码错误",
            )
        token_payload = {
            "user_id": str(user.id),  # ensure string form for the JWT payload
            "username": user.username,
        }
        return Accessor._create_access_token(data=token_payload)

    @staticmethod
    def hash_password(password: str) -> str:
        """Hash a plaintext password with bcrypt after basic validation.

        Raises:
            ValueError: if the password is empty or shorter than 6 characters.
        """
        if not password:
            raise ValueError("密码不能为空")
        if len(password) < 6:
            raise ValueError("密码长度不能小于 6 位")
        return pwd_context.hash(password)
|
||||||
|
|
@ -0,0 +1,7 @@
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
class ResponseModel(BaseModel):
    """Base model for response payloads; currently declares no fields."""
|
||||||
|
|
||||||
|
class DepsModel(BaseModel):
    """Base model for dependency payloads; currently declares no fields."""
|
||||||
|
|
@ -4,9 +4,6 @@ class DemandError(Exception):
|
||||||
class ModelNotExistError(Exception):
    """Raised when a requested model cannot be found."""
|
||||||
|
|
||||||
class ProviderNotExistError(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
class UserError(Exception):
    """Base class for user-related errors."""
|
||||||
|
|
||||||
|
|
class UserNotExistError(UserError):
    """Raised when the referenced user does not exist."""
|
||||||
|
|
||||||
class UserPasswordError(UserError):
    """Raised when a user's password check fails."""
|
||||||
|
|
||||||
|
class ProviderError(Exception):
    """Base class for provider-related errors."""
|
||||||
|
|
||||||
|
class ProviderNotExistError(ProviderError):
    """Raised when the referenced provider does not exist."""
|
||||||
|
|
@ -0,0 +1,20 @@
|
||||||
|
from typing import Type, TypeVar
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
# Type variable bound to *classes* deriving from BaseModel (Type[BaseModel]),
# so a decorator can be typed as "class in -> same class out".
T = TypeVar("T", bound=Type[BaseModel])
|
||||||
|
|
||||||
|
def pickle(cls: T) -> T:
    """
    Class decorator that takes over pickle serialization for a Pydantic model.

    It forces pickle to round-trip instances through Pydantic's
    (Rust-accelerated) JSON export/import instead of the default
    ``__dict__``-based protocol.

    NOTE(review): this function's name shadows the stdlib ``pickle`` module;
    avoid ``import pickle`` in modules that also import this decorator.

    Args:
        cls: the BaseModel subclass to decorate.

    Returns:
        The same class, with its ``__reduce__`` replaced.
    """
    def __reduce__(self):
        # 1. Serialize: trigger pydantic-core's (Rust) fast JSON dump.
        data = self.model_dump_json()
        # 2. Deserialize: have pickle rebuild via model_validate_json.
        #    Fix: use type(self) rather than the closed-over `cls`, so an
        #    instance of a *subclass* of the decorated class is restored as
        #    that subclass instead of being downcast to the parent.
        return type(self).model_validate_json, (data,)

    # Dynamically stitch the magic method onto the class.
    cls.__reduce__ = __reduce__
    return cls
|
||||||
|
|
||||||
|
|
@ -7,11 +7,14 @@ requires-python = ">=3.13"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"asyncpg>=0.31.0",
|
"asyncpg>=0.31.0",
|
||||||
"docker-py>=1.10.6",
|
"docker-py>=1.10.6",
|
||||||
|
"docutils-stubs==0.0.22",
|
||||||
"httpx>=0.28.1",
|
"httpx>=0.28.1",
|
||||||
"jinja2>=3.1.6",
|
"jinja2>=3.1.6",
|
||||||
"loguru>=0.7.3",
|
"loguru>=0.7.3",
|
||||||
|
"passlib[bcrypt]>=1.7.4",
|
||||||
"pydantic-ai>=1.73.0",
|
"pydantic-ai>=1.73.0",
|
||||||
"python-ulid>=3.1.0",
|
"python-ulid>=3.1.0",
|
||||||
"ray[default,serve]>=2.54.0",
|
"ray[default,serve]>=2.54.0",
|
||||||
"sqlmodel>=0.0.37",
|
"sqlmodel>=0.0.37",
|
||||||
|
"types-docutils==0.22.3.20260408",
|
||||||
]
|
]
|
||||||
|
|
|
||||||
107
uv.lock
107
uv.lock
|
|
@ -183,26 +183,32 @@ source = { virtual = "." }
|
||||||
dependencies = [
|
dependencies = [
|
||||||
{ name = "asyncpg" },
|
{ name = "asyncpg" },
|
||||||
{ name = "docker-py" },
|
{ name = "docker-py" },
|
||||||
|
{ name = "docutils-stubs" },
|
||||||
{ name = "httpx" },
|
{ name = "httpx" },
|
||||||
{ name = "jinja2" },
|
{ name = "jinja2" },
|
||||||
{ name = "loguru" },
|
{ name = "loguru" },
|
||||||
|
{ name = "passlib", extra = ["bcrypt"] },
|
||||||
{ name = "pydantic-ai" },
|
{ name = "pydantic-ai" },
|
||||||
{ name = "python-ulid" },
|
{ name = "python-ulid" },
|
||||||
{ name = "ray", extra = ["default", "serve"] },
|
{ name = "ray", extra = ["default", "serve"] },
|
||||||
{ name = "sqlmodel" },
|
{ name = "sqlmodel" },
|
||||||
|
{ name = "types-docutils" },
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.metadata]
|
[package.metadata]
|
||||||
requires-dist = [
|
requires-dist = [
|
||||||
{ name = "asyncpg", specifier = ">=0.31.0" },
|
{ name = "asyncpg", specifier = ">=0.31.0" },
|
||||||
{ name = "docker-py", specifier = ">=1.10.6" },
|
{ name = "docker-py", specifier = ">=1.10.6" },
|
||||||
|
{ name = "docutils-stubs", specifier = "==0.0.22" },
|
||||||
{ name = "httpx", specifier = ">=0.28.1" },
|
{ name = "httpx", specifier = ">=0.28.1" },
|
||||||
{ name = "jinja2", specifier = ">=3.1.6" },
|
{ name = "jinja2", specifier = ">=3.1.6" },
|
||||||
{ name = "loguru", specifier = ">=0.7.3" },
|
{ name = "loguru", specifier = ">=0.7.3" },
|
||||||
|
{ name = "passlib", extras = ["bcrypt"], specifier = ">=1.7.4" },
|
||||||
{ name = "pydantic-ai", specifier = ">=1.73.0" },
|
{ name = "pydantic-ai", specifier = ">=1.73.0" },
|
||||||
{ name = "python-ulid", specifier = ">=3.1.0" },
|
{ name = "python-ulid", specifier = ">=3.1.0" },
|
||||||
{ name = "ray", extras = ["default", "serve"], specifier = ">=2.54.0" },
|
{ name = "ray", extras = ["default", "serve"], specifier = ">=2.54.0" },
|
||||||
{ name = "sqlmodel", specifier = ">=0.0.37" },
|
{ name = "sqlmodel", specifier = ">=0.0.37" },
|
||||||
|
{ name = "types-docutils", specifier = "==0.22.3.20260408" },
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
|
@ -267,6 +273,72 @@ wheels = [
|
||||||
{ url = "https://files.pythonhosted.org/packages/53/23/b65f568ed0c22f1efacb744d2db1a33c8068f384b8c9b482b52ebdbc3ef6/authlib-1.6.9-py2.py3-none-any.whl", hash = "sha256:f08b4c14e08f0861dc18a32357b33fbcfd2ea86cfe3fe149484b4d764c4a0ac3", size = 244197, upload-time = "2026-03-02T07:44:00.307Z" },
|
{ url = "https://files.pythonhosted.org/packages/53/23/b65f568ed0c22f1efacb744d2db1a33c8068f384b8c9b482b52ebdbc3ef6/authlib-1.6.9-py2.py3-none-any.whl", hash = "sha256:f08b4c14e08f0861dc18a32357b33fbcfd2ea86cfe3fe149484b4d764c4a0ac3", size = 244197, upload-time = "2026-03-02T07:44:00.307Z" },
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "bcrypt"
|
||||||
|
version = "5.0.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/d4/36/3329e2518d70ad8e2e5817d5a4cac6bba05a47767ec416c7d020a965f408/bcrypt-5.0.0.tar.gz", hash = "sha256:f748f7c2d6fd375cc93d3fba7ef4a9e3a092421b8dbf34d8d4dc06be9492dfdd", size = 25386, upload-time = "2025-09-25T19:50:47.829Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/13/85/3e65e01985fddf25b64ca67275bb5bdb4040bd1a53b66d355c6c37c8a680/bcrypt-5.0.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f3c08197f3039bec79cee59a606d62b96b16669cff3949f21e74796b6e3cd2be", size = 481806, upload-time = "2025-09-25T19:49:05.102Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/44/dc/01eb79f12b177017a726cbf78330eb0eb442fae0e7b3dfd84ea2849552f3/bcrypt-5.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:200af71bc25f22006f4069060c88ed36f8aa4ff7f53e67ff04d2ab3f1e79a5b2", size = 268626, upload-time = "2025-09-25T19:49:06.723Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/8c/cf/e82388ad5959c40d6afd94fb4743cc077129d45b952d46bdc3180310e2df/bcrypt-5.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:baade0a5657654c2984468efb7d6c110db87ea63ef5a4b54732e7e337253e44f", size = 271853, upload-time = "2025-09-25T19:49:08.028Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ec/86/7134b9dae7cf0efa85671651341f6afa695857fae172615e960fb6a466fa/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:c58b56cdfb03202b3bcc9fd8daee8e8e9b6d7e3163aa97c631dfcfcc24d36c86", size = 269793, upload-time = "2025-09-25T19:49:09.727Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/cc/82/6296688ac1b9e503d034e7d0614d56e80c5d1a08402ff856a4549cb59207/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4bfd2a34de661f34d0bda43c3e4e79df586e4716ef401fe31ea39d69d581ef23", size = 289930, upload-time = "2025-09-25T19:49:11.204Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d1/18/884a44aa47f2a3b88dd09bc05a1e40b57878ecd111d17e5bba6f09f8bb77/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:ed2e1365e31fc73f1825fa830f1c8f8917ca1b3ca6185773b349c20fd606cec2", size = 272194, upload-time = "2025-09-25T19:49:12.524Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/0e/8f/371a3ab33c6982070b674f1788e05b656cfbf5685894acbfef0c65483a59/bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:83e787d7a84dbbfba6f250dd7a5efd689e935f03dd83b0f919d39349e1f23f83", size = 269381, upload-time = "2025-09-25T19:49:14.308Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b1/34/7e4e6abb7a8778db6422e88b1f06eb07c47682313997ee8a8f9352e5a6f1/bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:137c5156524328a24b9fac1cb5db0ba618bc97d11970b39184c1d87dc4bf1746", size = 271750, upload-time = "2025-09-25T19:49:15.584Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c0/1b/54f416be2499bd72123c70d98d36c6cd61a4e33d9b89562c22481c81bb30/bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:38cac74101777a6a7d3b3e3cfefa57089b5ada650dce2baf0cbdd9d65db22a9e", size = 303757, upload-time = "2025-09-25T19:49:17.244Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/13/62/062c24c7bcf9d2826a1a843d0d605c65a755bc98002923d01fd61270705a/bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:d8d65b564ec849643d9f7ea05c6d9f0cd7ca23bdd4ac0c2dbef1104ab504543d", size = 306740, upload-time = "2025-09-25T19:49:18.693Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d5/c8/1fdbfc8c0f20875b6b4020f3c7dc447b8de60aa0be5faaf009d24242aec9/bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:741449132f64b3524e95cd30e5cd3343006ce146088f074f31ab26b94e6c75ba", size = 334197, upload-time = "2025-09-25T19:49:20.523Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a6/c1/8b84545382d75bef226fbc6588af0f7b7d095f7cd6a670b42a86243183cd/bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:212139484ab3207b1f0c00633d3be92fef3c5f0af17cad155679d03ff2ee1e41", size = 352974, upload-time = "2025-09-25T19:49:22.254Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/10/a6/ffb49d4254ed085e62e3e5dd05982b4393e32fe1e49bb1130186617c29cd/bcrypt-5.0.0-cp313-cp313t-win32.whl", hash = "sha256:9d52ed507c2488eddd6a95bccee4e808d3234fa78dd370e24bac65a21212b861", size = 148498, upload-time = "2025-09-25T19:49:24.134Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/48/a9/259559edc85258b6d5fc5471a62a3299a6aa37a6611a169756bf4689323c/bcrypt-5.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f6984a24db30548fd39a44360532898c33528b74aedf81c26cf29c51ee47057e", size = 145853, upload-time = "2025-09-25T19:49:25.702Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/2d/df/9714173403c7e8b245acf8e4be8876aac64a209d1b392af457c79e60492e/bcrypt-5.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:9fffdb387abe6aa775af36ef16f55e318dcda4194ddbf82007a6f21da29de8f5", size = 139626, upload-time = "2025-09-25T19:49:26.928Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f8/14/c18006f91816606a4abe294ccc5d1e6f0e42304df5a33710e9e8e95416e1/bcrypt-5.0.0-cp314-cp314t-macosx_10_12_universal2.whl", hash = "sha256:4870a52610537037adb382444fefd3706d96d663ac44cbb2f37e3919dca3d7ef", size = 481862, upload-time = "2025-09-25T19:49:28.365Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/67/49/dd074d831f00e589537e07a0725cf0e220d1f0d5d8e85ad5bbff251c45aa/bcrypt-5.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:48f753100931605686f74e27a7b49238122aa761a9aefe9373265b8b7aa43ea4", size = 268544, upload-time = "2025-09-25T19:49:30.39Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f5/91/50ccba088b8c474545b034a1424d05195d9fcbaaf802ab8bfe2be5a4e0d7/bcrypt-5.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f70aadb7a809305226daedf75d90379c397b094755a710d7014b8b117df1ebbf", size = 271787, upload-time = "2025-09-25T19:49:32.144Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/aa/e7/d7dba133e02abcda3b52087a7eea8c0d4f64d3e593b4fffc10c31b7061f3/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:744d3c6b164caa658adcb72cb8cc9ad9b4b75c7db507ab4bc2480474a51989da", size = 269753, upload-time = "2025-09-25T19:49:33.885Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/33/fc/5b145673c4b8d01018307b5c2c1fc87a6f5a436f0ad56607aee389de8ee3/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a28bc05039bdf3289d757f49d616ab3efe8cf40d8e8001ccdd621cd4f98f4fc9", size = 289587, upload-time = "2025-09-25T19:49:35.144Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/27/d7/1ff22703ec6d4f90e62f1a5654b8867ef96bafb8e8102c2288333e1a6ca6/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:7f277a4b3390ab4bebe597800a90da0edae882c6196d3038a73adf446c4f969f", size = 272178, upload-time = "2025-09-25T19:49:36.793Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c8/88/815b6d558a1e4d40ece04a2f84865b0fef233513bd85fd0e40c294272d62/bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:79cfa161eda8d2ddf29acad370356b47f02387153b11d46042e93a0a95127493", size = 269295, upload-time = "2025-09-25T19:49:38.164Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/51/8c/e0db387c79ab4931fc89827d37608c31cc57b6edc08ccd2386139028dc0d/bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a5393eae5722bcef046a990b84dff02b954904c36a194f6cfc817d7dca6c6f0b", size = 271700, upload-time = "2025-09-25T19:49:39.917Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/06/83/1570edddd150f572dbe9fc00f6203a89fc7d4226821f67328a85c330f239/bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7f4c94dec1b5ab5d522750cb059bb9409ea8872d4494fd152b53cca99f1ddd8c", size = 334034, upload-time = "2025-09-25T19:49:41.227Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c9/f2/ea64e51a65e56ae7a8a4ec236c2bfbdd4b23008abd50ac33fbb2d1d15424/bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0cae4cb350934dfd74c020525eeae0a5f79257e8a201c0c176f4b84fdbf2a4b4", size = 352766, upload-time = "2025-09-25T19:49:43.08Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d7/d4/1a388d21ee66876f27d1a1f41287897d0c0f1712ef97d395d708ba93004c/bcrypt-5.0.0-cp314-cp314t-win32.whl", hash = "sha256:b17366316c654e1ad0306a6858e189fc835eca39f7eb2cafd6aaca8ce0c40a2e", size = 152449, upload-time = "2025-09-25T19:49:44.971Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3f/61/3291c2243ae0229e5bca5d19f4032cecad5dfb05a2557169d3a69dc0ba91/bcrypt-5.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:92864f54fb48b4c718fc92a32825d0e42265a627f956bc0361fe869f1adc3e7d", size = 149310, upload-time = "2025-09-25T19:49:46.162Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3e/89/4b01c52ae0c1a681d4021e5dd3e45b111a8fb47254a274fa9a378d8d834b/bcrypt-5.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:dd19cf5184a90c873009244586396a6a884d591a5323f0e8a5922560718d4993", size = 143761, upload-time = "2025-09-25T19:49:47.345Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/84/29/6237f151fbfe295fe3e074ecc6d44228faa1e842a81f6d34a02937ee1736/bcrypt-5.0.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:fc746432b951e92b58317af8e0ca746efe93e66555f1b40888865ef5bf56446b", size = 494553, upload-time = "2025-09-25T19:49:49.006Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/45/b6/4c1205dde5e464ea3bd88e8742e19f899c16fa8916fb8510a851fae985b5/bcrypt-5.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c2388ca94ffee269b6038d48747f4ce8df0ffbea43f31abfa18ac72f0218effb", size = 275009, upload-time = "2025-09-25T19:49:50.581Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3b/71/427945e6ead72ccffe77894b2655b695ccf14ae1866cd977e185d606dd2f/bcrypt-5.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:560ddb6ec730386e7b3b26b8b4c88197aaed924430e7b74666a586ac997249ef", size = 278029, upload-time = "2025-09-25T19:49:52.533Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/17/72/c344825e3b83c5389a369c8a8e58ffe1480b8a699f46c127c34580c4666b/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d79e5c65dcc9af213594d6f7f1fa2c98ad3fc10431e7aa53c176b441943efbdd", size = 275907, upload-time = "2025-09-25T19:49:54.709Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/0b/7e/d4e47d2df1641a36d1212e5c0514f5291e1a956a7749f1e595c07a972038/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2b732e7d388fa22d48920baa267ba5d97cca38070b69c0e2d37087b381c681fd", size = 296500, upload-time = "2025-09-25T19:49:56.013Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/0f/c3/0ae57a68be2039287ec28bc463b82e4b8dc23f9d12c0be331f4782e19108/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0c8e093ea2532601a6f686edbc2c6b2ec24131ff5c52f7610dd64fa4553b5464", size = 278412, upload-time = "2025-09-25T19:49:57.356Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/45/2b/77424511adb11e6a99e3a00dcc7745034bee89036ad7d7e255a7e47be7d8/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5b1589f4839a0899c146e8892efe320c0fa096568abd9b95593efac50a87cb75", size = 275486, upload-time = "2025-09-25T19:49:59.116Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/43/0a/405c753f6158e0f3f14b00b462d8bca31296f7ecfc8fc8bc7919c0c7d73a/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:89042e61b5e808b67daf24a434d89bab164d4de1746b37a8d173b6b14f3db9ff", size = 277940, upload-time = "2025-09-25T19:50:00.869Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/62/83/b3efc285d4aadc1fa83db385ec64dcfa1707e890eb42f03b127d66ac1b7b/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e3cf5b2560c7b5a142286f69bde914494b6d8f901aaa71e453078388a50881c4", size = 310776, upload-time = "2025-09-25T19:50:02.393Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/95/7d/47ee337dacecde6d234890fe929936cb03ebc4c3a7460854bbd9c97780b8/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f632fd56fc4e61564f78b46a2269153122db34988e78b6be8b32d28507b7eaeb", size = 312922, upload-time = "2025-09-25T19:50:04.232Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d6/3a/43d494dfb728f55f4e1cf8fd435d50c16a2d75493225b54c8d06122523c6/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:801cad5ccb6b87d1b430f183269b94c24f248dddbbc5c1f78b6ed231743e001c", size = 341367, upload-time = "2025-09-25T19:50:05.559Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/55/ab/a0727a4547e383e2e22a630e0f908113db37904f58719dc48d4622139b5c/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3cf67a804fc66fc217e6914a5635000259fbbbb12e78a99488e4d5ba445a71eb", size = 359187, upload-time = "2025-09-25T19:50:06.916Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/1b/bb/461f352fdca663524b4643d8b09e8435b4990f17fbf4fea6bc2a90aa0cc7/bcrypt-5.0.0-cp38-abi3-win32.whl", hash = "sha256:3abeb543874b2c0524ff40c57a4e14e5d3a66ff33fb423529c88f180fd756538", size = 153752, upload-time = "2025-09-25T19:50:08.515Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/41/aa/4190e60921927b7056820291f56fc57d00d04757c8b316b2d3c0d1d6da2c/bcrypt-5.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:35a77ec55b541e5e583eb3436ffbbf53b0ffa1fa16ca6782279daf95d146dcd9", size = 150881, upload-time = "2025-09-25T19:50:09.742Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/54/12/cd77221719d0b39ac0b55dbd39358db1cd1246e0282e104366ebbfb8266a/bcrypt-5.0.0-cp38-abi3-win_arm64.whl", hash = "sha256:cde08734f12c6a4e28dc6755cd11d3bdfea608d93d958fffbe95a7026ebe4980", size = 144931, upload-time = "2025-09-25T19:50:11.016Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/5d/ba/2af136406e1c3839aea9ecadc2f6be2bcd1eff255bd451dd39bcf302c47a/bcrypt-5.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0c418ca99fd47e9c59a301744d63328f17798b5947b0f791e9af3c1c499c2d0a", size = 495313, upload-time = "2025-09-25T19:50:12.309Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ac/ee/2f4985dbad090ace5ad1f7dd8ff94477fe089b5fab2040bd784a3d5f187b/bcrypt-5.0.0-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddb4e1500f6efdd402218ffe34d040a1196c072e07929b9820f363a1fd1f4191", size = 275290, upload-time = "2025-09-25T19:50:13.673Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e4/6e/b77ade812672d15cf50842e167eead80ac3514f3beacac8902915417f8b7/bcrypt-5.0.0-cp39-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7aeef54b60ceddb6f30ee3db090351ecf0d40ec6e2abf41430997407a46d2254", size = 278253, upload-time = "2025-09-25T19:50:15.089Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/36/c4/ed00ed32f1040f7990dac7115f82273e3c03da1e1a1587a778d8cea496d8/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f0ce778135f60799d89c9693b9b398819d15f1921ba15fe719acb3178215a7db", size = 276084, upload-time = "2025-09-25T19:50:16.699Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e7/c4/fa6e16145e145e87f1fa351bbd54b429354fd72145cd3d4e0c5157cf4c70/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a71f70ee269671460b37a449f5ff26982a6f2ba493b3eabdd687b4bf35f875ac", size = 297185, upload-time = "2025-09-25T19:50:18.525Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/24/b4/11f8a31d8b67cca3371e046db49baa7c0594d71eb40ac8121e2fc0888db0/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8429e1c410b4073944f03bd778a9e066e7fad723564a52ff91841d278dfc822", size = 278656, upload-time = "2025-09-25T19:50:19.809Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ac/31/79f11865f8078e192847d2cb526e3fa27c200933c982c5b2869720fa5fce/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:edfcdcedd0d0f05850c52ba3127b1fce70b9f89e0fe5ff16517df7e81fa3cbb8", size = 275662, upload-time = "2025-09-25T19:50:21.567Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d4/8d/5e43d9584b3b3591a6f9b68f755a4da879a59712981ef5ad2a0ac1379f7a/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:611f0a17aa4a25a69362dcc299fda5c8a3d4f160e2abb3831041feb77393a14a", size = 278240, upload-time = "2025-09-25T19:50:23.305Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/89/48/44590e3fc158620f680a978aafe8f87a4c4320da81ed11552f0323aa9a57/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:db99dca3b1fdc3db87d7c57eac0c82281242d1eabf19dcb8a6b10eb29a2e72d1", size = 311152, upload-time = "2025-09-25T19:50:24.597Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/5f/85/e4fbfc46f14f47b0d20493669a625da5827d07e8a88ee460af6cd9768b44/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5feebf85a9cefda32966d8171f5db7e3ba964b77fdfe31919622256f80f9cf42", size = 313284, upload-time = "2025-09-25T19:50:26.268Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/25/ae/479f81d3f4594456a01ea2f05b132a519eff9ab5768a70430fa1132384b1/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3ca8a166b1140436e058298a34d88032ab62f15aae1c598580333dc21d27ef10", size = 341643, upload-time = "2025-09-25T19:50:28.02Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/df/d2/36a086dee1473b14276cd6ea7f61aef3b2648710b5d7f1c9e032c29b859f/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:61afc381250c3182d9078551e3ac3a41da14154fbff647ddf52a769f588c4172", size = 359698, upload-time = "2025-09-25T19:50:31.347Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c0/f6/688d2cd64bfd0b14d805ddb8a565e11ca1fb0fd6817175d58b10052b6d88/bcrypt-5.0.0-cp39-abi3-win32.whl", hash = "sha256:64d7ce196203e468c457c37ec22390f1a61c85c6f0b8160fd752940ccfb3a683", size = 153725, upload-time = "2025-09-25T19:50:34.384Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/9f/b9/9d9a641194a730bda138b3dfe53f584d61c58cd5230e37566e83ec2ffa0d/bcrypt-5.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:64ee8434b0da054d830fa8e89e1c8bf30061d539044a39524ff7dec90481e5c2", size = 150912, upload-time = "2025-09-25T19:50:35.69Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/27/44/d2ef5e87509158ad2187f4dd0852df80695bb1ee0cfe0a684727b01a69e0/bcrypt-5.0.0-cp39-abi3-win_arm64.whl", hash = "sha256:f2347d3534e76bf50bca5500989d6c1d05ed64b440408057a37673282c654927", size = 144953, upload-time = "2025-09-25T19:50:37.32Z" },
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "beartype"
|
name = "beartype"
|
||||||
version = "0.22.9"
|
version = "0.22.9"
|
||||||
|
|
@ -633,6 +705,18 @@ wheels = [
|
||||||
{ url = "https://files.pythonhosted.org/packages/02/10/5da547df7a391dcde17f59520a231527b8571e6f46fc8efb02ccb370ab12/docutils-0.22.4-py3-none-any.whl", hash = "sha256:d0013f540772d1420576855455d050a2180186c91c15779301ac2ccb3eeb68de", size = 633196, upload-time = "2025-12-18T19:00:18.077Z" },
|
{ url = "https://files.pythonhosted.org/packages/02/10/5da547df7a391dcde17f59520a231527b8571e6f46fc8efb02ccb370ab12/docutils-0.22.4-py3-none-any.whl", hash = "sha256:d0013f540772d1420576855455d050a2180186c91c15779301ac2ccb3eeb68de", size = 633196, upload-time = "2025-12-18T19:00:18.077Z" },
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "docutils-stubs"
|
||||||
|
version = "0.0.22"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "docutils" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/31/fb/3eda037eed8b98d6b2169e4198a8f12a03a461c4d4dc44de1a7790d0f7c7/docutils-stubs-0.0.22.tar.gz", hash = "sha256:1736d9650cfc20cff8c72582806c33a5c642694e2df9e430717e7da7e73efbdf", size = 43699, upload-time = "2022-01-02T11:13:17.499Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/59/85/10507e1011d5370b94567e4b57f93086a2476d1da73caf98dc53aa87004b/docutils_stubs-0.0.22-py3-none-any.whl", hash = "sha256:157807309de24e8c96af9a13afe207410f1fc6e5aab5d974fd6b9191f04de327", size = 87506, upload-time = "2022-01-02T11:13:15.94Z" },
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "email-validator"
|
name = "email-validator"
|
||||||
version = "2.3.0"
|
version = "2.3.0"
|
||||||
|
|
@ -1838,6 +1922,20 @@ wheels = [
|
||||||
{ url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
|
{ url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "passlib"
|
||||||
|
version = "1.7.4"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/b6/06/9da9ee59a67fae7761aab3ccc84fa4f3f33f125b370f1ccdb915bf967c11/passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04", size = 689844, upload-time = "2020-10-08T19:00:52.121Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3b/a4/ab6b7589382ca3df236e03faa71deac88cae040af60c071a78d254a62172/passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1", size = 525554, upload-time = "2020-10-08T19:00:49.856Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.optional-dependencies]
|
||||||
|
bcrypt = [
|
||||||
|
{ name = "bcrypt" },
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pathable"
|
name = "pathable"
|
||||||
version = "0.5.0"
|
version = "0.5.0"
|
||||||
|
|
@ -2928,6 +3026,15 @@ wheels = [
|
||||||
{ url = "https://files.pythonhosted.org/packages/4a/91/48db081e7a63bb37284f9fbcefda7c44c277b18b0e13fbc36ea2335b71e6/typer-0.24.1-py3-none-any.whl", hash = "sha256:112c1f0ce578bfb4cab9ffdabc68f031416ebcc216536611ba21f04e9aa84c9e", size = 56085, upload-time = "2026-02-21T16:54:41.616Z" },
|
{ url = "https://files.pythonhosted.org/packages/4a/91/48db081e7a63bb37284f9fbcefda7c44c277b18b0e13fbc36ea2335b71e6/typer-0.24.1-py3-none-any.whl", hash = "sha256:112c1f0ce578bfb4cab9ffdabc68f031416ebcc216536611ba21f04e9aa84c9e", size = 56085, upload-time = "2026-02-21T16:54:41.616Z" },
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "types-docutils"
|
||||||
|
version = "0.22.3.20260408"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/3c/49/48a386fe15539556de085b87a69568b028cca2fa4b92596a3d4f79ac6784/types_docutils-0.22.3.20260408.tar.gz", hash = "sha256:22d5d45e4e0d65a1bc8280987a73e28669bb1cc9d16b18d0afc91713d1be26da", size = 57383, upload-time = "2026-04-08T04:27:26.924Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/08/47/1667fda6e9fcb044f8fb797f6dc4367b88dc2ab40f1a035e387f5405e870/types_docutils-0.22.3.20260408-py3-none-any.whl", hash = "sha256:2545a86966022cdf1468d430b0007eba0837be77974a7f3fafa1b04a6815d531", size = 91981, upload-time = "2026-04-08T04:27:25.934Z" },
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "types-protobuf"
|
name = "types-protobuf"
|
||||||
version = "6.32.1.20260221"
|
version = "6.32.1.20260221"
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue