From c672c60af69072bd68c40075e864dfc31710027e Mon Sep 17 00:00:00 2001 From: zhaoxi Date: Wed, 25 Mar 2026 20:23:48 +0800 Subject: [PATCH] =?UTF-8?q?feat:=E4=BD=BF=E7=94=A8pytantic=E9=87=8D?= =?UTF-8?q?=E5=86=99=E4=BA=86=E5=A4=A7=E9=83=A8=E5=88=86=E9=80=BB=E8=BE=91?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- archonbot/core/model_router/model_router.py | 3 - archonbot/core/pipeline/worker.py | 24 ---- .../workflow_manager/workflow__manager.py | 52 -------- .../workflow_generator/workflow_generator.py | 24 ---- .../workflow_json_template.j2 | 13 -- .../individual_plugin/consciousness_node.py | 17 --- archonbot/protocol__plugin/event.py | 11 -- .../docker_sandbox/docker_sandbox_manager.py | 6 - .../docker_sandbox/docker_shell.json | 33 ----- .../docker_sandbox/sandbox_client.py | 8 -- .../docker_sandbox/sandbox_image/Dockerfile | 4 - .../sandbox_image/archon_shell.py | 33 ----- .../sandbox_image/archon_shell_server.py | 16 --- archonbot/workflow_plugin/workflow.py | 49 -------- .../workflow_list/programme_workflow.json | 114 ------------------ {archonbot => pretor}/__init__.py | 0 .../api => pretor/adapter_plugin}/__init__.py | 0 .../adapter_plugin/model_adapter}/__init__.py | 0 .../adapter_plugin/model_adapter}/gemini.py | 2 +- .../model_adapter}/modelbase.py | 0 .../adapter_plugin/model_adapter}/openai.py | 2 +- .../model_router => pretor/api}/__init__.py | 0 .../core/pipeline => pretor/core}/__init__.py | 0 .../core/model_router}/__init__.py | 0 .../core/model_router/model_router.py | 0 .../core/pipeline}/__init__.py | 0 pretor/core/pipeline/pipeline.py | 30 +++++ pretor/core/pipeline/pipeline_router.py | 6 + pretor/core/protocol/demand_protocol.py | 24 ++++ pretor/core/protocol/runnable_object.py | 12 ++ .../core/tool_router}/__init__.py | 0 pretor/core/tool_router/tool_manager.py | 11 ++ .../core/workflow_manager}/__init__.py | 0 pretor/core/workflow_manager/workflow.py | 69 +++++++++++ 
.../workflow_manager/workflow__manager.py | 36 ++++++ pretor/core/workflow_manager/workflow_docs.md | 23 ++++ .../workflow_template_generator}/__init__.py | 0 .../workflow_template.py | 25 ++++ .../workflow_template_generator.py | 13 ++ .../individual_plugin}/__init__.py | 0 .../consciousness_node}/__init__.py | 0 .../consciousness_node/consciousness_node.py | 0 .../control_node}/__init__.py | 0 .../control_node/control_node.py | 26 ++++ .../control_node/control_register.py | 22 ++++ .../individual_plugin/growth_node/__init__.py | 0 .../growth_node/growth_node.py | 0 .../supervisory_node/__init__.py | 0 .../supervisory_node/supervisory_node.py | 0 pretor/tool_plugin/__init__.py | 0 pretor/utils/error.py | 2 + pretor/utils/inspector.py | 11 ++ .../programme_workflow_template.json | 82 +++++++++++++ 53 files changed, 394 insertions(+), 409 deletions(-) delete mode 100644 archonbot/core/model_router/model_router.py delete mode 100644 archonbot/core/pipeline/worker.py delete mode 100644 archonbot/core/workflow_manager/workflow__manager.py delete mode 100644 archonbot/core/workflow_manager/workflow_generator/workflow_generator.py delete mode 100644 archonbot/core/workflow_manager/workflow_generator/workflow_json_template.j2 delete mode 100644 archonbot/individual_plugin/consciousness_node.py delete mode 100644 archonbot/protocol__plugin/event.py delete mode 100644 archonbot/tool_plugin/docker_sandbox/docker_sandbox_manager.py delete mode 100644 archonbot/tool_plugin/docker_sandbox/docker_shell.json delete mode 100644 archonbot/tool_plugin/docker_sandbox/sandbox_client.py delete mode 100644 archonbot/tool_plugin/docker_sandbox/sandbox_image/Dockerfile delete mode 100644 archonbot/tool_plugin/docker_sandbox/sandbox_image/archon_shell.py delete mode 100644 archonbot/tool_plugin/docker_sandbox/sandbox_image/archon_shell_server.py delete mode 100644 archonbot/workflow_plugin/workflow.py delete mode 100644 archonbot/workflow_plugin/workflow_list/programme_workflow.json rename 
{archonbot => pretor}/__init__.py (100%) rename {archonbot/api => pretor/adapter_plugin}/__init__.py (100%) rename {archonbot/core => pretor/adapter_plugin/model_adapter}/__init__.py (100%) rename {archonbot/protocol__plugin/model_protocol => pretor/adapter_plugin/model_adapter}/gemini.py (96%) rename {archonbot/protocol__plugin/model_protocol => pretor/adapter_plugin/model_adapter}/modelbase.py (100%) rename {archonbot/protocol__plugin/model_protocol => pretor/adapter_plugin/model_adapter}/openai.py (94%) rename {archonbot/core/model_router => pretor/api}/__init__.py (100%) rename {archonbot/core/pipeline => pretor/core}/__init__.py (100%) rename {archonbot/core/tool_router => pretor/core/model_router}/__init__.py (100%) rename archonbot/core/workflow_manager/__init__.py => pretor/core/model_router/model_router.py (100%) rename {archonbot/core/workflow_manager/workflow_generator => pretor/core/pipeline}/__init__.py (100%) create mode 100644 pretor/core/pipeline/pipeline.py create mode 100644 pretor/core/pipeline/pipeline_router.py create mode 100644 pretor/core/protocol/demand_protocol.py create mode 100644 pretor/core/protocol/runnable_object.py rename {archonbot/individual_plugin => pretor/core/tool_router}/__init__.py (100%) create mode 100644 pretor/core/tool_router/tool_manager.py rename {archonbot/protocol__plugin => pretor/core/workflow_manager}/__init__.py (100%) create mode 100644 pretor/core/workflow_manager/workflow.py create mode 100644 pretor/core/workflow_manager/workflow__manager.py create mode 100644 pretor/core/workflow_manager/workflow_docs.md rename {archonbot/protocol__plugin/model_protocol => pretor/core/workflow_manager/workflow_template_generator}/__init__.py (100%) create mode 100644 pretor/core/workflow_manager/workflow_template_generator/workflow_template.py create mode 100644 pretor/core/workflow_manager/workflow_template_generator/workflow_template_generator.py rename {archonbot/tool_plugin => pretor/individual_plugin}/__init__.py 
(100%) rename {archonbot/tool_plugin/docker_sandbox => pretor/individual_plugin/consciousness_node}/__init__.py (100%) rename archonbot/individual_plugin/control_node.py => pretor/individual_plugin/consciousness_node/consciousness_node.py (100%) rename {archonbot/workflow_plugin => pretor/individual_plugin/control_node}/__init__.py (100%) create mode 100644 pretor/individual_plugin/control_node/control_node.py create mode 100644 pretor/individual_plugin/control_node/control_register.py rename archonbot/individual_plugin/growth_node.py => pretor/individual_plugin/growth_node/__init__.py (100%) rename archonbot/individual_plugin/supervisory_node.py => pretor/individual_plugin/growth_node/growth_node.py (100%) rename archonbot/tool_plugin/docker_sandbox/sandbox_image/main.py => pretor/individual_plugin/supervisory_node/__init__.py (100%) create mode 100644 pretor/individual_plugin/supervisory_node/supervisory_node.py create mode 100644 pretor/tool_plugin/__init__.py create mode 100644 pretor/utils/error.py create mode 100644 pretor/utils/inspector.py create mode 100644 pretor/workflow_plugin/programme_workflow_template.json diff --git a/archonbot/core/model_router/model_router.py b/archonbot/core/model_router/model_router.py deleted file mode 100644 index 1f82d4c..0000000 --- a/archonbot/core/model_router/model_router.py +++ /dev/null @@ -1,3 +0,0 @@ -class ArchonModelRouter: - def __init__(self): - self.handler = {} \ No newline at end of file diff --git a/archonbot/core/pipeline/worker.py b/archonbot/core/pipeline/worker.py deleted file mode 100644 index 30589d9..0000000 --- a/archonbot/core/pipeline/worker.py +++ /dev/null @@ -1,24 +0,0 @@ -import asyncio -from archonbot.protocol__plugin.event import ArchonMessageEvent - -class ArchonWorker: - def __init__(self): - self.workflow_queue = asyncio.Queue() - self.workflow_router = {} - - def add_event(self, event: ArchonMessageEvent): - self.workflow_queue.put(event) - - async def run(self): - while True: - try: - 
event : ArchonMessageEvent = self.workflow_queue.get() - match event.target: - case "plugin": - pass - case _: - pass - except: - pass - finally: - pass \ No newline at end of file diff --git a/archonbot/core/workflow_manager/workflow__manager.py b/archonbot/core/workflow_manager/workflow__manager.py deleted file mode 100644 index 38b21fc..0000000 --- a/archonbot/core/workflow_manager/workflow__manager.py +++ /dev/null @@ -1,52 +0,0 @@ -import json -from pathlib import Path -from loguru import logger -from archonbot.workflow_plugin.workflow import Workflow -from archonbot.core.workflow_manager.workflow_generator.workflow_generator import WorkflowGenerator - -#工作流管理器,管理所有的工作流 -class WorkflowManager: - def __init__(self): - self.workflow_registry = {} - self._load_workflow_registry() - - #_load_workflow_registry(加载工作流登记表),在工作流管理器初始化时将工作流文件加载到工作流管理器 - def _load_workflow_registry(self) -> None: - plugin_dir = Path("archonbot/workflow_plugin/workflow_list") - for file_path in plugin_dir.glob("*_workflow.json"): - try: - module_name = file_path.stem.rsplit("_",1)[0] - with file_path.open("r", encoding="utf-8") as file: - workflow = json.load(file) - self.workflow_registry[module_name] = workflow.get("description") - logger.success("已加载工作流{}".format(module_name)) - except: - logger.warning("工作流文件{}加载失败".format(file_path)) - - #init_workflow(初始化工作流),创建一个工作流并且注册到工作流管理器,并且生成对应的工作流文件到对应文件夹 - def init_workflow(self, workflow_name : str, description : str, metadata : dict, work_link : list) -> None: - try: - WorkflowGenerator.generate(workflow_name, description, metadata, work_link) - self.workflow_registry[workflow_name] = description - logger.success("已创建{}工作流".format(workflow_name)) - except FileExistsError: - logger.warning("{}工作流创建失败,错误原因:文件已存在".format(workflow_name)) - except Exception as e: - logger.warning("{}工作流创建失败,错误原因:{}".format(workflow_name,e)) - - #get_workflow(获取工作流),将event对象转化为workflow对象并返回 - def get_workflow(self, workflow_title : str, workflow_command: str, 
workflow_name : str) -> Workflow: - if workflow_name not in self.workflow_registry: - logger.error(f"尝试启动未注册的工作流: {workflow_name}") - raise ValueError(f"Workflow {workflow_name} not found in registry.") - workflow = Workflow() - workflow.create_workflow(workflow_title, workflow_command, workflow_name) - return workflow - - #get_workflow_list(获取工作流注册表),将工作流管理器中已经注册的工作流转化为格式化的json格式返回给llm - def get_workflow_list(self) -> str: - if not self.workflow_registry: - return "目前暂无可用工作流,请先通过指导文件创建。" - workflow_list = [{"workflow_name": workflow_name, "description": description} for workflow_name, description in self.workflow_registry.items()] - workflow_dict = {"name":"可用工作流表", "workflow_list":workflow_list} - return json.dumps(workflow_dict) \ No newline at end of file diff --git a/archonbot/core/workflow_manager/workflow_generator/workflow_generator.py b/archonbot/core/workflow_manager/workflow_generator/workflow_generator.py deleted file mode 100644 index ec0ac49..0000000 --- a/archonbot/core/workflow_manager/workflow_generator/workflow_generator.py +++ /dev/null @@ -1,24 +0,0 @@ -from pathlib import Path -from jinja2 import Template - -class WorkflowGenerator: - @staticmethod - def generate(workflow_name : str, description : str, metadata : dict, work_link : list) -> None: - #检查文件是否存在并生成工作流配置文件 - target_path = Path("archonbot/workflow_plugin/workflow_list/") - workflow_file = target_path / "{}_workflow.json".format(workflow_name) - target_path.mkdir(parents=True, exist_ok=True) - if workflow_file.exists(): - raise FileExistsError(f"file {workflow_file} already exists") - #加载配置模板 - current_dir = Path(__file__).parent - template_file = current_dir / "workflow_json_template.j2" - with open(template_file) as f: - template = Template(f.read()) - #渲染并生成配置文件 - render_context = template.render(name=workflow_name, - description=description, - metadata=metadata, - works=work_link) - with open(workflow_file, "w", encoding="utf-8") as f: - f.write(render_context) \ No newline at end 
of file diff --git a/archonbot/core/workflow_manager/workflow_generator/workflow_json_template.j2 b/archonbot/core/workflow_manager/workflow_generator/workflow_json_template.j2 deleted file mode 100644 index a4f1e2c..0000000 --- a/archonbot/core/workflow_manager/workflow_generator/workflow_json_template.j2 +++ /dev/null @@ -1,13 +0,0 @@ -{ - "name": "{{ name }}", - "version": "1.0", - "description": "{{ description }}" - "metadata": { - "limit": {{ metadata.limit | default(10) }} - }, - "work_link": [ - {% for work in works %} - {{ work | tojson }}{% if not loop.last %},{% endif %} - {% endfor %} - ] -} \ No newline at end of file diff --git a/archonbot/individual_plugin/consciousness_node.py b/archonbot/individual_plugin/consciousness_node.py deleted file mode 100644 index b121036..0000000 --- a/archonbot/individual_plugin/consciousness_node.py +++ /dev/null @@ -1,17 +0,0 @@ -import ray -from archonbot.protocol__plugin.model_protocol.modelbase import ModelBase - -@ray.remote -class ConsciousnessNode: - def __init__(self): - self.model_id : str - self.path : str - self.adapter : str - self.name : str - self.model_method : ModelBase - - async def get_model(self): - return await self.model_method.get_model - - async def post_message(self): - return await self.model_method.post_message \ No newline at end of file diff --git a/archonbot/protocol__plugin/event.py b/archonbot/protocol__plugin/event.py deleted file mode 100644 index ea018ee..0000000 --- a/archonbot/protocol__plugin/event.py +++ /dev/null @@ -1,11 +0,0 @@ -from ulid import ULID - -class ArchonMessageEvent: - def __init__(self): - event_id : ULID - user : str - command : str - target : str - requirement : dict - payload : dict - context : dict diff --git a/archonbot/tool_plugin/docker_sandbox/docker_sandbox_manager.py b/archonbot/tool_plugin/docker_sandbox/docker_sandbox_manager.py deleted file mode 100644 index 680fc52..0000000 --- a/archonbot/tool_plugin/docker_sandbox/docker_sandbox_manager.py +++ 
/dev/null @@ -1,6 +0,0 @@ -import docker -import socket - -class DockerSandBoxManager(): - def __init__(self): - pass \ No newline at end of file diff --git a/archonbot/tool_plugin/docker_sandbox/docker_shell.json b/archonbot/tool_plugin/docker_sandbox/docker_shell.json deleted file mode 100644 index 56eabc3..0000000 --- a/archonbot/tool_plugin/docker_sandbox/docker_shell.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "name": "docker_sandbox", - "desc": "一款通过docker实现环境隔离的沙箱环境,实现安全地任务实现", - "command": [ - { - "name": "read", - "desc": "浏览文件", - "param": { - "-p $PATH": "浏览$PATH下的文件", - "-h $LINE": "浏览前$LINE行文件" - } - }, - { - "name": "write", - "desc": "写入文件", - "param": { - "-p $PATH": "写入$PATH下的文件", - "-t $TEXT": "将$TEXT写入文件" - } - }, - { - "name": "ls", - "desc": "获取文件列表", - "param": { - "-l $PATH": "获取$PATH下的文件" - } - }, - { - - } - ], - "specification": "" -} \ No newline at end of file diff --git a/archonbot/tool_plugin/docker_sandbox/sandbox_client.py b/archonbot/tool_plugin/docker_sandbox/sandbox_client.py deleted file mode 100644 index 3d8b3df..0000000 --- a/archonbot/tool_plugin/docker_sandbox/sandbox_client.py +++ /dev/null @@ -1,8 +0,0 @@ -import docker -import socket - -class SandboxClient: - def __init__(self, sandbox_id : int, ): - self.sandbox_id : int - - client = docker.from_env() diff --git a/archonbot/tool_plugin/docker_sandbox/sandbox_image/Dockerfile b/archonbot/tool_plugin/docker_sandbox/sandbox_image/Dockerfile deleted file mode 100644 index 43d5206..0000000 --- a/archonbot/tool_plugin/docker_sandbox/sandbox_image/Dockerfile +++ /dev/null @@ -1,4 +0,0 @@ -FROM ubuntu:latest -LABEL authors="zhaoxi" - -ENTRYPOINT ["top", "-b"] \ No newline at end of file diff --git a/archonbot/tool_plugin/docker_sandbox/sandbox_image/archon_shell.py b/archonbot/tool_plugin/docker_sandbox/sandbox_image/archon_shell.py deleted file mode 100644 index 0639b5b..0000000 --- a/archonbot/tool_plugin/docker_sandbox/sandbox_image/archon_shell.py +++ /dev/null @@ -1,33 +0,0 @@ 
-class ArchonShell: - @staticmethod - def read(): - pass - - @staticmethod - def write(): - pass - - @staticmethod - def ls(): - pass - - @staticmethod - def mkdir(): - pass - - @staticmethod - def exec_py(): - pass - - @staticmethod - def exec_shell(): - pass - - @staticmethod - def kill(): - pass - - @staticmethod - def submit(): - pass - diff --git a/archonbot/tool_plugin/docker_sandbox/sandbox_image/archon_shell_server.py b/archonbot/tool_plugin/docker_sandbox/sandbox_image/archon_shell_server.py deleted file mode 100644 index f489a05..0000000 --- a/archonbot/tool_plugin/docker_sandbox/sandbox_image/archon_shell_server.py +++ /dev/null @@ -1,16 +0,0 @@ -import os -import sys -from loguru import logger -import socket -import multiprocessing - -class ArchonShellServer: - def __init__(self): - self.workspace_path = os.environ.get("ARCHON_WORKSPACE") - self.socket_path = os.environ.get("ARCHON_SOCKET") - self.signal_path = os.environ.get("ARCHON_SIGNAL") - - def run(self): - while True: - pass - diff --git a/archonbot/workflow_plugin/workflow.py b/archonbot/workflow_plugin/workflow.py deleted file mode 100644 index a46299b..0000000 --- a/archonbot/workflow_plugin/workflow.py +++ /dev/null @@ -1,49 +0,0 @@ -from pathlib import Path -import json -from ulid import ULID - -class Workflow: - def __init__(self): - self.workflow_id : str = "" - self.workflow_title: str = "" - self.work_link: list = [] - self.workflow_description: str = "" - self.workflow_command: str = "" - self.workflow_output: dict = {} - self.workflow_metadata : dict = {} - self.work_demand: dict = {} - self.status: str = "" - - def create_workflow(self, trace_id : str, workflow_title: str, workflow_command: str, workflow_name : str) -> None: - current_dir = Path(__file__).parent - workflow_file = current_dir / "workflow_list" / "{}_workflow.json".format(workflow_name) - with workflow_file.open("r", encoding="utf-8") as json_file: - workflow_json = json.load(json_file) - self.workflow_id = 
"{}_".format(workflow_name) + trace_id - self.workflow_title = workflow_title - self.work_link = workflow_json.get("work_link") - self.workflow_description = workflow_json.get("workflow_description") - self.workflow_command = workflow_command - self.workflow_metadata = workflow_json.get("metadata") - self.status = "step1" - - def get_workflow(self) -> str: - workflow = { - "workflow_id":self.workflow_id, - "workflow_title":self.workflow_title, - "work_link":self.work_link, - "workflow_command":self.workflow_command, - "workflow_output":self.workflow_output, - "workflow_metadata":self.workflow_metadata, - "work_demand":self.work_demand, - "status":self.status, - } - workflow = json.dumps(workflow) - return workflow - - def set_output(self, step, output) -> None: - self.workflow_output["step:{}".format(step)] = output - - def set_work_link(self, work_link: str) -> None: - work_link = json.loads(work_link) - self.work_link = work_link \ No newline at end of file diff --git a/archonbot/workflow_plugin/workflow_list/programme_workflow.json b/archonbot/workflow_plugin/workflow_list/programme_workflow.json deleted file mode 100644 index ccd3240..0000000 --- a/archonbot/workflow_plugin/workflow_list/programme_workflow.json +++ /dev/null @@ -1,114 +0,0 @@ -{ - "name": "programme", - "version": "1.0", - "description": "编写程序的工作链模版,用于完成一个编程任务", - "metadata": { - }, - "work_link": [ - { - "step": 1, - "node": "consciousness_node", - "action": "architect", - "desc": "构建程序架构,定义子个体需求与工作链变更", - "output": "arch_spec", - "status": "waiting" - }, - { - "step": 2, - "node": "control_node", - "action": "spawn_actors", - "desc": "根据 arch_spec 拉起子个体,挂载对应目录", - "input": "arch_spec", - "status": "waiting" - }, - { - "step": 3, - "node": "composite_individual", - "action": "decompose", - "desc": "拆解 arch_spec 为原子任务包 (Task Packets)", - "input": "arch_spec", - "output": "task_packets", - "status": "waiting" - }, - { - "step": 4, - "node": "primary_individual", - "action": "execute_code", - 
"desc": "执行编码任务,写入目标文件", - "input": "task_packets", - "output": "source_code", - "status": "waiting" - }, - { - "step": 5, - "node": "composite_individual", - "action": "audit", - "desc": "静态逻辑检查与代码规范审计", - "input": "source_code", - "output": "audit_report", - "status": "waiting" - }, - { - "step": 6, - "node": "control_node", - "action": "resource_recycle", - "desc": "暂存编码 Actor 状态,释放非必要显存", - "input": "audit_report", - "status": "waiting" - }, - { - "step": 7, - "node": "consciousness_node", - "action": "design_test", - "desc": "基于 source_code 设计测试用例架构 (Test Bench)", - "input": "source_code", - "output": "test_spec", - "status": "waiting" - }, - { - "step": 8, - "node": "control_node", - "action": "spawn_test_env", - "desc": "拉起测试专用子个体并分配执行环境", - "input": "test_spec", - "status": "waiting" - }, - { - "step": 9, - "node": "primary_individual", - "action": "run_test", - "desc": "运行测试并生成实验报告 (Experiment Report)", - "input": "test_spec", - "output": "test_report", - "status": "waiting" - }, - { - "step": 10, - "node": "consciousness_node", - "action": "analyze_report", - "desc": "研究测试报告,决定是否触发迭代循环", - "input": "test_report", - "logic_gate": { - "if_fail": "jump_to_step_1", - "if_pass": "continue" - }, - "status": "waiting" - }, - { - "step": 11, - "node": "consciousness_node", - "action": "finalize", - "desc": "总结全流程报告,提交归档", - "output": "final_package", - "status": "waiting" - }, - { - "step": 12, - "node": "supervisory_node", - "action": "terminate_workflow", - "desc": "核对 final_package,关闭工作流并向用户反馈", - "input": "final_package", - "status": "waiting" - } - ] -} \ No newline at end of file diff --git a/archonbot/__init__.py b/pretor/__init__.py similarity index 100% rename from archonbot/__init__.py rename to pretor/__init__.py diff --git a/archonbot/api/__init__.py b/pretor/adapter_plugin/__init__.py similarity index 100% rename from archonbot/api/__init__.py rename to pretor/adapter_plugin/__init__.py diff --git a/archonbot/core/__init__.py 
b/pretor/adapter_plugin/model_adapter/__init__.py similarity index 100% rename from archonbot/core/__init__.py rename to pretor/adapter_plugin/model_adapter/__init__.py diff --git a/archonbot/protocol__plugin/model_protocol/gemini.py b/pretor/adapter_plugin/model_adapter/gemini.py similarity index 96% rename from archonbot/protocol__plugin/model_protocol/gemini.py rename to pretor/adapter_plugin/model_adapter/gemini.py index 1ae9d0f..5b8b92e 100644 --- a/archonbot/protocol__plugin/model_protocol/gemini.py +++ b/pretor/adapter_plugin/model_adapter/gemini.py @@ -1,7 +1,7 @@ import httpx import json from typing import List, Dict, Any, AsyncGenerator -from archonbot.protocol__plugin.model_protocol.modelbase import ModelBase +from pretor.protocol_plugin.model_protocol.modelbase import ModelBase class GeminiAdapter(ModelBase): def __init__(self, base_url: str, adapter_title: str, api_key: str): diff --git a/archonbot/protocol__plugin/model_protocol/modelbase.py b/pretor/adapter_plugin/model_adapter/modelbase.py similarity index 100% rename from archonbot/protocol__plugin/model_protocol/modelbase.py rename to pretor/adapter_plugin/model_adapter/modelbase.py diff --git a/archonbot/protocol__plugin/model_protocol/openai.py b/pretor/adapter_plugin/model_adapter/openai.py similarity index 94% rename from archonbot/protocol__plugin/model_protocol/openai.py rename to pretor/adapter_plugin/model_adapter/openai.py index ee0f00f..77261d5 100644 --- a/archonbot/protocol__plugin/model_protocol/openai.py +++ b/pretor/adapter_plugin/model_adapter/openai.py @@ -1,5 +1,5 @@ import httpx -from archonbot.protocol__plugin.model_protocol.modelbase import ModelBase +from pretor.protocol_plugin.model_protocol.modelbase import ModelBase class OpenAIAdapter(ModelBase): def __init__(self, base_url: str, adapter_title: str, api_key: str = "archon-local"): diff --git a/archonbot/core/model_router/__init__.py b/pretor/api/__init__.py similarity index 100% rename from 
archonbot/core/model_router/__init__.py rename to pretor/api/__init__.py diff --git a/archonbot/core/pipeline/__init__.py b/pretor/core/__init__.py similarity index 100% rename from archonbot/core/pipeline/__init__.py rename to pretor/core/__init__.py diff --git a/archonbot/core/tool_router/__init__.py b/pretor/core/model_router/__init__.py similarity index 100% rename from archonbot/core/tool_router/__init__.py rename to pretor/core/model_router/__init__.py diff --git a/archonbot/core/workflow_manager/__init__.py b/pretor/core/model_router/model_router.py similarity index 100% rename from archonbot/core/workflow_manager/__init__.py rename to pretor/core/model_router/model_router.py diff --git a/archonbot/core/workflow_manager/workflow_generator/__init__.py b/pretor/core/pipeline/__init__.py similarity index 100% rename from archonbot/core/workflow_manager/workflow_generator/__init__.py rename to pretor/core/pipeline/__init__.py diff --git a/pretor/core/pipeline/pipeline.py b/pretor/core/pipeline/pipeline.py new file mode 100644 index 0000000..b9c17c3 --- /dev/null +++ b/pretor/core/pipeline/pipeline.py @@ -0,0 +1,30 @@ +import asyncio +import ray +from pretor.core.pipeline.pipeline_router import PipelineRouter +from pretor.core.workflow_manager.workflow import PretorWorkflow +from loguru import logger + +@ray.remote +class PretorPipeline: + def __init__(self): + self.pipeline = asyncio.Queue() + self.running =True + self.worker_group = [] + async def running(self): + for i in range(10): + self.worker_group.append(await self.worker()) + + async def worker(self): + while True: + workflow = await self.pipeline.get() + try: + logger.info(f"{workflow.title}开始运行") + for work_item in workflow.work_link: + await PipelineRouter.router(workflow, work_item) + except: + logger.error(f"{workflow.title}遭受致命错误,已结束") + continue + + async def submit_workflow(self, workflow: PretorWorkflow): + await self.pipeline.put(workflow) + logger.info(f"任务已进入受理队列,当前排队数: 
{self.pipeline.qsize()}") \ No newline at end of file diff --git a/pretor/core/pipeline/pipeline_router.py b/pretor/core/pipeline/pipeline_router.py new file mode 100644 index 0000000..f432ec6 --- /dev/null +++ b/pretor/core/pipeline/pipeline_router.py @@ -0,0 +1,6 @@ +import asyncio + +class PipelineRouter: + @staticmethod + async def router(workflow): + pass \ No newline at end of file diff --git a/pretor/core/protocol/demand_protocol.py b/pretor/core/protocol/demand_protocol.py new file mode 100644 index 0000000..aeea0b1 --- /dev/null +++ b/pretor/core/protocol/demand_protocol.py @@ -0,0 +1,24 @@ +from typing import List, Optional, Union, Dict, Any, Literal +from pydantic import BaseModel, Field + +# --- 1. 给 Individual (LLM/Agent) 的具体需求 --- +class IndividualDemand(BaseModel): + role_prompt: str = Field(..., description="赋予该个体的角色定义") + task_goal: str = Field(..., description="该个体的具体执行目标") + expected_output: str = Field(..., description="期望产出的数据结构或格式描述") + +# --- 2. 给 Tool (插件/函数调用) 的具体需求 --- +class ToolDemand(BaseModel): + method: str = Field(..., description="插件调用的具体方法名") + args: Dict[str, Any] = Field(default_factory=dict, description="传递给插件的参数") + +# --- 3. 给 System (系统/物理资源) 的具体需求 --- +class SystemDemand(BaseModel): + operation: Literal["allocate_resource", "docker_manage", "file_io", "network"] + params: Dict[str, Any] = Field(..., description="操作所需的物理参数,如 GPU 核心数、路径等") + +# --- 4. 
统一需求入口 (裁判官协议体) --- +class DemandProtocol(BaseModel): + variety: Literal["individual", "tool", "system"] + name: str = Field(..., description="目标名称(如:python_expert, pytest_tool, docker_engine)") + content: Union[IndividualDemand, ToolDemand, SystemDemand] = Field(..., description="需求的具体参数细节") \ No newline at end of file diff --git a/pretor/core/protocol/runnable_object.py b/pretor/core/protocol/runnable_object.py new file mode 100644 index 0000000..cdc22a9 --- /dev/null +++ b/pretor/core/protocol/runnable_object.py @@ -0,0 +1,12 @@ +from abc import ABC,abstractmethod +from pretor.core.workflow_manager.workflow import PretorWorkflow + + +class RunnableObject(ABC): + @abstractmethod + def __init__(self, **kwargs): + pass + + @abstractmethod + async def run(self, workflow: PretorWorkflow) -> None: + pass \ No newline at end of file diff --git a/archonbot/individual_plugin/__init__.py b/pretor/core/tool_router/__init__.py similarity index 100% rename from archonbot/individual_plugin/__init__.py rename to pretor/core/tool_router/__init__.py diff --git a/pretor/core/tool_router/tool_manager.py b/pretor/core/tool_router/tool_manager.py new file mode 100644 index 0000000..1a20b94 --- /dev/null +++ b/pretor/core/tool_router/tool_manager.py @@ -0,0 +1,11 @@ + + +class ToolManager: + def __init__(self): + pass + + def _load_tool_registry(self): + pass + + def run_tool(self, tool_name, tool_desc): + pass diff --git a/archonbot/protocol__plugin/__init__.py b/pretor/core/workflow_manager/__init__.py similarity index 100% rename from archonbot/protocol__plugin/__init__.py rename to pretor/core/workflow_manager/__init__.py diff --git a/pretor/core/workflow_manager/workflow.py b/pretor/core/workflow_manager/workflow.py new file mode 100644 index 0000000..d59e160 --- /dev/null +++ b/pretor/core/workflow_manager/workflow.py @@ -0,0 +1,69 @@ +from typing import List, Optional, Union, Literal, Dict, Any +from pydantic import BaseModel, Field, model_validator +from ulid import ULID 
+from pretor.core.protocol.demand_protocol import DemandProtocol +NodeType = Literal[ + "consciousness_node", "control_node", "supervisory_node", + "composite_individual", "primary_individual" +] + +class LogicGate(BaseModel): + if_fail: str = Field(..., description="失败跳转目标,如 'jump_to_step_1'") + if_pass: Literal["continue", "exit"] = Field(default="continue", description="成功后的动作") + +class WorkStep(BaseModel): + step: int = Field(..., gt=0, description="步骤序号,严格自增") + node: NodeType = Field(..., description="负责执行的节点类型") + action: str = Field(..., description="执行的原子动作") + desc: str = Field(..., description="动作细节的自然语言描述,包含人工规范指导") + input: Optional[Union[str, List[str]]] = Field(default=None, description="前置依赖输出") + output: Optional[str] = Field(default=None, description="当前步骤产出物变量名") + logic_gate: Optional[LogicGate] = Field(default=None, description="逻辑跳转控制") + status: Literal["waiting", "running", "completed", "failed"] = Field( + default="waiting", + description="执行状态 (LLM建议保留默认值)" + ) + +class WorkerGroup(BaseModel): + name: str = Field(..., description="工作组名称,如 'coding_squad'") + primary_individual: Dict[str, int] = Field(..., description="基础子个体配置,例如 {'coder': 2, 'tester': 1}") + composite_individual: Dict[str, int] = Field(..., description="复合子个体配置,例如 {'code_reviewer': 1}") + + +class WorkflowStatus(BaseModel): + step: int = Field(default=1, gt=0, description="当前运行到的工作流步数") + status: Literal["waiting_llm_working", "waiting_tool_working", "llm_working", "tool_working"] = Field( + default="waiting_llm_working", + description="当前系统调度状态" + ) + demand: DemandProtocol = Field(default=None, description="需要的资源或插件调用请求") + +class PretorWorkflow(BaseModel): + title: str = Field(..., description="工作流的标题") + workgroup_list: List[WorkerGroup] = Field(..., description="工作组资源编排列表") + work_link: List[WorkStep] = Field(..., description="工作链逻辑定义") + # ---------------- 以下为系统级管控字段,LLM 无需关心 ---------------- # + trace_id: str = Field(default_factory=lambda: str(ULID()), 
description="系统自动生成的追溯ID") + version: str = Field(default="v1.0", description="系统协议版本号") + command: Optional[str] = Field(default=None, description="触发此工作流的原始命令") + output: Dict[str, Any] = Field(default_factory=dict, description="工作流最终产出结果") + status: WorkflowStatus = Field(default_factory=WorkflowStatus, description="运行时状态对象") + + @model_validator(mode='after') + def validate_workflow_integrity(self) -> 'PretorWorkflow': + steps = [s.step for s in self.work_link] + expected = list(range(1, len(steps) + 1)) + if steps != expected: + raise ValueError(f"工作链步数不连续!期望 {expected},实际 {steps}") + + max_step = len(steps) + for s in self.work_link: + if s.logic_gate and "jump_to_step_" in s.logic_gate.if_fail: + try: + target = int(s.logic_gate.if_fail.split("_")[-1]) + if target > max_step or target < 1: + raise ValueError(f"Step {s.step} 的跳转目标 Step {target} 越界了!") + except ValueError as e: + if "越界" in str(e): raise e + raise ValueError(f"LogicGate 格式错误: {s.logic_gate.if_fail}") + return self \ No newline at end of file diff --git a/pretor/core/workflow_manager/workflow__manager.py b/pretor/core/workflow_manager/workflow__manager.py new file mode 100644 index 0000000..903c671 --- /dev/null +++ b/pretor/core/workflow_manager/workflow__manager.py @@ -0,0 +1,36 @@ +import json +from pretor.core.workflow_manager.workflow_template_generator.workflow_template_generator import WorkflowTemplateGenerator +from pretor.core.workflow_manager.workflow import PretorWorkflow +from pathlib import Path +from loguru import logger + +class WorkflowManager: + def __init__(self): + self.workflow_template_generator = WorkflowTemplateGenerator() + self.workflow_templates_registry = {} + self.template_path = Path("pretor/workflow_plugin") + self._load_workflow_template() + + + def _load_workflow_template(self) -> None: + for workflow_template_file in self.template_path.glob("*_workflow_template.json"): + with workflow_template_file.open("r",encoding="utf-8") as f: + try: + workflow_template = 
import json
from pretor.core.workflow_manager.workflow_template_generator.workflow_template_generator import WorkflowTemplateGenerator
from pretor.core.workflow_manager.workflow import PretorWorkflow
from pathlib import Path
from loguru import logger


class WorkflowManager:
    """Facade over the workflow module: indexes template files, generates new
    templates, and builds runnable ``PretorWorkflow`` objects from JSON."""

    def __init__(self):
        self.workflow_template_generator = WorkflowTemplateGenerator()
        # Maps template name -> template description.
        self.workflow_templates_registry = {}
        self.template_path = Path("pretor/workflow_plugin")
        self._load_workflow_template()

    def _load_workflow_template(self) -> None:
        """Scan the plugin folder and register every ``*_workflow_template.json``."""
        for workflow_template_file in self.template_path.glob("*_workflow_template.json"):
            with workflow_template_file.open("r", encoding="utf-8") as f:
                try:
                    workflow_template = json.load(f)
                    # Fixed: dict.get() never raises KeyError, so the original
                    # except-clause was dead code and templates missing
                    # "name"/"desc" were silently registered under None.
                    name = workflow_template["name"]
                    desc = workflow_template["desc"]
                except json.JSONDecodeError:
                    logger.warning(f"{workflow_template_file}不是json文件或格式错误")
                    continue
                except (KeyError, TypeError):
                    # TypeError covers a top-level JSON value that is not an object.
                    logger.warning(f"{workflow_template_file}不符合workflow_template格式")
                    continue
            self.workflow_templates_registry[name] = desc

    def generate_workflow_template(self, name: str, desc: str, steps: list) -> None:
        """Create and persist a new workflow template (best-effort).

        Failures are logged instead of being silently swallowed — the
        original bare ``except: pass`` hid every error, including invalid
        template data.
        """
        try:
            self.workflow_template_generator.generate_workflow_template(name=name, desc=desc, steps=steps)
        except Exception:
            logger.exception(f"生成工作流模板 {name} 失败")

    @staticmethod
    def create_workflow(workflow_json: str) -> PretorWorkflow:
        """Parse and validate a JSON payload into a ``PretorWorkflow``.

        Raises:
            pydantic.ValidationError: if the payload violates the workflow schema.
        """
        return PretorWorkflow.model_validate_json(workflow_json)
from pathlib import Path
from pretor.core.workflow_manager.workflow_template_generator.workflow_template import WorkflowTemplate


class WorkflowTemplateGenerator:
    """Validates template data and writes it where ``WorkflowManager`` scans."""

    @staticmethod
    def generate_workflow_template(name: str, desc: str, steps: list) -> None:
        """Validate ``(name, desc, steps)`` as a ``WorkflowTemplate`` and persist it.

        The file is written as ``{name}_workflow_template.json`` under the
        workflow plugin directory.

        Raises:
            pydantic.ValidationError: if the data does not form a valid template.
        """
        workflow_template = WorkflowTemplate(name=name, desc=desc, work_link=steps)
        # Fixed: the original used Path("pretor.workflow_plugin") — a dotted
        # module path as a filesystem path — so templates were written into a
        # literal "pretor.workflow_plugin" directory that WorkflowManager
        # (which reads "pretor/workflow_plugin") never scans.
        output_dir = Path("pretor/workflow_plugin")
        # exist_ok avoids the check-then-create race of exists()/mkdir().
        output_dir.mkdir(parents=True, exist_ok=True)
        output_file = output_dir / f"{name}_workflow_template.json"
        with output_file.open("w", encoding="utf-8") as f:
            f.write(workflow_template.model_dump_json(indent=4))
from pretor.core.protocol.runnable_object import RunnableObject
from pretor.core.workflow_manager.workflow import PretorWorkflow
from pretor.adapter_plugin.model_adapter.modelbase import ModelBase
import ray
from typing import Any, Optional
from pretor.individual_plugin.control_node.control_register import ControlRegister
from pretor.utils.inspector import inspector


# control_node: the control node, owner of the system's global state.
@ray.remote
class ControlNode(RunnableObject):
    """Ray actor holding the global control register and serving resource
    demands raised by a running workflow."""

    def __init__(self, **kwargs: Any) -> None:
        # Fixed: the original annotated ``**kwargs: Dict[str: Any]`` — a slice
        # expression, not a type; the annotation on **kwargs describes the
        # value type, so plain ``Any`` is correct.
        self.model_adapter: Optional[ModelBase] = kwargs.get("model_adapter")
        self.model: Optional[str] = kwargs.get("model")
        self.name: str = kwargs.get("name", "管控节点")
        self.control_register = ControlRegister()

    def _load_control_register(self) -> None:
        # TODO: populate the register from persisted configuration.
        pass

    @inspector("individual", "control_node")
    async def run(self, workflow: PretorWorkflow) -> None:
        """Serve the demand carried by ``workflow.status``.

        Still a stub: only serializes the register and the demand so far.
        """
        control_register = self.control_register.model_dump_json()
        # Fixed: WorkflowStatus has no ``content`` attribute — the demand
        # hangs directly off ``workflow.status`` (see workflow.py), so the
        # original ``workflow.status.content.demand`` always raised
        # AttributeError.
        demand = workflow.status.demand.model_dump_json()
from pydantic import BaseModel, Field
from typing import List, Dict, Any, Literal, Union, Optional


class SystemItem(BaseModel):
    """A low-level system command exposed through the control register."""
    command_template: str = Field(..., description="底层 shell 命令模板")
    args_schema: Dict[str, Any] = Field(default_factory=dict, description="该指令接受的参数约束")


class IndividualItem(BaseModel):
    """A spawnable individual (persona) definition."""
    description: str
    # Fixed: the original ``Dict[str: str]`` is a slice expression, not a
    # generic — pydantic rejects it when the class is created.
    params: Dict[str, str]
    base_prompt: str = Field(..., description="个体的基础人格/背景设定")


class ToolItem(BaseModel):
    """A loadable tool plugin reference."""
    description: str
    plugin_path: str = Field(..., description="插件物理路径或类路径")


class ControlRegister(BaseModel):
    """Global registry of system commands, individuals, and tools.

    Every registry is a dict keyed by name for O(1) lookup: { "name": ItemObject }.
    """
    system_registry: Dict[str, SystemItem] = Field(default_factory=dict)
    individual_registry: Dict[str, IndividualItem] = Field(default_factory=dict)
    tool_registry: Dict[str, ToolItem] = Field(default_factory=dict)
    global_information: Dict[str, str] = Field(default_factory=dict)
def inspector(variety: str, name: str):
    """Decorator guarding a plugin entry point against misrouted demands.

    Reads ``status.demand`` from the second positional argument (the workflow;
    ``args[0]`` is ``self``) and raises ``DemandError`` unless the demand
    targets exactly this ``(variety, name)`` pair.

    Args:
        variety: expected demand category, e.g. "individual".
        name: expected target name, e.g. "control_node".
    """
    def decorator(func):
        from functools import wraps

        @wraps(func)  # preserve the wrapped function's name/docstring
        def wrapper(*args, **kwargs):
            demand = args[1].status.demand
            # Fixed: the original used ``and``, which only rejected a demand
            # when BOTH fields mismatched — a wrong variety with a matching
            # name (or vice versa) slipped through. Either mismatch must
            # reject. A missing demand (None, the model default) is rejected
            # too instead of crashing with AttributeError.
            if demand is None or demand.variety != variety or demand.name != name:
                raise DemandError("需求目标对象错误或名称错误!")
            return func(*args, **kwargs)
        return wrapper
    return decorator
+ "action": "execute_code", + "desc": "【人类规范】执行编码任务,必须确保所有代码写入指定的挂载目录。", + "input": "task_packets", + "output": "source_code" + }, + { + "step": 5, + "node": "composite_individual", + "action": "audit", + "desc": "【人类规范】对产出的源码进行静态逻辑检查与 PEP8 代码规范审计。", + "input": "source_code", + "output": "audit_report" + }, + { + "step": 6, + "node": "control_node", + "action": "resource_recycle", + "desc": "【安全规范】暂存当前编码子个体的状态,释放非必要显存,为测试环境腾出算力。", + "input": "audit_report" + }, + { + "step": 7, + "node": "consciousness_node", + "action": "design_test", + "desc": "【人类规范】基于源码设计测试用例架构,覆盖边缘场景。", + "input": "source_code", + "output": "test_spec" + }, + { + "step": 8, + "node": "primary_individual", + "action": "run_test", + "desc": "【人类规范】在独立的 Docker 沙箱中运行 test,并生成结构化的实验报告。", + "input": "test_spec", + "output": "test_report" + }, + { + "step": 9, + "node": "consciousness_node", + "action": "analyze_report", + "desc": "【逻辑网关】研究测试报告。如果存在 Error 或 Fail,必须触发逻辑跳转,重写代码。", + "input": "test_report", + "logic_gate": { + "if_fail": "jump_to_step_4", + "if_pass": "continue" + } + }, + { + "step": 10, + "node": "supervisory_node", + "action": "terminate_workflow", + "desc": "【系统规范】核对所有产出物,关闭工作流管道,向宿主机发送 .done 信号。", + "input": ["source_code", "test_report"] + } + ] +} \ No newline at end of file