莘权 马
committed
Commit · 65f7c11
1 Parent(s): 755c751
feat: upgrade metagpt==0.6.5
- app.py +9 -7
- data_model.py +15 -1
- requirements.txt +1 -1
- software_company.py +16 -11
app.py
CHANGED
@@ -19,7 +19,7 @@ import openai
 import tenacity
 import uvicorn
 from fastapi import FastAPI, Request
-from fastapi.responses import
+from fastapi.responses import JSONResponse, StreamingResponse
 from fastapi.staticfiles import StaticFiles
 from loguru import logger
 from metagpt.config import CONFIG
@@ -28,17 +28,17 @@ from metagpt.schema import Message
 from openai import OpenAI
 
 from data_model import (
-
+    LLMAPIkeyTest,
     MessageJsonModel,
-
+    NewMsg,
     Sentence,
+    Sentences,
     SentenceType,
     SentenceValue,
     ThinkActPrompt,
-    LLMAPIkeyTest,
     ThinkActStep,
 )
-from message_enum import
+from message_enum import MessageStatus, QueryAnswerType
 from software_company import RoleRun, SoftwareCompany
 
 
@@ -64,8 +64,10 @@ class Service:
             Sentences(
                 contents=[
                     Sentence(
-                        type=SentenceType.TEXT.value,
-
+                        type=SentenceType.TEXT.value,
+                        value=SentenceValue(answer=req_model.query),
+                        is_finished=True,
+                    ).model_dump()
                 ],
                 status=MessageStatus.COMPLETE.value,
             )
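
Note: the new Sentence construction above relies on pydantic v2's model_dump(), the replacement for v1's .dict(). A minimal sketch of that pattern, using simplified stand-ins for the data_model classes (the field sets below are assumptions for illustration, not copied from the repo):

from enum import Enum
from typing import Optional

from pydantic import BaseModel


class SentenceType(Enum):
    TEXT = "text"


class SentenceValue(BaseModel):
    answer: str


class Sentence(BaseModel):
    type: Optional[str] = None
    value: Optional[SentenceValue] = None
    is_finished: Optional[bool] = None


sentence = Sentence(
    type=SentenceType.TEXT.value,
    value=SentenceValue(answer="write a snake game"),
    is_finished=True,
)

# model_dump() serializes nested models into plain dicts, ready to embed in Sentences.contents.
print(sentence.model_dump())
# {'type': 'text', 'value': {'answer': 'write a snake game'}, 'is_finished': True}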
data_model.py
CHANGED
@@ -4,7 +4,7 @@ from typing import Any, Optional, Union
 
 from metagpt.actions.action import Action
 from metagpt.actions.action_output import ActionOutput
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, Field, field_validator
 
 from message_enum import SentenceType
 
@@ -19,6 +19,13 @@ class Sentence(BaseModel):
     value: SentenceValue
     is_finished: Optional[bool] = None
 
+    @field_validator("id", mode="before")
+    @classmethod
+    def validate_credits(cls, v):
+        if isinstance(v, str):
+            return v
+        return str(v)
+
 
 class Sentences(BaseModel):
     id: Optional[str] = None
@@ -58,6 +65,13 @@ class ThinkActStep(BaseModel):
     description: str
     content: Sentence = None
 
+    @field_validator("id", mode="before")
+    @classmethod
+    def validate_credits(cls, v):
+        if isinstance(v, str):
+            return v
+        return str(v)
+
 
 class ThinkActPrompt(BaseModel):
     message_id: int = None
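
The validator added to both models runs in "before" mode, i.e. before pydantic's normal field validation, and coerces any non-string id into a string. A small self-contained sketch of the same pattern (the surrounding model is a placeholder; only the validator mirrors the diff):

from typing import Optional

from pydantic import BaseModel, field_validator


class Item(BaseModel):
    id: Optional[str] = None

    @field_validator("id", mode="before")
    @classmethod
    def validate_credits(cls, v):
        # Pass strings through; stringify anything else, e.g. integer ids.
        if isinstance(v, str):
            return v
        return str(v)


print(Item(id=42).id)   # "42": coerced before the str type check
print(Item(id="7").id)  # "7": unchanged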
requirements.txt
CHANGED
@@ -3,4 +3,4 @@ aiozipstream==0.4
 aioboto3~=11.3.0
 fastapi
 uvicorn
-
+metagpt==0.6.5
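
With the dependency now pinned, a quick way to confirm the installed version matches the requirement (illustrative check, not part of the repo):

from importlib.metadata import version

assert version("metagpt") == "0.6.5", f"expected metagpt 0.6.5, got {version('metagpt')}"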
software_company.py
CHANGED
@@ -35,12 +35,16 @@ from zipstream import AioZipStream
 _default_llm_stream_log = partial(print, end="")
 
 
+class PackInfo(BaseModel):
+    url: str
+
+
 class RoleRun(Action):
     role: Role
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        action = self.role.
+        action = self.role.rc.todo
         self.desc = f"{self.role.profile} {action.desc or str(action)}"
 
 
@@ -53,9 +57,10 @@ class PackProject(Action):
 
     async def run(self, key: str):
         url = await self.upload(key)
+        info = PackInfo(url=url)
         mdfile = MdUtils(None)
         mdfile.new_line(mdfile.new_inline_link(url, url.rsplit("/", 1)[-1]))
-        return ActionOutput(mdfile.get_md_text(),
+        return ActionOutput(mdfile.get_md_text(), info)
 
     async def upload(self, key: str):
         files = []
@@ -97,7 +102,7 @@ class SoftwareCompany(Role):
         BossRequirement -> WritePRD -> WriteDesign -> WriteTasks -> WriteCode ->
         """
         if self.finish:
-            self.
+            self.rc.todo = None
             return False
 
         if self.git_repo is not None:
@@ -107,16 +112,16 @@ class SoftwareCompany(Role):
         for role in environment.roles.values():
             if await role._observe():
                 await role._think()
-                if isinstance(role.
+                if isinstance(role.rc.todo, PrepareDocuments):
                     self.active_role = role
                     await self.act()
                     self.git_repo = CONFIG.git_repo
                     return await self._think()
 
-                if isinstance(role.
+                if isinstance(role.rc.todo, SummarizeCode):
                     return await self._think()
 
-                self.
+                self.rc.todo = RoleRun(role=role)
                 self.active_role = role
                 return True
 
@@ -129,15 +134,15 @@ class SoftwareCompany(Role):
         CONFIG.src_workspace = CONFIG.git_repo.workdir / CONFIG.git_repo.workdir.name
         CONFIG.max_auto_summarize_code = self.max_auto_summarize_code
 
-        if isinstance(self.
+        if isinstance(self.rc.todo, PackProject):
            workdir = CONFIG.git_repo.workdir
            name = workdir.name
            uid = workdir.parent.name
            now = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
            key = f"{uid}/metagpt-{name}-{now}.zip"
-            output = await self.
+            output = await self.rc.todo.run(key)
             self.finish = True
-            return Message(output.content, role=self.profile, cause_by=type(self.
+            return Message(output.content, role=self.profile, cause_by=type(self.rc.todo))
 
     default_log_stream = CONFIG.get("LLM_STREAM_LOG", _default_llm_stream_log)
 
@@ -344,7 +349,7 @@ class SoftwareCompany(Role):
 
     async def think(self):
         await self._think()
-        return self.
+        return self.rc.todo
 
     async def act(self):
         return await self._act()
@@ -353,7 +358,7 @@ class SoftwareCompany(Role):
         stg_path = SERDESER_PATH.joinpath("software_company") if stg_path is None else stg_path
 
         team_info_path = stg_path.joinpath("software_company_info.json")
-        write_json_file(team_info_path, self.
+        write_json_file(team_info_path, self.model_dump(exclude={"company": True}))
 
         self.company.serialize(stg_path.joinpath("company"))  # save company alone
 
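
Most of the changes in this file move pending-action access to role.rc.todo and then dispatch on the type of that pending action (PrepareDocuments, SummarizeCode, PackProject). A simplified, self-contained stand-in for that pattern, using plain classes rather than the real metagpt API:

from typing import Optional


class Action:
    desc: str = ""


class PrepareDocuments(Action):
    desc = "prepare project documents"


class RoleContext:
    def __init__(self) -> None:
        self.todo: Optional[Action] = None


class Role:
    profile = "ProductManager"

    def __init__(self) -> None:
        self.rc = RoleContext()

    def _think(self) -> None:
        # A real role chooses its next action; this stand-in hard-codes one.
        self.rc.todo = PrepareDocuments()


role = Role()
role._think()

# The wrapper inspects role.rc.todo to decide how to drive the team forward.
if isinstance(role.rc.todo, PrepareDocuments):
    print(f"{role.profile} will run: {role.rc.todo.desc}")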