Skip to content

Commit 816cb2d

Browse files
authored
Merge pull request #152 from TencentCloudBase/feature/http_functions_python
Feature/http functions python
2 parents cfd83eb + 3808b20 commit 816cb2d

File tree

11 files changed

+445
-0
lines changed

11 files changed

+445
-0
lines changed
Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,14 @@
1+
# OpenAI-Compatible API Configuration
2+
# Copy this file to .env and fill in your values
3+
4+
# Model name (e.g., gpt-4o-mini, qwen-plus, glm-4)
5+
OPENAI_MODEL=gpt-4o-mini
6+
7+
# API Base URL (optional; leave unset to use OpenAI's default endpoint)
8+
OPENAI_BASE_URL=your_base_url_here
9+
10+
# API Key (required)
11+
OPENAI_API_KEY=your_api_key_here
12+
13+
# Temperature (default: 0.7)
14+
OPENAI_TEMPERATURE=0.7
Lines changed: 132 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,132 @@
1+
"""Agentic Chat Agent Implementation.
2+
3+
This module implements a conversational chat agent using CrewAI Flow framework
4+
integrated with Cloudbase Agent. The agent uses LiteLLM for model interactions and
5+
supports streaming responses.
6+
"""
7+
8+
import os
9+
import sys
10+
import pysqlite3
11+
from pathlib import Path
12+
13+
# Configure HOME and CrewAI storage directories for SCF environment
14+
os.environ["HOME"] = "/tmp"
15+
os.environ["CREWAI_STORAGE_DIR"] = "/tmp"
16+
17+
# Create required directories
18+
credentials_dir = Path("/tmp/.local/share/crewai/credentials")
19+
credentials_dir.mkdir(parents=True, exist_ok=True)
20+
21+
# Replace sqlite3 with pysqlite3 for SCF compatibility
22+
sys.modules['sqlite3'] = pysqlite3
23+
24+
from crewai import Crew, Agent, Task
25+
26+
try:
27+
from crewai.flow import Flow, start, persist
28+
except ModuleNotFoundError as exc:
29+
raise ImportError(
30+
"crewai.flow is required. Please install a CrewAI version that includes Flow (e.g., crewai>=1.7.2)."
31+
) from exc
32+
from litellm import acompletion
33+
from crewai.events.event_bus import crewai_event_bus
34+
from ag_ui.core import EventType
35+
from cloudbase_agent.crewai import CrewAIAgent as _BaseCrewAIAgent
36+
from cloudbase_agent.crewai.converters import CopilotKitState
37+
from cloudbase_agent.crewai.context import flow_context
38+
from cloudbase_agent.crewai.events import BridgedTextMessageChunkEvent
39+
40+
from dotenv import load_dotenv
41+
load_dotenv()
42+
43+
class CrewAIAgent(_BaseCrewAIAgent):
    """Override run to avoid BaseAgent.as_current cross-context reset."""

    async def run(self, run_input):
        """Drive the internal run loop, optionally normalizing event IDs.

        Events from ``_run_internal`` are yielded as-is unless the
        ``_should_fix_event_ids`` flag (default: True) is set, in which case
        each event's IDs are rewritten to match the request's thread/run IDs.
        """
        fix_ids = getattr(self, "_should_fix_event_ids", True)
        async for event in self._run_internal(run_input):
            if fix_ids:
                event = super()._fix_event_ids(
                    event, run_input.thread_id, run_input.run_id
                )
            yield event
55+
@persist()
class AgenticChatFlow(Flow[CopilotKitState]):
    """Conversational chat flow using CrewAI framework.

    This flow implements a basic chat agent that sends the conversation to an
    OpenAI-compatible model via LiteLLM and bridges the assistant reply back
    to the client through the CrewAI event bus.

    :ivar state: Flow state containing conversation messages and CopilotKit context
    :type state: CopilotKitState
    """

    @start()
    async def chat(self) -> None:
        """Process chat messages and generate a response.

        This method is the entry point of the flow. It:

        1. Constructs messages with a system prompt and conversation history
        2. Calls the LiteLLM completion API (non-streaming: the complete
           response is requested in a single call)
        3. Emits the assistant text as a TEXT_MESSAGE_CHUNK event on the
           CrewAI event bus so the bridge can forward it to the client
        4. Appends the assistant's response to conversation state

        Failures are logged and swallowed so a bad completion does not crash
        the flow run.
        """
        system_prompt = "You are a helpful assistant."

        try:
            model_name = os.getenv("OPENAI_MODEL")
            base_url = os.getenv("OPENAI_BASE_URL")
            api_key = os.getenv("OPENAI_API_KEY")
            # Optional sampling temperature (see .env.example). Only forwarded
            # when explicitly configured; None leaves litellm at its default.
            temperature_env = os.getenv("OPENAI_TEMPERATURE")
            temperature = float(temperature_env) if temperature_env else None

            # CopilotKit frontend actions are exposed to the model as tools.
            tools = getattr(self.state.copilotkit, "actions", [])
            tools_arg = tools if tools else None

            # Run the model. NOTE: stream=False — this is a single blocking
            # completion, not token streaming; the reply is re-emitted as one
            # chunk event below.
            response = await acompletion(
                model=model_name,
                messages=[{"role": "system", "content": system_prompt}, *self.state.messages],
                tools=tools_arg,
                parallel_tool_calls=False,
                stream=False,
                base_url=base_url,
                api_key=api_key,
                temperature=temperature,
                custom_llm_provider="openai",
            )

            message = response.choices[0].message
            content = message.content if hasattr(message, "content") else None

            if content:
                # Re-emit the assistant text on the CrewAI event bus so the
                # cloudbase-agent bridge forwards it to the client.
                flow = flow_context.get(None)
                if flow is not None:
                    crewai_event_bus.emit(
                        flow,
                        BridgedTextMessageChunkEvent(
                            type=EventType.TEXT_MESSAGE_CHUNK,
                            message_id=getattr(message, "id", None) or response.id,
                            role="assistant",
                            delta=content,
                        ),
                    )

            self.state.messages.append(message)
        except Exception as e:
            # Best-effort: log and continue rather than failing the flow run.
            print(f"[CrewAI Flow Chat] {e}")
123+
def build_chat_workflow() -> AgenticChatFlow:
    """Create a fresh chat workflow for a single conversation.

    A brand-new :class:`AgenticChatFlow` is constructed on every call so that
    separate conversations never share flow state.

    :return: New instance of the chat workflow
    :rtype: AgenticChatFlow
    """
    workflow = AgenticChatFlow()
    return workflow

httpfunctions/crewai-python/app.py

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
# -*- coding: utf-8 -*-
from cloudbase_agent.server import AgentServiceApp

# Imported for its side effects only — presumably SCF runtime compatibility
# patches applied before the agent module loads. TODO confirm; module not
# shown in this change.
import compatibility

from agent import build_chat_workflow, CrewAIAgent

if __name__ == "__main__":
    # Wrap the CrewAI flow in the Cloudbase agent adapter.
    agent = CrewAIAgent(
        flow=build_chat_workflow(),
    )
    # The factory passed to run() returns this single shared agent instance.
    AgentServiceApp().run(lambda:{"agent": agent})
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
fastapi==0.125.0
2+
cloudbase-agent-server==0.1.11
3+
cloudbase-agent-core==0.1.11
4+
cloudbase-agent-crewai==0.1.11
5+
crewai==1.7.2
6+
pysqlite3-binary==0.5.4
7+
litellm==1.50.0
8+
openai==1.83.0
9+
python-dotenv==1.1.1
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
#!/bin/bash
2+
export PYTHONPATH="./env:$PYTHONPATH"
3+
/var/lang/python310/bin/python3 -u app.py
Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,14 @@
1+
# OpenAI-Compatible API Configuration
2+
# Copy this file to .env and fill in your values
3+
4+
# Model name (e.g., gpt-4o-mini, qwen-plus, glm-4)
5+
OPENAI_MODEL=gpt-4o-mini
6+
7+
# API Base URL (optional; leave unset to use OpenAI's default endpoint)
8+
OPENAI_BASE_URL=your_base_url_here
9+
10+
# API Key (required)
11+
OPENAI_API_KEY=your_api_key_here
12+
13+
# Temperature (default: 0.7)
14+
OPENAI_TEMPERATURE=0.7
Lines changed: 115 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,115 @@
1+
# LangGraph Python Agent 快速开始
2+
3+
本文档介绍如何将一个基于 LangGraph 的 AI Agent 应用部署到 CloudBase HTTP 云函数。该项目使用 `cloudbase-agent-server` 作为应用程序运行框架。
4+
5+
我们这里使用 `python3.10` 进行开发。
6+
7+
## 第1步: 编写基础应用
8+
9+
创建名为 `langgraph-python` 的新项目,并进入此目录中:
10+
11+
```bash
12+
mkdir langgraph-python
13+
cd langgraph-python
14+
```
15+
16+
创建虚拟环境
17+
18+
```bash
19+
python3.10 -m venv venv
20+
source venv/bin/activate # 激活虚拟环境
21+
```
22+
23+
安装依赖组件
24+
25+
```bash
26+
python -m pip install -r ./requirements.txt \
27+
--platform manylinux_2_17_x86_64 \
28+
--target ./env \
29+
--python-version 3.10 \
30+
--only-binary=:all: \
31+
--upgrade
32+
```
33+
34+
## 第2步:配置环境变量
35+
36+
创建 `.env` 文件(参考 `.env.example`):
37+
38+
```bash
39+
OPENAI_API_KEY=your_api_key_here
40+
OPENAI_BASE_URL=https://api.openai.com/v1
41+
OPENAI_MODEL=gpt-4o-mini
42+
```
43+
44+
**注意**: 部署到 SCF 时,需要在云函数控制台配置这些环境变量。
45+
46+
## 第3步:编写代码
47+
48+
### `agent.py` - Agent 实现
49+
50+
```python
51+
from langgraph.graph import StateGraph, MessagesState
52+
from langchain_openai import ChatOpenAI
53+
import os
54+
55+
def chat_node(state, config=None):
56+
api_key = os.getenv("OPENAI_API_KEY")
57+
base_url = os.getenv("OPENAI_BASE_URL")
58+
59+
if not api_key or not base_url:
60+
raise ValueError("Environment variables not set")
61+
62+
chat_model = ChatOpenAI(
63+
model=os.getenv("OPENAI_MODEL", "gpt-4o-mini"),
64+
api_key=api_key,
65+
base_url=base_url,
66+
)
67+
# ... 实现 chat 逻辑
68+
```
69+
70+
### `app.py` - 应用入口
71+
72+
```python
73+
from cloudbase_agent.server import AgentServiceApp
74+
from cloudbase_agent.langgraph import LangGraphAgent
75+
from agent import build_agentic_chat_workflow
76+
77+
if __name__ == "__main__":
78+
agent = LangGraphAgent(graph=build_agentic_chat_workflow())
79+
AgentServiceApp().run(lambda: {"agent": agent})
80+
```
81+
82+
**服务端口**: 默认使用 9000 端口(由 `cloudbase-agent-server` 管理)。
83+
84+
### `scf_bootstrap` - SCF 启动脚本
85+
86+
```bash
87+
#!/bin/bash
88+
export PYTHONPATH="./env:$PYTHONPATH"
89+
/var/lang/python310/bin/python3 -u app.py
90+
```
91+
92+
**说明**:
93+
- 设置 `PYTHONPATH` 指向 `./env` 目录,让 Python 能找到依赖包
94+
- 所有通过 pip 安装的依赖包都存放在 `env/` 目录中
95+
96+
## 第4步:管理项目依赖
97+
98+
### 打包部署
99+
100+
将项目文件打成 zip 包(包含本地依赖):
101+
102+
```bash
103+
zip -r langgraph-python.zip .
104+
```
105+
106+
### 上传部署
107+
108+
1. 选择 **HTTP 云函数**
109+
2. Python 运行时选择 **3.10**
110+
3. 上传 zip 包
111+
4. 在控制台配置环境变量:
112+
- `OPENAI_API_KEY`
113+
- `OPENAI_BASE_URL`
114+
- `OPENAI_MODEL`(可选)
115+
5. 点击部署

0 commit comments

Comments
 (0)