-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy pathtest_self_reflection.py
More file actions
61 lines (51 loc) · 1.67 KB
/
test_self_reflection.py
File metadata and controls
61 lines (51 loc) · 1.67 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
#!/usr/bin/env python3
"""Test self-reflection functionality in both Agent and LLM classes"""
from praisonaiagents import Agent
from praisonaiagents.llm import LLM
def test_agent_self_reflection():
    """Smoke-test the high-level Agent API with self-reflection turned on."""
    print("=== Testing Agent Self-Reflection ===")
    # The agent is configured to critique and revise its own answer
    # at least once and at most three times before returning it.
    reflective_agent = Agent(
        name="ReflectiveAgent",
        instructions="You are a helpful assistant.",
        llm="gpt-4o-mini",
        self_reflect=True,
        min_reflect=1,
        max_reflect=3,
    )
    answer = reflective_agent.start("What is 2+2? Be brief.")
    print(f"Agent Response: {answer}")
    print()
def test_llm_self_reflection():
    """Smoke-test the lower-level LLM API with self-reflection turned on."""
    print("=== Testing LLM Self-Reflection ===")
    # Exercise the same reflection knobs directly on the LLM wrapper,
    # bypassing the Agent layer; verbose=True surfaces the reflection steps.
    model = LLM(model="gpt-4o-mini")
    reply = model.get_response(
        prompt="What is 2+2? Be brief.",
        system_prompt="You are a helpful assistant.",
        self_reflect=True,
        min_reflect=1,
        max_reflect=3,
        verbose=True,
    )
    print(f"LLM Response: {reply}")
    print()
def test_llm_no_reflection():
    """Baseline run of the LLM API with self-reflection disabled, for comparison."""
    print("=== Testing LLM Without Self-Reflection ===")
    # Identical prompt/system-prompt to the reflection tests so the
    # output difference is attributable to self_reflect alone.
    model = LLM(model="gpt-4o-mini")
    reply = model.get_response(
        prompt="What is 2+2? Be brief.",
        system_prompt="You are a helpful assistant.",
        self_reflect=False,
        verbose=True,
    )
    print(f"LLM Response: {reply}")
    print()
if __name__ == "__main__":
    # Run the baseline (no reflection) first so its output can be
    # compared against the reflection-enabled runs that follow.
    test_llm_no_reflection()
    # Reflection exercised directly on the LLM wrapper.
    test_llm_self_reflection()
    # Reflection exercised through the higher-level Agent API.
    test_agent_self_reflection()