# Source: aiagents4pharma/talk2biomodels/tests/test_getmodelinfo.py
'''
Test cases for Talk2Biomodels get_modelinfo tool.
'''
from langchain_core.messages import HumanMessage, ToolMessage
from langchain_openai import ChatOpenAI
from ..agents.t2b_agent import get_app
# Shared LLM for every test below; temperature=0 makes the agent's
# responses as deterministic as the provider allows.
LLM_MODEL = ChatOpenAI(model='gpt-4o-mini',temperature=0)
def test_get_modelinfo_tool():
    '''
    Test the get_modelinfo tool.

    Uploads a local SBML file into the agent's state, asks the agent to
    describe the model, and checks that the final reply is plain text.
    '''
    thread = 12345
    app = get_app(thread, LLM_MODEL)
    config = {"configurable": {"thread_id": thread}}
    # Point the agent at a local SBML file before invoking it.
    app.update_state(
        config,
        {"sbml_file_path": ["aiagents4pharma/talk2biomodels/tests/BIOMD0000000449_url.xml"]},
    )
    # Invoke the agent so it runs the get_modelinfo tool on the upload.
    response = app.invoke(
        {"messages": [HumanMessage(content="Extract all relevant information from the uploaded model.")]},
        config=config,
    )
    # The assistant's closing message should be a string summary.
    assert isinstance(response["messages"][-1].content, str)
def test_model_with_no_species():
    '''
    Test the get_modelinfo tool with a model that does not
    return any species.
    This should raise a tool error.
    '''
    thread = 12345
    app = get_app(thread, LLM_MODEL)
    config = {"configurable": {"thread_id": thread}}
    # Ask for species from a model known to have none; the tool should error.
    app.invoke(
        {"messages": [HumanMessage(content="Extract all species from model 20")]},
        config=config,
    )
    history = app.get_state(config).values["messages"]
    # Scan from the newest message backwards for the tool's error report;
    # any() short-circuits at the first match, mirroring the break-on-found scan.
    assert any(
        isinstance(msg, ToolMessage)
        and msg.name == "get_modelinfo"
        and msg.status == "error"
        and "ValueError('Unable to extract species from the model.')" in msg.content
        for msg in reversed(history)
    )
def test_model_with_no_parameters():
    '''
    Test the get_modelinfo tool with a model that does not
    return any parameters.
    This should raise a tool error.
    '''
    thread = 12345
    app = get_app(thread, LLM_MODEL)
    config = {"configurable": {"thread_id": thread}}
    # Ask for parameters from a model known to have none; the tool should error.
    app.invoke(
        {"messages": [HumanMessage(content="Extract all parameters from model 10")]},
        config=config,
    )
    history = app.get_state(config).values["messages"]
    # Scan from the newest message backwards for the tool's error report;
    # any() short-circuits at the first match, mirroring the break-on-found scan.
    assert any(
        isinstance(msg, ToolMessage)
        and msg.name == "get_modelinfo"
        and msg.status == "error"
        and "ValueError('Unable to extract parameters from the model.')" in msg.content
        for msg in reversed(history)
    )