[9d3784]: / aiagents4pharma / talk2knowledgegraphs / tests / test_utils_enrichments_ollama.py

Download this file

94 lines (78 with data), 3.0 kB

 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
"""
Test cases for utils/enrichments/ollama.py
"""
import pytest
import ollama
from ..utils.enrichments.ollama import EnrichmentWithOllama
@pytest.fixture(name="ollama_config")
def fixture_ollama_config():
    """Return a dictionary with the Ollama configuration used by the enrichment tests.

    Keys mirror the keyword arguments of ``EnrichmentWithOllama``:
    ``model_name``, ``prompt_enrichment``, ``temperature``, ``streaming``.
    """
    return {
        # Small model keeps pulls fast in CI.
        "model_name": "llama3.2:1b",
        # NOTE: fixed typos in the few-shot example ("addditional" -> "additional",
        # "Ouput" -> "Output") so the prompt sent to the model is well-formed.
        "prompt_enrichment": """
        Given the input as a list of strings, please return the list of additional information
        of each input terms using your prior knowledge.

        Example:
        Input: ['acetaminophen', 'aspirin']
        Output: ['acetaminophen is a medication used to treat pain and fever',
        'aspirin is a medication used to treat pain, fever, and inflammation']

        Do not include any pretext as the output, only the list of strings enriched.

        Input: {input}
        """,
        # temperature 0.0 makes the model output as deterministic as possible.
        "temperature": 0.0,
        "streaming": False,
    }
def test_no_model_ollama(ollama_config):
    """Test the case when the Ollama model is not available."""
    config = ollama_config
    small_model = "smollm2:135m"  # Choose a small model
    # Ensure the model is absent before constructing the enrichment class;
    # deleting an already-missing model raises ResponseError, which is fine.
    try:
        ollama.delete(small_model)
    except ollama.ResponseError:
        pass
    # Constructing with a missing model should pull it and then raise.
    expected_msg = f"Error: Pulled {small_model} model and restarted Ollama server."
    with pytest.raises(ValueError, match=expected_msg):
        EnrichmentWithOllama(
            model_name=small_model,
            prompt_enrichment=config["prompt_enrichment"],
            temperature=config["temperature"],
            streaming=config["streaming"],
        )
    # Clean up the model that was pulled during construction.
    ollama.delete(small_model)
def test_enrich_ollama(ollama_config):
    """Test the Ollama textual enrichment class for node enrichment."""
    # The fixture keys match the constructor keywords exactly, so unpack it.
    enrichment = EnrichmentWithOllama(**ollama_config)
    # Enrich a single node and verify the text was actually changed.
    nodes = ["acetaminophen"]
    enriched = enrichment.enrich_documents(nodes)
    assert len(enriched) == 1
    assert all(after != before for before, after in zip(nodes, enriched))
def test_enrich_ollama_rag(ollama_config):
    """Test the Ollama textual enrichment class for enrichment with RAG (not implemented)."""
    # The fixture keys match the constructor keywords exactly, so unpack it.
    enrichment = EnrichmentWithOllama(**ollama_config)
    # The docs are ignored by the current implementation; the call should
    # behave like plain enrichment.
    nodes = ["acetaminophen"]
    docs = [r"\path\to\doc1", r"\path\to\doc2"]
    enriched = enrichment.enrich_documents_with_rag(nodes, docs)
    assert len(enriched) == 1
    assert all(after != before for before, after in zip(nodes, enriched))