blob: 5f164e72e8eccc78b15a8caba840072b91609a2f (
plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
|
import os
from dataclasses import dataclass
import ollama
from loguru import logger as log
@dataclass
class Prompt:
    """A user question together with the retrieved context used to answer it."""

    # The user's question; stripped of surrounding whitespace before prompting.
    query: str
    # Supporting text the model should ground its answer in; also stripped.
    context: str
class Generator:
    """Generates answers to questions with an Ollama-hosted LLM.

    The model name is read once from the ``GENERATOR_MODEL`` environment
    variable at construction time.
    """

    def __init__(self) -> None:
        # Indexing (not .get) fails fast with a KeyError if the variable is
        # missing, surfacing misconfiguration at startup rather than on first use.
        self.model = os.environ["GENERATOR_MODEL"]

    def __metaprompt(self, role: str, prompt: Prompt) -> str:
        """Assemble the full prompt string sent to the model.

        Args:
            role: Persona the model should adopt (e.g. "helpful assistant").
            prompt: The user query plus its retrieved context.

        Returns:
            The formatted metaprompt: role preamble, instructions, stripped
            question, stripped context, and a trailing "Answer:" cue.
        """
        return (
            f"You are a {role}.\n"
            "Answer the following question using the provided context.\n"
            "If you can't find the answer, do not pretend you know it,"
            'but answer "I don\'t know".\n\n'
            f"Question: {prompt.query.strip()}\n\n"
            "Context:\n"
            f"{prompt.context.strip()}\n\n"
            "Answer:\n"
        )

    def generate(self, prompt: Prompt, role: str) -> str:
        """Generate an answer for *prompt* using the configured model.

        Args:
            prompt: Query and context to answer from.
            role: Persona injected into the metaprompt.

        Returns:
            The model's generated answer text.
        """
        log.debug("Generating answer...")
        metaprompt = self.__metaprompt(role, prompt)
        # Log (not print) the prompt dump so it respects the configured
        # log level and sinks; loguru formats the {} placeholder lazily.
        log.debug("metaprompt =\n{}", metaprompt)
        # ollama.generate returns a response mapping (GenerateResponse),
        # not a plain string; extract the generated text to honor the
        # declared -> str contract.
        response = ollama.generate(model=self.model, prompt=metaprompt)
        return response["response"]
|