import os
from dataclasses import dataclass

import ollama
from loguru import logger as log


@dataclass
class Prompt:
    """A user query together with the retrieved context it should be answered from."""

    query: str
    context: str


class Generator:
    """Generates answers with a local Ollama model, grounded in the provided context."""

    def __init__(self) -> None:
        # Name of the Ollama model to use, taken from the GENERATOR_MODEL env var.
        self.model = os.environ["GENERATOR_MODEL"]

    def __metaprompt(self, prompt: Prompt) -> str:
        # Wrap the query and context in an instruction that discourages hallucination.
        metaprompt = (
            "Answer the following question using the provided context.\n"
            "If you can't find the answer, do not pretend you know it, "
            'but answer "I don\'t know".\n\n'
            f"Question: {prompt.query.strip()}\n\n"
            "Context:\n"
            f"{prompt.context.strip()}\n\n"
            "Answer:\n"
        )
        return metaprompt

    def generate(self, prompt: Prompt) -> str:
        log.debug("Generating answer...")
        metaprompt = self.__metaprompt(prompt)
        # ollama.generate returns a response mapping; the generated text is under "response".
        return ollama.generate(model=self.model, prompt=metaprompt)["response"]
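

# Minimal usage sketch (assumptions: a local Ollama instance is running and the model
# named by GENERATOR_MODEL is already pulled; "llama3" and the query/context strings
# below are hypothetical stand-ins for whatever your retrieval step would supply).
if __name__ == "__main__":
    os.environ.setdefault("GENERATOR_MODEL", "llama3")  # hypothetical default for this sketch
    generator = Generator()
    prompt = Prompt(
        query="What port does the service listen on?",
        context="The service is configured to listen on port 8080 by default.",
    )
    print(generator.generate(prompt))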