import os
from dataclasses import dataclass

import ollama


@dataclass
class Prompt:
    """A user question paired with the retrieved context used to ground the answer."""

    question: str
    context: str


class Generator:
    """Generate answers with an ollama-hosted LLM, grounded in retrieved context."""

    def __init__(self) -> None:
        # Fail fast (KeyError) if the generator model is not configured.
        self.model = os.environ["GENERATOR_MODEL"]

    def __metaprompt(self, role: str, prompt: Prompt) -> str:
        """Build the full prompt sent to the model.

        Embeds the role, the grounding instructions, the question, and the
        retrieved context into a single string.
        """
        # NOTE: adjacent string literals are concatenated verbatim, so each
        # piece must carry its own trailing whitespace/newlines.
        metaprompt = (
            f"You are a {role}.\n"
            "Answer the following question using the provided context.\n"
            "If you can't find the answer, do not pretend you know it, "
            'but answer "I don\'t know".\n\n'
            f"Question: {prompt.question.strip()}\n\n"
            "Context:\n"
            f"{prompt.context.strip()}\n\n"
            "Answer:\n"
        )
        return metaprompt

    def generate(self, role: str, prompt: Prompt) -> str:
        """Return the model's answer to *prompt*, acting as *role*.

        Raises whatever `ollama.generate` raises on connection/model errors.
        """
        metaprompt = self.__metaprompt(role, prompt)
        response = ollama.generate(model=self.model, prompt=metaprompt)
        # ollama.generate returns a response mapping; the generated text is
        # under the "response" key — extracting it honors the declared -> str.
        return response["response"]