Diffstat (limited to 'rag/generator')
-rw-r--r--  rag/generator/__init__.py   4
-rw-r--r--  rag/generator/abstract.py   4
-rw-r--r--  rag/generator/ollama.py    10
-rw-r--r--  rag/generator/prompt.py     4
4 files changed, 9 insertions, 13 deletions
diff --git a/rag/generator/__init__.py b/rag/generator/__init__.py
index ba23ffc..a776231 100644
--- a/rag/generator/__init__.py
+++ b/rag/generator/__init__.py
@@ -4,11 +4,11 @@ from .abstract import AbstractGenerator
from .cohere import Cohere
from .ollama import Ollama
-MODELS = ["ollama", "cohere"]
+MODELS = ["local", "cohere"]
def get_generator(model: str) -> Type[AbstractGenerator]:
match model:
- case "ollama":
+ case "local":
return Ollama()
case "cohere":
return Cohere()
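
Note: with this change the Ollama backend is selected via the "local" key instead of "ollama"; callers that still pass "ollama" no longer match this case. A minimal usage sketch (the factory call and MODELS come from the hunk above, the surrounding lines are illustrative only):

    from rag.generator import MODELS, get_generator

    assert "local" in MODELS            # "ollama" is no longer an accepted key
    generator = get_generator("local")  # returns an Ollama() instance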
diff --git a/rag/generator/abstract.py b/rag/generator/abstract.py
index 439c1b5..1beacfb 100644
--- a/rag/generator/abstract.py
+++ b/rag/generator/abstract.py
@@ -16,7 +16,3 @@ class AbstractGenerator(type):
@abstractmethod
def generate(self, prompt: Prompt) -> Generator[Any, Any, Any]:
pass
-
- @abstractmethod
- def rerank(self, prompt: Prompt) -> Prompt:
- return prompt
diff --git a/rag/generator/ollama.py b/rag/generator/ollama.py
index b72d763..9118906 100644
--- a/rag/generator/ollama.py
+++ b/rag/generator/ollama.py
@@ -1,5 +1,5 @@
import os
-from typing import Any, Dict, Generator, List
+from typing import Any, Generator, List
import ollama
from loguru import logger as log
@@ -24,12 +24,12 @@ class Ollama(metaclass=AbstractGenerator):
def __metaprompt(self, prompt: Prompt) -> str:
metaprompt = (
- "Answer the question based only on the following context:\n"
- "<context>\n"
- f"{self.__context(prompt.documents)}\n\n"
- "</context>\n"
f"{ANSWER_INSTRUCTION}"
+ "Only the information between <results>...</results> should be used to answer the question.\n"
f"Question: {prompt.query.strip()}\n\n"
+ "<results>\n"
+ f"{self.__context(prompt.documents)}\n\n"
+ "</results>\n"
"Answer:"
)
return metaprompt
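
Note: after the reordering, __metaprompt puts the answering instructions and the question ahead of the retrieved context, and wraps the documents in <results> tags instead of <context>. Reconstructed from the hunk above, the assembled prompt now looks roughly like:

    {ANSWER_INSTRUCTION}
    Only the information between <results>...</results> should be used to answer the question.
    Question: {query}

    <results>
    {context built from prompt.documents}

    </results>
    Answer: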
diff --git a/rag/generator/prompt.py b/rag/generator/prompt.py
index fa007db..f607122 100644
--- a/rag/generator/prompt.py
+++ b/rag/generator/prompt.py
@@ -5,8 +5,8 @@ from rag.retriever.vector import Document
ANSWER_INSTRUCTION = (
"Given the context information and not prior knowledge, answer the question."
- "If the context is irrelevant to the question, answer that you do not know "
- "the answer to the question given the context and stop.\n"
+ "If the context is irrelevant to the question or empty, then do not attempt to answer "
+ "the question, just reply that you do not know based on the context provided.\n"
)
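
Note: the rewritten ANSWER_INSTRUCTION also covers the empty-context case, so a prompt with no retrieved documents should yield an explicit "I don't know" style reply. A hypothetical end-to-end sketch; Prompt's constructor arguments and the shape of the streamed chunks are assumptions, not taken from this diff:

    from rag.generator import get_generator
    from rag.generator.prompt import Prompt

    # No documents retrieved: the generator is instructed to say it does not know.
    prompt = Prompt(query="What does the paper conclude?", documents=[])
    for chunk in get_generator("local").generate(prompt):
        print(chunk, end="", flush=True)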