From db8b4891a0a46b2c6da31f1a50d6501e9904fd26 Mon Sep 17 00:00:00 2001
From: Gustaf Rydholm
Date: Sun, 14 Apr 2024 23:14:33 +0200
Subject: Format

---
 rag/generator/ollama.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/rag/generator/ollama.py b/rag/generator/ollama.py
index b475dcf..77f8304 100644
--- a/rag/generator/ollama.py
+++ b/rag/generator/ollama.py
@@ -13,6 +13,7 @@ from .prompt import ANSWER_INSTRUCTION, Prompt
 class Ollama(metaclass=AbstractGenerator):
     def __init__(self) -> None:
         self.model = os.environ["GENERATOR_MODEL"]
+        log.debug(f"Using {self.model} for generator...")
 
     def __context(self, documents: List[Document]) -> str:
         results = [
@@ -39,7 +40,9 @@ class Ollama(metaclass=AbstractGenerator):
         for chunk in ollama.generate(model=self.model, prompt=metaprompt, stream=True):
             yield chunk["response"]
 
-    def chat(self, prompt: Prompt, messages: List[Dict[str, str]]) -> Generator[Any, Any, Any]:
+    def chat(
+        self, prompt: Prompt, messages: List[Dict[str, str]]
+    ) -> Generator[Any, Any, Any]:
         log.debug("Generating answer with ollama...")
         metaprompt = self.__metaprompt(prompt)
         messages.append({"role": "user", "content": metaprompt})
-- 
cgit v1.2.3-70-g09d2