Commit b247f106 authored by Grießhaber Daniel

refactor build_model_input to not modify its parameters

parent 46b178e4
Related merge requests: !2 remove is_chat argument, !1 Refactor models
@@ -80,7 +80,9 @@ class LLMModel(ABC):
         # create prompt
         prompt = prompt_prefix + prompt + prompt_suffix + prompt_appendix
         messages = [self._get_user_message(prompt)]
-        model_input = self.build_model_input(prompt, system_message, messages, history)
+        model_input, messages = self.build_model_input(
+            prompt, system_message, messages, history
+        )

         reponse, usage = self._create_completion(
             **model_input,
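With the call site now unpacking a tuple, every build_model_input implementation has to hand back both the model input and the (possibly rebuilt) messages list instead of mutating the list it received. A minimal sketch of the contract this implies, using the parameter names from the diff; the Message alias and the abstract declaration are illustrative assumptions, not code taken from the repository:

    from abc import ABC, abstractmethod
    from typing import Any

    Message = dict[str, str]  # hypothetical alias for one chat message

    class LLMModel(ABC):
        @abstractmethod
        def build_model_input(
            self,
            prompt: str,
            system_message: str | None,
            messages: list[Message],
            history: list[Message] | None,
        ) -> tuple[dict[str, Any], list[Message]]:
            # Return (model_input, messages); the arguments must not be modified.
            ...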
@@ -206,7 +208,7 @@ class Llama(LLMModel):
     ):
         if system_message is not None:
             prompt = system_message + prompt
-        return {"prompt": prompt}
+        return {"prompt": prompt}, messages

     def _create_completion(
         self,
@@ -290,13 +292,12 @@ class LlamaChat(Llama):
         # a history is prepended to the messages, and we assume that it also includes a system message, i.e., we never add a system message in this case
         # TODO is it better to check for a system message in the history?
         if history is not None:
-            [messages.insert(index, entry) for index, entry in enumerate(history)]
+            messages = history + messages
         elif system_message:
-            messages.insert(
-                0,
-                self._get_system_message(system_message),
-            )
-        return {"messages": messages}
+            messages = [self._get_system_message(system_message)] + messages
+        return {"messages": messages}, messages
@@ -341,7 +342,7 @@ class OpenAI(LLMModel):
             "system_message": system_message,
             "messages": messages,
             "history": history,
-        }
+        }, messages

     @classmethod
     def register_arguments(cls, parser: ArgumentParser):