From b247f1063079f7ccff4e5cfe67520b41543a3f1b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Daniel=20Grie=C3=9Fhaber?= <griesshaber@hdm-stuttgart.de>
Date: Mon, 19 Aug 2024 14:11:13 +0200
Subject: [PATCH] refactor build_model_input to not modify its parameters

---
 evoprompt/models.py | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/evoprompt/models.py b/evoprompt/models.py
index a2e3d8c..204b91f 100644
--- a/evoprompt/models.py
+++ b/evoprompt/models.py
@@ -80,7 +80,9 @@ class LLMModel(ABC):
         # create prompt
         prompt = prompt_prefix + prompt + prompt_suffix + prompt_appendix
         messages = [self._get_user_message(prompt)]
-        model_input = self.build_model_input(prompt, system_message, messages, history)
+        model_input, messages = self.build_model_input(
+            prompt, system_message, messages, history
+        )
 
         reponse, usage = self._create_completion(
             **model_input,
@@ -206,7 +208,7 @@ class Llama(LLMModel):
     ):
         if system_message is not None:
             prompt = system_message + prompt
-        return {"prompt": prompt}
+        return {"prompt": prompt}, messages
 
     def _create_completion(
         self,
@@ -290,13 +292,11 @@ class LlamaChat(Llama):
         # a history is prepended to the messages, and we assume that it also includes a system message, i.e., we never add a system message in this case
         # TODO is it better to check for a system message in the history?
         if history is not None:
-            [messages.insert(index, entry) for index, entry in enumerate(history)]
+            messages = history + messages
         elif system_message:
-            messages.insert(
-                0,
-                self._get_system_message(system_message),
-            )
-        return {"messages": messages}
+            messages = [self._get_system_message(system_message)] + messages
+
+        return {"messages": messages}, messages
 
 
 class OpenAI(LLMModel):
@@ -341,7 +341,7 @@
             "system_message": system_message,
             "messages": messages,
             "history": history,
-        }
+        }, messages
 
     @classmethod
     def register_arguments(cls, parser: ArgumentParser):
-- 
GitLab