From 331a4901115f44937116830860a65f8db8eacb6e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Daniel=20Grie=C3=9Fhaber?= <griesshaber@hdm-stuttgart.de>
Date: Mon, 19 Aug 2024 18:53:30 +0200
Subject: [PATCH] simplify history handling in `ChatModel.create_completion`

---
 evoprompt/models.py | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)

diff --git a/evoprompt/models.py b/evoprompt/models.py
index 19eb363..ce55258 100644
--- a/evoprompt/models.py
+++ b/evoprompt/models.py
@@ -281,11 +281,8 @@ class ChatModel:
 
         # a history is prepended to the messages, and we assume that it also includes a system message, i.e., we never add a system message in this case
         # TODO is it better to check for a system message in the history?
-        if history is not None:
-            messages = history + messages
-            [messages.insert(index, entry) for index, entry in enumerate(history)]
-        elif system_message:
-            messages = [self._get_system_message(system_message)] + messages
+        if history is None and system_message:
+            history = [self._get_system_message(system_message)]
 
         reponse, usage = self._create_completion(
             messages=messages,
@@ -296,7 +293,7 @@ class ChatModel:
         )
 
         messages.append(self._get_assistant_message(reponse))
-        return reponse, messages, usage
+        return reponse, history + messages, usage
 
 
 class LlamaChat(Llama, ChatModel):
-- 
GitLab