Skip to content
Snippets Groups Projects
Commit 543d6247 authored by Max Kimmich's avatar Max Kimmich
Browse files

Fix chat model not taking into account history

parent e2536dad
No related branches found
No related tags found
No related merge requests found
...@@ -314,10 +314,10 @@ class DifferentialEvolutionWithCot(DifferentialEvolution): ...@@ -314,10 +314,10 @@ class DifferentialEvolutionWithCot(DifferentialEvolution):
) )
messages = None messages = None
for idx, prompt in enumerate(DE_COT_PROMPTS): for idx, prompt_template in enumerate(DE_COT_PROMPTS):
response, messages, usage = self.evolution_model.create_completion( response, messages, usage = self.evolution_model.create_completion(
system_message=SYSTEM_MESSAGE, system_message=SYSTEM_MESSAGE,
prompt=prompt.format( prompt=prompt_template.format(
prompt1=prompt_1, prompt1=prompt_1,
prompt2=prompt_2, prompt2=prompt_2,
prompt3=best_prompt_current_evolution, prompt3=best_prompt_current_evolution,
......
...@@ -275,14 +275,18 @@ class ChatModel: ...@@ -275,14 +275,18 @@ class ChatModel:
history: ChatMessages | None = None, history: ChatMessages | None = None,
**kwargs: Any, **kwargs: Any,
) -> tuple[str, ModelUsage]: ) -> tuple[str, ModelUsage]:
# create prompt
prompt = prompt_prefix + prompt + prompt_suffix + prompt_appendix
messages = [self._get_user_message(prompt)]
# a history is prepended to the messages, and we assume that it also includes a system message, i.e., we never add a system message in this case # a history is prepended to the messages, and we assume that it also includes a system message, i.e., we never add a system message in this case
# TODO is it better to check for a system message in the history? # TODO is it better to check for a system message in the history?
if history is None and system_message: if history is None and system_message:
history = [self._get_system_message(system_message)] messages = [self._get_system_message(system_message)]
elif history is not None:
messages = history
else:
messages = []
# create prompt
prompt = prompt_prefix + prompt + prompt_suffix + prompt_appendix
messages += [self._get_user_message(prompt)]
reponse, usage = self._create_completion( reponse, usage = self._create_completion(
messages=messages, messages=messages,
...@@ -293,7 +297,7 @@ class ChatModel: ...@@ -293,7 +297,7 @@ class ChatModel:
) )
messages.append(self._get_assistant_message(reponse)) messages.append(self._get_assistant_message(reponse))
return reponse, history + messages, usage return reponse, messages, usage
class LlamaChat(ChatModel, Llama): class LlamaChat(ChatModel, Llama):
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment