From 49a22b31f103641d7462f0bc0037f3197084844f Mon Sep 17 00:00:00 2001
From: Maximilian Kimmich <maximilian.kimmich@ims.uni-stuttgart.de>
Date: Sat, 19 Oct 2024 14:29:54 +0200
Subject: [PATCH] Fix not showing all inputs in ResponseEditor

---
 evoprompt/evolution/evolution.py |  7 +++----
 evoprompt/models.py              |  3 +--
 evoprompt/optimization.py        | 20 +++++++++++++++++---
 3 files changed, 21 insertions(+), 9 deletions(-)

diff --git a/evoprompt/evolution/evolution.py b/evoprompt/evolution/evolution.py
index 7412f1f..e57e681 100644
--- a/evoprompt/evolution/evolution.py
+++ b/evoprompt/evolution/evolution.py
@@ -290,7 +290,7 @@ class GeneticAlgorithm(EvolutionAlgorithm):
             prompt1=prompt_1,
             prompt2=prompt_2,
         )
-        response, history, recent_turn, usage = self.evolution_model.create_completion(
+        response, _, recent_turn, usage = self.evolution_model.create_completion(
             system_message=SYSTEM_MESSAGE,
             prompt=filled_prompt,
             history=demo_messages if self.use_evolution_demo else None,
@@ -403,7 +403,7 @@ class DifferentialEvolution(EvolutionAlgorithm):
             prompt3=best_prompt_current_evolution,
             basic_prompt=prompts_current_evolution[current_iteration],
         )
-        response, history, recent_turn, usage = self.evolution_model.create_completion(
+        response, _, recent_turn, usage = self.evolution_model.create_completion(
             system_message=SYSTEM_MESSAGE,
             prompt=filled_prompt,
             history=demo_messages if self.use_evolution_demo else None,
@@ -511,7 +511,6 @@ class DifferentialEvolutionWithCot(DifferentialEvolution):
         evolutions_steps = []
         # list (turns) of list (demonstrations)
         demos = [[]]
-        response: str = ""
         judgements: list[Judgement] = []
         usage: ModelUsage = ModelUsage()
         for idx, prompt in enumerate(self._get_prompt_template()):
@@ -573,11 +572,11 @@ class DifferentialEvolutionWithCot(DifferentialEvolution):
             )
 
             # replace last message with corrected response
-            recent_turn[-1]["content"] = judgement.corrected_response
             response = judgement.corrected_response
 
             # update history with recent turn
             history += recent_turn
+            history.append(self.evolution_model._get_assistant_message(response))
 
         evolved_prompt = self.parse_response(response)
 
diff --git a/evoprompt/models.py b/evoprompt/models.py
index f80a582..cb7c4ff 100644
--- a/evoprompt/models.py
+++ b/evoprompt/models.py
@@ -118,7 +118,6 @@ class LLMModel(ABC):
             use_randomness=use_randomness,
         )
 
-        messages.append(self._get_assistant_message(reponse))
         return reponse, None, messages, usage
 
     def build_demonstration_data(
@@ -438,7 +437,7 @@ class ChatModel:
         return (
             reponse,
             history,
-            messages + [self._get_assistant_message(reponse)],
+            messages,
             usage,
         )
 
diff --git a/evoprompt/optimization.py b/evoprompt/optimization.py
index db40b63..cc3710a 100644
--- a/evoprompt/optimization.py
+++ b/evoprompt/optimization.py
@@ -47,11 +47,13 @@ class ResponseEditor(App):
         instruction: str,
         original_response: str,
         history: ChatMessages,
+        recent_turn: ChatMessages,
         judge_response: str,
     ):
         self.instruction = instruction
         self.response = original_response
         self.history = history
+        self.recent_turn = recent_turn
         self.judge_response = judge_response
         self.skip = False  # used to mark the prompt as skipped
         super().__init__()
@@ -64,11 +66,22 @@ class ResponseEditor(App):
                 Collapsible(
                     Static(message["content"]),
                     title=message["role"],
-                    collapsed=idx != len(self.history) - 1,
+                    collapsed=True,
                 )
-                for idx, message in enumerate(self.history)
+                for message in self.history
+            )
+        )
+        yield ScrollableContainer(
+            *(
+                Collapsible(
+                    Static(message["content"]),
+                    title=message["role"],
+                    collapsed=False,
+                )
+                for message in self.recent_turn
             )
         )
+
         yield ScrollableContainer(
             Label(Panel(self.judge_response, title="Judge response")),
             Label(Rule(title="Response to edit"), expand=True),
@@ -408,7 +421,8 @@ class PromptOptimization:
         editor = ResponseEditor(
             instruction,
             response,
-            history[:-1] if history is not None else None,
+            history if history is not None else None,
+            recent_turn=recent_turn,
             judge_response=judgement_response,
         )
        editor.run()
--
GitLab
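
Editor's note: the contract this patch settles on is that create_completion no longer appends
the assistant reply to the returned messages; the caller appends it to the history exactly
once, and ResponseEditor then receives both the full history (rendered collapsed) and the
recent turn (rendered expanded). The sketch below illustrates that contract with stand-in
helpers; the function bodies and any names not appearing in the diff are assumptions for
illustration, not evoprompt's actual implementation.

# Minimal, self-contained sketch (assumed stand-ins, not evoprompt code) of the
# bookkeeping established by this patch: create_completion returns
# (response, history, recent_turn, usage) where recent_turn holds only the user
# side of the turn, and the caller appends the assistant reply itself.

ChatMessages = list[dict[str, str]]


def get_assistant_message(content: str) -> dict[str, str]:
    # stand-in for LLMModel._get_assistant_message
    return {"role": "assistant", "content": content}


def create_completion(prompt: str) -> tuple[str, None, ChatMessages, None]:
    # Post-patch behavior: the assistant reply is NOT appended here.
    recent_turn: ChatMessages = [{"role": "user", "content": prompt}]
    response = "<model output>"  # placeholder instead of a real model call
    return response, None, recent_turn, None


history: ChatMessages = []
response, _, recent_turn, _ = create_completion("Combine prompt A with prompt B.")

# The caller owns the history update, mirroring DifferentialEvolutionWithCot:
history += recent_turn
history.append(get_assistant_message(response))

# A ResponseEditor built as in the patch would now receive both views: `history`
# shown collapsed and `recent_turn` shown expanded, so no input is hidden.
assert [m["role"] for m in history] == ["user", "assistant"]

Keeping the append on the caller's side means the last history entry no longer needs
trimming before display, which lines up with the diff replacing `history[:-1]` with
`history` when constructing the editor.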