diff --git a/evoprompt/models.py b/evoprompt/models.py
index 57e219a40960fd6c2df14abcdcb765c02240b1e1..9566f15351752385246deac10d7e23da659f9520 100644
--- a/evoprompt/models.py
+++ b/evoprompt/models.py
@@ -535,7 +535,7 @@ class HfChat(ChatModel, LLMModel):
         model_call_kwargs = {
             "text_inputs": messages,
             "stop": stop,
-            "max_length": max_tokens if max_tokens is not None else 2048,
+            "max_length": max_tokens if max_tokens is not None else 16384,
         }
         if use_randomness:
             # same temperature as in evoprompt paper reference implementation