diff --git a/evoprompt/models.py b/evoprompt/models.py
index 91467761f682a2f21519335a9acfeecc2746bbfe..bbfebaffa351f52ce54b578bd4f15fae1e25cf86 100644
--- a/evoprompt/models.py
+++ b/evoprompt/models.py
@@ -64,7 +64,7 @@ class LLMModel(ABC):
         # set up caching for model calls
         self._call_model_cached = None
-        if "disable_cache" not in self.kwargs or not self.kwargs["disable_cache"]:
+        if not self.kwargs.get("disable_cache", False):
             cache = Cache(Path(".cache_dir", self.model_cache_key))

             @cache.memoize(typed=True, ignore=[0, "func"])
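
A minimal sketch (not part of the patch) checking that the new `dict.get`-based condition agrees with the old membership check for every relevant `kwargs` state; the `cases` list below is illustrative only:

```python
# Illustrative equivalence check for the refactored condition.
cases = [
    {},                        # key absent          -> caching enabled
    {"disable_cache": False},  # explicitly enabled  -> caching enabled
    {"disable_cache": True},   # explicitly disabled -> caching skipped
]
for kwargs in cases:
    old = "disable_cache" not in kwargs or not kwargs["disable_cache"]
    new = not kwargs.get("disable_cache", False)
    assert old == new, (kwargs, old, new)
```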