diff --git a/evoprompt/models.py b/evoprompt/models.py
index 3d64e89e516b3dfa99bbccbe5c92f13f4e9c4631..2dabae9fa66685d56c5721f4dde2a6cc1942c323 100644
--- a/evoprompt/models.py
+++ b/evoprompt/models.py
@@ -66,7 +66,7 @@ class LLMModel(ABC):
 
         # set up caching for model calls
         self._call_model_cached = None
-        if "disable_cache" not in self.kwargs or not self.kwargs["disable_cache"]:
+        if not self.kwargs.get("disable_cache", False):
             cache = Cache(Path(".cache_dir", self.model_cache_key))
 
             @cache.memoize(typed=True, ignore=[0, "func"])
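
A minimal sanity check (not part of the patch), assuming `self.kwargs` is a plain dict and `disable_cache`, when present, holds a bool: the new `kwargs.get("disable_cache", False)` form evaluates to the same result as the old compound condition for the usual cases.

```python
# Hypothetical check of the refactor: the dict.get() form should match the
# old "key missing or falsy" condition. Assumes kwargs is a plain dict and
# disable_cache, when set, is a bool.
for kwargs in ({}, {"disable_cache": False}, {"disable_cache": True}):
    old = "disable_cache" not in kwargs or not kwargs["disable_cache"]
    new = not kwargs.get("disable_cache", False)
    assert old == new, kwargs
```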