diff --git a/llama_index/llms/localai.py b/llama_index/llms/localai.py
index 9f307254a65467668a8cc51e9906b9b0093d2bb7..4da2dc03b8dd1e6cc94a3dc23c1e2a036251d237 100644
--- a/llama_index/llms/localai.py
+++ b/llama_index/llms/localai.py
@@ -7,7 +7,7 @@ Source: https://github.com/go-skynet/LocalAI
 
 import warnings
 from types import MappingProxyType
-from typing import Any, Callable, Dict, Mapping, Optional, Sequence
+from typing import Any, Callable, Dict, Optional, Sequence
 
 from llama_index.bridge.pydantic import Field
 from llama_index.constants import DEFAULT_CONTEXT_WINDOW
@@ -19,7 +19,8 @@ from llama_index.types import BaseOutputParser, PydanticProgramMode
 
 # Use these as kwargs for OpenAILike to connect to LocalAIs
 DEFAULT_LOCALAI_PORT = 8080
-LOCALAI_DEFAULTS: Mapping[str, Any] = MappingProxyType(
+# TODO: move to MappingProxyType[str, Any] once Python 3.9+
+LOCALAI_DEFAULTS: Dict[str, Any] = MappingProxyType(  # type: ignore[assignment]
     {
         "api_key": "localai_fake",
         "api_type": "localai_fake",
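
Context for the annotation change above: `MappingProxyType` only supports subscripted generics (`MappingProxyType[str, Any]`) from Python 3.9 onward, so the constant is annotated as `Dict[str, Any]` with a `# type: ignore[assignment]` while the runtime value remains an immutable proxy, as the TODO notes. Below is a minimal usage sketch, not part of the diff, assuming `OpenAILike` is importable from `llama_index.llms.openai_like` and accepts the keys in `LOCALAI_DEFAULTS` as keyword arguments (the comment in the patched module suggests this usage); the model name is purely illustrative.

```python
# Sketch under the assumptions stated above; not part of the diff.
from llama_index.llms.localai import LOCALAI_DEFAULTS
from llama_index.llms.openai_like import OpenAILike

# LOCALAI_DEFAULTS is a read-only MappingProxyType at runtime, so unpack it
# into keyword arguments rather than mutating it.
llm = OpenAILike(
    model="local-model",  # hypothetical name of a model served by LocalAI
    **LOCALAI_DEFAULTS,
)

print(llm.complete("Hello from LocalAI"))
```

Because the proxy is read-only, an attempt such as `LOCALAI_DEFAULTS["api_key"] = "x"` raises `TypeError`, which is the reason for keeping `MappingProxyType` at runtime even though the static annotation is a plain `Dict`.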