fix: support new mlx-vlm module (#2001)

* fix stream_generate import statement

Signed-off-by: TwoLeaves <ohneherren@gmail.com>

* pin new mlx-vlm

Signed-off-by: Michele Dolfi <dol@zurich.ibm.com>

---------

Signed-off-by: TwoLeaves <ohneherren@gmail.com>
Signed-off-by: Michele Dolfi <dol@zurich.ibm.com>
Co-authored-by: Michele Dolfi <dol@zurich.ibm.com>
This commit is contained in:
TwoLeaves
2025-07-31 22:13:17 +10:00
committed by GitHub
parent 2eb760d060
commit 0130e3ae96
3 changed files with 61 additions and 190 deletions

View File

@@ -35,9 +35,9 @@ class HuggingFaceMlxModel(BasePageModel, HuggingFaceModelDownloadMixin):
if self.enabled:
try:
-            from mlx_vlm import generate, load  # type: ignore
+            from mlx_vlm import generate, load, stream_generate  # type: ignore
             from mlx_vlm.prompt_utils import apply_chat_template  # type: ignore
-            from mlx_vlm.utils import load_config, stream_generate  # type: ignore
+            from mlx_vlm.utils import load_config  # type: ignore
except ImportError:
raise ImportError(
"mlx-vlm is not installed. Please install it via `pip install mlx-vlm` to use MLX VLM models."