From b23925a74e4896c88caea99ebc112eed6421889b Mon Sep 17 00:00:00 2001
From: Christoph Auer <60343111+cau-git@users.noreply.github.com>
Date: Mon, 7 Jul 2025 14:57:38 +0200
Subject: [PATCH] Use lmstudio-community model

Signed-off-by: Christoph Auer <60343111+cau-git@users.noreply.github.com>
---
 docs/examples/vlm_pipeline_api_model.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/examples/vlm_pipeline_api_model.py b/docs/examples/vlm_pipeline_api_model.py
index badcb769..e198811f 100644
--- a/docs/examples/vlm_pipeline_api_model.py
+++ b/docs/examples/vlm_pipeline_api_model.py
@@ -198,7 +198,7 @@ def main():
     # Example using the OlmOcr (dynamic prompt) model with Ollama:
     # (uncomment the following lines)
     # pipeline_options.vlm_options = ollama_olmocr_vlm_options(
-    #     model="hf.co/allenai/olmOCR-7B-0225-preview",
+    #     model="hf.co/lmstudio-community/olmOCR-7B-0225-preview-GGUF:Q8_0",
     # )

     # Another possibility is using online services, e.g. watsonx.ai.
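
Context: Ollama can pull GGUF builds straight from Hugging Face via the
hf.co/<org>/<repo>:<quant> reference, and the lmstudio-community repository
ships GGUF quantizations of olmOCR-7B-0225-preview (the Q8_0 tag selects the
8-bit build), which is presumably why the model reference is switched here.
Below is a minimal sketch of the touched snippet once uncommented, assuming
the ollama_olmocr_vlm_options helper defined earlier in
docs/examples/vlm_pipeline_api_model.py, a running local Ollama server that
has pulled the model, and docling imports matching the example file at the
time of this patch; "my_document.pdf" is a placeholder input.

    from docling.datamodel.base_models import InputFormat
    from docling.datamodel.pipeline_options import VlmPipelineOptions
    from docling.document_converter import DocumentConverter, PdfFormatOption
    from docling.pipeline.vlm_pipeline import VlmPipeline

    # Allow the pipeline to call the locally hosted Ollama API endpoint.
    pipeline_options = VlmPipelineOptions(enable_remote_services=True)
    # ollama_olmocr_vlm_options is the helper defined earlier in the example
    # file; the model string is the one set by this patch.
    pipeline_options.vlm_options = ollama_olmocr_vlm_options(
        model="hf.co/lmstudio-community/olmOCR-7B-0225-preview-GGUF:Q8_0",
    )

    converter = DocumentConverter(
        format_options={
            InputFormat.PDF: PdfFormatOption(
                pipeline_cls=VlmPipeline,
                pipeline_options=pipeline_options,
            )
        }
    )
    result = converter.convert("my_document.pdf")  # placeholder input
    print(result.document.export_to_markdown())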