Merge branch 'vlm-dynamic-prompt' of https://github.com/shkarupa-alex/docling into vlm-dynamic-prompt

# Conflicts:
#	docs/examples/vlm_pipeline_api_model.py

Signed-off-by: Shkarupa Alex <shkarupa.alex@gmail.com>
This commit is contained in:
Shkarupa Alex 2025-07-07 17:09:11 +03:00
commit 91dedc3b63

View File

@ -180,12 +180,6 @@ def main():
format=ResponseFormat.DOCTAGS,
)
# Example using the OlmOcr (dynamic prompt) model with LM Studio:
# (uncomment the following lines)
# pipeline_options.vlm_options = lms_olmocr_vlm_options(
# model="hf.co/lmstudio-community/olmOCR-7B-0225-preview-GGUF",
# )
# Example using the Granite Vision model with LM Studio:
# (uncomment the following lines)
# pipeline_options.vlm_options = lms_vlm_options(
@ -194,6 +188,12 @@ def main():
# format=ResponseFormat.MARKDOWN,
# )
# Example using the OlmOcr (dynamic prompt) model with LM Studio:
# (uncomment the following lines)
# pipeline_options.vlm_options = lms_olmocr_vlm_options(
# model="hf.co/lmstudio-community/olmOCR-7B-0225-preview-GGUF",
# )
# Example using the Granite Vision model with Ollama:
# (uncomment the following lines)
# pipeline_options.vlm_options = ollama_vlm_options(