Merge branch 'dev/add-granite-docling-extension' of github.com:DS4SD/docling into dev/add-granite-docling-extension
@@ -36,7 +36,7 @@ Docling simplifies document processing, parsing diverse formats — including ad
 * 🔒 Local execution capabilities for sensitive data and air-gapped environments
 * 🤖 Plug-and-play [integrations][integrations] incl. LangChain, LlamaIndex, Crew AI & Haystack for agentic AI
 * 🔍 Extensive OCR support for scanned PDFs and images
-* 👓 Support of several Visual Language Models ([SmolDocling](https://huggingface.co/ds4sd/SmolDocling-256M-preview))
+* 👓 Support of several Visual Language Models ([GraniteDocling](https://huggingface.co/ibm-granite/granite-docling-258M))
 * 🎙️ Audio support with Automatic Speech Recognition (ASR) models
 * 🔌 Connect to any agent using the [MCP server](https://docling-project.github.io/docling/usage/mcp/)
 * 💻 Simple and convenient CLI
@@ -88,9 +88,9 @@ Docling has a built-in CLI to run conversions.
 docling https://arxiv.org/pdf/2206.01062
 ```
 
-You can also use 🥚[SmolDocling](https://huggingface.co/ds4sd/SmolDocling-256M-preview) and other VLMs via Docling CLI:
+You can also use 🥚[GraniteDocling](https://huggingface.co/ibm-granite/granite-docling-258M) and other VLMs via Docling CLI:
 ```bash
-docling --pipeline vlm --vlm-model smoldocling https://arxiv.org/pdf/2206.01062
+docling --pipeline vlm --vlm-model granitedocling https://arxiv.org/pdf/2206.01062
 ```
 This will use MLX acceleration on supported Apple Silicon hardware.
 
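For readers who prefer the Python API over the CLI, a minimal sketch of the equivalent conversion follows. It assumes the documented VLM-pipeline wiring (`DocumentConverter` + `PdfFormatOption` + `VlmPipeline`); with this change, a plain `VlmPipelineOptions()` already resolves to GraniteDocling, so no explicit model spec is needed.

```python
from docling.datamodel.base_models import InputFormat
from docling.datamodel.pipeline_options import VlmPipelineOptions
from docling.document_converter import DocumentConverter, PdfFormatOption
from docling.pipeline.vlm_pipeline import VlmPipeline

# Default VlmPipelineOptions now point at GraniteDocling (see the
# pipeline_options hunk further down), so no explicit vlm_options is set here.
converter = DocumentConverter(
    format_options={
        InputFormat.PDF: PdfFormatOption(
            pipeline_cls=VlmPipeline,
            pipeline_options=VlmPipelineOptions(),
        )
    }
)

doc = converter.convert("https://arxiv.org/pdf/2206.01062").document
print(doc.export_to_markdown())
```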
@@ -336,7 +336,7 @@ def convert( # noqa: C901
     vlm_model: Annotated[
         VlmModelType,
         typer.Option(..., help="Choose the VLM model to use with PDF or image files."),
-    ] = VlmModelType.SMOLDOCLING,
+    ] = VlmModelType.GRANITEDOCLING,
     asr_model: Annotated[
         AsrModelType,
         typer.Option(..., help="Choose the ASR model to use with audio/video files."),
@@ -695,7 +695,7 @@ def convert( # noqa: C901
         pipeline_options.vlm_options = GRANITEDOCLING_MLX
     except ImportError:
         _log.warning(
-            "To run SmolDocling faster, please install mlx-vlm:\n"
+            "To run GraniteDocling faster, please install mlx-vlm:\n"
             "pip install mlx-vlm"
         )
 elif vlm_model == VlmModelType.SMOLDOCLING_VLLM:
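The warning above is the CLI's MLX fallback path. A hedged sketch of the same idea for library code follows; the `mlx_vlm` import name is how the `mlx-vlm` package is exposed, and the fall-back to the transformers spec is an illustration rather than the CLI's exact behaviour.

```python
import logging

from docling.datamodel import vlm_model_specs

_log = logging.getLogger(__name__)

try:
    import mlx_vlm  # noqa: F401  # only used to detect MLX availability

    # Prefer the MLX-converted GraniteDocling checkpoint on Apple Silicon.
    vlm_options = vlm_model_specs.GRANITEDOCLING_MLX
except ImportError:
    _log.warning(
        "To run GraniteDocling faster, please install mlx-vlm:\n"
        "pip install mlx-vlm"
    )
    # Illustrative fallback: use the transformers-based spec instead.
    vlm_options = vlm_model_specs.GRANITEDOCLING_TRANSFORMERS
```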
@@ -33,6 +33,8 @@ class _AvailableModels(str, Enum):
     CODE_FORMULA = "code_formula"
     PICTURE_CLASSIFIER = "picture_classifier"
     SMOLVLM = "smolvlm"
+    GRANITEDOCLING = "granitedocling"
+    GRANITEDOCLING_MLX = "granitedocling_mlx"
     SMOLDOCLING = "smoldocling"
     SMOLDOCLING_MLX = "smoldocling_mlx"
     GRANITE_VISION = "granite_vision"
@@ -108,6 +110,8 @@ def download(
         with_code_formula=_AvailableModels.CODE_FORMULA in to_download,
         with_picture_classifier=_AvailableModels.PICTURE_CLASSIFIER in to_download,
         with_smolvlm=_AvailableModels.SMOLVLM in to_download,
+        with_granitedocling=_AvailableModels.GRANITEDOCLING in to_download,
+        with_granitedocling_mlx=_AvailableModels.GRANITEDOCLING_MLX in to_download,
         with_smoldocling=_AvailableModels.SMOLDOCLING in to_download,
         with_smoldocling_mlx=_AvailableModels.SMOLDOCLING_MLX in to_download,
         with_granite_vision=_AvailableModels.GRANITE_VISION in to_download,
@@ -12,7 +12,7 @@ from pydantic import (
 )
 from typing_extensions import deprecated
 
-from docling.datamodel import asr_model_specs
+from docling.datamodel import asr_model_specs, vlm_model_specs
 
 # Import the following for backwards compatibility
 from docling.datamodel.accelerator_options import AcceleratorDevice, AcceleratorOptions
@@ -290,7 +290,7 @@ class VlmPipelineOptions(PaginatedPipelineOptions):
     )
     # If True, text from backend will be used instead of generated text
     vlm_options: Union[InlineVlmOptions, ApiVlmOptions] = (
-        smoldocling_vlm_conversion_options
+        vlm_model_specs.GRANITEDOCLING_TRANSFORMERS
     )
 
 
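Since the default `vlm_options` now resolves to the GraniteDocling transformers spec, a quick sanity check could look like the sketch below; the expected repo id is taken from the model table further down, and the equality check is an assumption about how the default is wired.

```python
from docling.datamodel import vlm_model_specs
from docling.datamodel.pipeline_options import VlmPipelineOptions

opts = VlmPipelineOptions()  # no explicit vlm_options

# With this change, the default should be the GraniteDocling transformers spec.
assert opts.vlm_options == vlm_model_specs.GRANITEDOCLING_TRANSFORMERS
print(opts.vlm_options.repo_id)  # expected: ibm-granite/granite-docling-258M
```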
@@ -10,6 +10,8 @@ from docling.datamodel.pipeline_options import (
 )
 from docling.datamodel.settings import settings
 from docling.datamodel.vlm_model_specs import (
+    GRANITEDOCLING_MLX,
+    GRANITEDOCLING_TRANSFORMERS,
     SMOLDOCLING_MLX,
     SMOLDOCLING_TRANSFORMERS,
 )
@@ -34,6 +36,8 @@ def download_models(
     with_code_formula: bool = True,
     with_picture_classifier: bool = True,
     with_smolvlm: bool = False,
+    with_granitedocling: bool = False,
+    with_granitedocling_mlx: bool = False,
     with_smoldocling: bool = False,
     with_smoldocling_mlx: bool = False,
     with_granite_vision: bool = False,
@@ -86,6 +90,24 @@ def download_models(
             progress=progress,
         )
 
+    if with_granitedocling:
+        _log.info("Downloading GraniteDocling model...")
+        download_hf_model(
+            repo_id=GRANITEDOCLING_TRANSFORMERS.repo_id,
+            local_dir=output_dir / GRANITEDOCLING_TRANSFORMERS.repo_cache_folder,
+            force=force,
+            progress=progress,
+        )
+
+    if with_granitedocling_mlx:
+        _log.info("Downloading GraniteDocling MLX model...")
+        download_hf_model(
+            repo_id=GRANITEDOCLING_MLX.repo_id,
+            local_dir=output_dir / GRANITEDOCLING_MLX.repo_cache_folder,
+            force=force,
+            progress=progress,
+        )
+
     if with_smoldocling:
         _log.info("Downloading SmolDocling model...")
         download_hf_model(
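To prefetch the new checkpoints programmatically instead of through the CLI, the same helper can be called directly. A sketch is below; the `docling.utils.model_downloader` module path and the `output_dir`/`progress` keywords are assumptions (only the `with_granitedocling*` flags are confirmed by this diff).

```python
from pathlib import Path

from docling.utils.model_downloader import download_models  # module path assumed

# Download only the GraniteDocling checkpoints into a local cache directory.
download_models(
    output_dir=Path("./docling-models"),  # assumed keyword
    with_granitedocling=True,
    with_granitedocling_mlx=False,  # set True on Apple Silicon with mlx-vlm installed
    progress=True,                  # assumed keyword
)
```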
docs/examples/minimal_vlm_pipeline.py (4 changed lines)
@@ -32,7 +32,7 @@ from docling.pipeline.vlm_pipeline import VlmPipeline
 source = "https://arxiv.org/pdf/2501.17887"
 
 ###### USING SIMPLE DEFAULT VALUES
-# - SmolDocling model
+# - GraniteDocling model
 # - Using the transformers framework
 
 converter = DocumentConverter(
@@ -53,7 +53,7 @@ print(doc.export_to_markdown())
 # For more options see the `compare_vlm_models.py` example.
 
 pipeline_options = VlmPipelineOptions(
-    vlm_options=vlm_model_specs.SMOLDOCLING_MLX,
+    vlm_options=vlm_model_specs.GRANITEDOCLING_MLX,
 )
 
 converter = DocumentConverter(
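Assembled from the fragments above, a self-contained version of the updated example might look as follows; the `format_options` wiring mirrors the documented VLM-pipeline pattern and is not itself part of this diff. Drop the explicit `vlm_options` to fall back to the transformers default on non-Apple hardware.

```python
from docling.datamodel import vlm_model_specs
from docling.datamodel.base_models import InputFormat
from docling.datamodel.pipeline_options import VlmPipelineOptions
from docling.document_converter import DocumentConverter, PdfFormatOption
from docling.pipeline.vlm_pipeline import VlmPipeline

source = "https://arxiv.org/pdf/2501.17887"

# Use the MLX-converted GraniteDocling checkpoint (fastest on Apple Silicon).
pipeline_options = VlmPipelineOptions(
    vlm_options=vlm_model_specs.GRANITEDOCLING_MLX,
)

converter = DocumentConverter(
    format_options={
        InputFormat.PDF: PdfFormatOption(
            pipeline_cls=VlmPipeline,
            pipeline_options=pipeline_options,
        )
    }
)

doc = converter.convert(source=source).document
print(doc.export_to_markdown())
```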
docs/index.md (2 changed lines)
@@ -28,7 +28,7 @@ Docling simplifies document processing, parsing diverse formats — including ad
 * 🔒 Local execution capabilities for sensitive data and air-gapped environments
 * 🤖 Plug-and-play [integrations][integrations] incl. LangChain, LlamaIndex, Crew AI & Haystack for agentic AI
 * 🔍 Extensive OCR support for scanned PDFs and images
-* 👓 Support of several Visual Language Models ([SmolDocling](https://huggingface.co/ds4sd/SmolDocling-256M-preview))
+* 👓 Support of several Visual Language Models ([GraniteDocling](https://huggingface.co/ibm-granite/granite-docling-258M))
 * 🎙️ Support for Audio with Automatic Speech Recognition (ASR) models
 * 🔌 Connect to any agent using the [Docling MCP](https://docling-project.github.io/docling/usage/mcp/) server
 * 💻 Simple and convenient CLI
docs/usage/index.md (4 changed lines)
@@ -31,9 +31,9 @@ You can additionally use Docling directly from your terminal, for instance:
 docling https://arxiv.org/pdf/2206.01062
 ```
 
-The CLI provides various options, such as 🥚[SmolDocling](https://huggingface.co/ds4sd/SmolDocling-256M-preview) (incl. MLX acceleration) & other VLMs:
+The CLI provides various options, such as 🥚[GraniteDocling](https://huggingface.co/ibm-granite/granite-docling-258M) (incl. MLX acceleration) & other VLMs:
 ```bash
-docling --pipeline vlm --vlm-model smoldocling https://arxiv.org/pdf/2206.01062
+docling --pipeline vlm --vlm-model granitedocling https://arxiv.org/pdf/2206.01062
 ```
 
 For all available options, run `docling --help` or check the [CLI reference](../reference/cli.md).
docs/usage/vision_models.md (2 changed lines)
@@ -45,6 +45,8 @@ The following table reports the models currently available out-of-the-box.
 
 | Model instance | Model | Framework | Device | Num pages | Inference time (sec) |
 | ---------------|------ | --------- | ------ | --------- | ---------------------|
+| `vlm_model_specs.GRANITEDOCLING_TRANSFORMERS` | [ibm-granite/granite-docling-258M](https://huggingface.co/ibm-granite/granite-docling-258M) | `Transformers/AutoModelForVision2Seq` | MPS | 1 | - |
+| `vlm_model_specs.GRANITEDOCLING_MLX` | [ibm-granite/granite-docling-258M-mlx-bf16](https://huggingface.co/ibm-granite/granite-docling-258M-mlx-bf16) | `MLX`| MPS | 1 | - |
 | `vlm_model_specs.SMOLDOCLING_TRANSFORMERS` | [ds4sd/SmolDocling-256M-preview](https://huggingface.co/ds4sd/SmolDocling-256M-preview) | `Transformers/AutoModelForVision2Seq` | MPS | 1 | 102.212 |
 | `vlm_model_specs.SMOLDOCLING_MLX` | [ds4sd/SmolDocling-256M-preview-mlx-bf16](https://huggingface.co/ds4sd/SmolDocling-256M-preview-mlx-bf16) | `MLX`| MPS | 1 | 6.15453 |
 | `vlm_model_specs.QWEN25_VL_3B_MLX` | [mlx-community/Qwen2.5-VL-3B-Instruct-bf16](https://huggingface.co/mlx-community/Qwen2.5-VL-3B-Instruct-bf16) | `MLX`| MPS | 1 | 23.4951 |
@@ -83,7 +83,7 @@ nav:
     - "Custom conversion": examples/custom_convert.py
     - "Batch conversion": examples/batch_convert.py
    - "Multi-format conversion": examples/run_with_formats.py
-    - "VLM pipeline with SmolDocling": examples/minimal_vlm_pipeline.py
+    - "VLM pipeline with GraniteDocling": examples/minimal_vlm_pipeline.py
     - "VLM pipeline with remote model": examples/vlm_pipeline_api_model.py
     - "VLM comparison": examples/compare_vlm_models.py
     - "ASR pipeline with Whisper": examples/minimal_asr_pipeline.py