Unify temperature options for Vlm models

This commit is contained in:
Author: Shkarupa Alex — 2025-06-18 14:01:25 +03:00
parent 215b540f6c
commit 34d446cb98
2 changed files with 2 additions and 2 deletions

View File

@@ -12,6 +12,7 @@ class BaseVlmOptions(BaseModel):
prompt: str
scale: float = 2.0
max_size: Optional[int] = None
+    temperature: float = 0.0
class ResponseFormat(str, Enum):
@@ -51,7 +52,6 @@ class InlineVlmOptions(BaseVlmOptions):
AcceleratorDevice.MPS,
]
-    temperature: float = 0.0
stop_strings: List[str] = []
extra_generation_config: Dict[str, Any] = {}

View File

@@ -34,7 +34,7 @@ class ApiVlmModel(BasePageModel):
)
self.params = {
**self.vlm_options.params,
"temperature": 0,
"temperature": self.vlm_options.temperature,
}
def __call__(