Skip to content

Commit 96bbd08

Browse files
Merge pull request #239 from askui/feat/model_id-as-env-variable
feat: the value of `model_id` for vlm_providers can now be set as env variable
2 parents ecc49e0 + 9e9606b commit 96bbd08

6 files changed

Lines changed: 30 additions & 16 deletions

File tree

.env.template

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,9 @@ ASKUI_WORKSPACE_ID=
1212
# OpenRouter
1313
OPEN_ROUTER_API_KEY=
1414

15+
# Models
16+
VLM_PROVIDER_MODEL_ID=
17+
1518
# Telemetry
1619
ASKUI__VA__TELEMETRY__ENABLED=True # Set to "False" to disable telemetry
1720

docs/04_using_models.md

Lines changed: 11 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -18,14 +18,19 @@ with ComputerAgent() as agent:
1818

1919
## Configuring Model IDs
2020

21-
If you want to use another model, you select one of the available ones and set is through overriding the model_id in the provider:
21+
If you want to use another model, you select one of the available ones and set it as an environment variable (**currently only supported for vlm_provider!**):
22+
```
23+
VLM_PROVIDER_MODEL_ID=claude-opus-4-6
24+
```
25+
26+
Alternatively, you can also set it through overriding the model_id in the provider:
2227

2328
```python
2429
from askui import AgentSettings, ComputerAgent
2530
from askui.model_providers import AskUIVlmProvider, AskUIImageQAProvider
2631

2732
with ComputerAgent(settings=AgentSettings(
28-
vlm_provider=AskUIVlmProvider(model_id="claude-opus-4-5-20251101"),
33+
vlm_provider=AskUIVlmProvider(model_id="claude-opus-4-6"),
2934
image_qa_provider=AskUIImageQAProvider(model_id="gemini-2.5-pro"),
3035
)) as agent:
3136
agent.act("Complete the checkout process")
@@ -34,11 +39,11 @@ with ComputerAgent(settings=AgentSettings(
3439
The following models are available with your AskUI credentials through the AskUI API:
3540

3641
**VLM Provider** (for `act()`): Claude models via AskUI's Anthropic proxy
37-
- `claude-haiku-4-5-20251001`
38-
- `claude-sonnet-4-5-20250929` (default)
42+
- `claude-haiku-4-5-20251001` (most cost efficient)
43+
- `claude-sonnet-4-5-20250929`
3944
- `claude-opus-4-5-20251101`
40-
- `claude-opus-4-6`(coming soon!)
41-
- `claude-sonnet-4-6`(coming soon!)
45+
- `claude-sonnet-4-6` (default)
46+
- `claude-opus-4-6` (most capable)
4247

4348

4449
**Image Q&A Provider** (for `get()`): Gemini models via AskUI's Gemini proxy

docs/05_bring_your_own_model_provider.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ from askui.model_providers import AnthropicVlmProvider
5858

5959
with ComputerAgent(settings=AgentSettings(
6060
vlm_provider=AnthropicVlmProvider(
61-
model_id="claude-sonnet-4-5-20251101",
61+
model_id="claude-opus-4-6",
6262
),
6363
)) as agent:
6464
agent.act("Navigate to settings")

src/askui/agent_settings.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -153,7 +153,7 @@ class AgentSettings:
153153
from askui.model_providers import AskUIVlmProvider, AskUIImageQAProvider
154154
155155
agent = ComputerAgent(settings=AgentSettings(
156-
vlm_provider=AskUIVlmProvider(model_id=\"claude-opus-4-5-20251101\"),
156+
vlm_provider=AskUIVlmProvider(model_id=\"claude-opus-4-6\"),
157157
image_qa_provider=AskUIImageQAProvider(model_id=\"gemini-2.5-pro\"),
158158
))
159159
```

src/askui/model_providers/anthropic_vlm_provider.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
"""AnthropicVlmProvider — VLM access via direct Anthropic API."""
22

3+
import os
34
from functools import cached_property
45
from typing import Any
56

@@ -34,7 +35,7 @@ class AnthropicVlmProvider(VlmProvider):
3435
auth_token (str | None, optional): Authorization token for custom
3536
authentication. Added as an `Authorization` header.
3637
model_id (str, optional): Claude model to use. Defaults to
37-
`\"claude-sonnet-4-5-20251101\"`.
38+
`\"claude-sonnet-4-6\"`.
3839
client (Anthropic | None, optional): Pre-configured Anthropic client.
3940
If provided, other connection parameters are ignored.
4041
@@ -57,10 +58,12 @@ def __init__(
5758
api_key: str | None = None,
5859
base_url: str | None = None,
5960
auth_token: str | None = None,
60-
model_id: str = _DEFAULT_MODEL_ID,
61+
model_id: str | None = None,
6162
client: Anthropic | None = None,
6263
) -> None:
63-
self._model_id_value = model_id
64+
self._model_id_value = (
65+
model_id or os.environ.get("VLM_PROVIDER_MODEL_ID") or _DEFAULT_MODEL_ID
66+
)
6467
if client is not None:
6568
self.client = client
6669
else:

src/askui/model_providers/askui_vlm_provider.py

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
"""AskUIVlmProvider — VLM access via AskUI's hosted Anthropic proxy."""
22

3+
import os
34
from functools import cached_property
45
from typing import Any
56

@@ -33,7 +34,7 @@ class AskUIVlmProvider(VlmProvider):
3334
token (str | None, optional): AskUI API token. Reads `ASKUI_TOKEN`
3435
from the environment if not provided.
3536
model_id (str, optional): Claude model to use. Defaults to
36-
`"claude-sonnet-4-5-20250929"`.
37+
`"claude-sonnet-4-6"`.
3738
client (Anthropic | None, optional): Pre-configured Anthropic client.
3839
If provided, `workspace_id` and `token` are ignored.
3940
@@ -55,17 +56,19 @@ class AskUIVlmProvider(VlmProvider):
5556
def __init__(
5657
self,
5758
askui_settings: AskUiInferenceApiSettings | None = None,
58-
model_id: str = _DEFAULT_MODEL_ID,
59+
model_id: str | None = None,
5960
client: Anthropic | None = None,
6061
) -> None:
6162
self._askui_settings = askui_settings or AskUiInferenceApiSettings()
62-
self._model_id = model_id
63+
self._model_id_value = (
64+
model_id or os.environ.get("VLM_PROVIDER_MODEL_ID") or _DEFAULT_MODEL_ID
65+
)
6366
self._injected_client = client
6467

6568
@property
6669
@override
6770
def model_id(self) -> str:
68-
return self._model_id
71+
return self._model_id_value
6972

7073
@cached_property
7174
def _messages_api(self) -> AnthropicMessagesApi:
@@ -100,7 +103,7 @@ def create_message(
100103
) -> MessageParam:
101104
result: MessageParam = self._messages_api.create_message(
102105
messages=messages,
103-
model_id=self._model_id,
106+
model_id=self._model_id_value,
104107
tools=tools,
105108
max_tokens=max_tokens,
106109
system=system,

0 commit comments

Comments
 (0)