-
Notifications
You must be signed in to change notification settings - Fork 1.1k
Add OpenRouterModel
#1870
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Closed
DanKing1903
wants to merge
9
commits into
pydantic:main
from
DanKing1903:feature/add-openrouter-model
Closed
Add OpenRouterModel
#1870
Changes from all commits
Commits
Show all changes
9 commits
Select commit
Hold shift + click to select a range
1341160
Add `OpenRouterModel`
DanKing1903 4b217d9
Add LLM provider error handling to OpenRouterModel
DanKing1903 d43f8a3
Merge remote-tracking branch 'upstream/main' into feature/add-openrou…
DanKing1903 67bb1e6
Merge branch 'main' into feature/add-openrouter-model
DanKing1903 eb479b6
chore: move openrouter error check test to models test dir
DanKing1903 19bf3da
chore: directly access model_response.vendor_details
DanKing1903 f3a95ac
chore: simplify error check
DanKing1903 57f5e09
chore: implement init method for OpenRouterModel
DanKing1903 c7aebf3
chore: moved openrouter error check to _completions_create
DanKing1903 File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,105 @@ | ||
from typing import Any, Literal, overload | ||
|
||
from openai import AsyncStream | ||
from openai.types.chat import ChatCompletion, ChatCompletionChunk | ||
from pydantic import BaseModel | ||
|
||
from .. import ModelHTTPError | ||
from ..messages import ModelMessage, ModelResponse | ||
from ..profiles import ModelProfileSpec | ||
from ..providers.openrouter import OpenRouterProvider | ||
from . import ModelRequestParameters | ||
from .openai import OpenAIModel, OpenAIModelName, OpenAIModelSettings, OpenAISystemPromptRole | ||
|
||
|
||
class OpenRouterErrorResponse(BaseModel):
    """Error payload relayed by OpenRouter from an upstream LLM provider.

    OpenRouter can return an HTTP 200 response whose body carries an
    ``error`` object instead of a completion when the upstream provider
    fails; this model parses that object.

    Attributes:
        code: The error code returned by the upstream LLM provider.
        message: The error message returned by OpenRouter.
        metadata: Additional error context provided by OpenRouter, if any.

    See: https://openrouter.ai/docs/api-reference/errors
    """

    code: int
    message: str
    metadata: dict[str, Any] | None
|
||
|
||
class OpenRouterChatCompletion(ChatCompletion):
    """`ChatCompletion` subclass carrying OpenRouter-specific metadata.

    OpenRouter responses include fields beyond the standard OpenAI chat
    completion schema; this model surfaces them so they can be read after
    parsing.

    Attributes:
        provider: Name of the upstream LLM provider (e.g. "Anthropic",
            "OpenAI") that served the request through OpenRouter.
    """

    provider: str
|
||
|
||
class OpenRouterModel(OpenAIModel):
    """Extends `OpenAIModel` to capture extra metadata for OpenRouter.

    OpenRouter proxies requests to upstream LLM providers. Compared to the
    plain OpenAI-compatible behavior, this model:

    * raises `ModelHTTPError` when OpenRouter relays an upstream provider
      error inside an otherwise-successful (HTTP 200) response body;
    * records the upstream provider name in `ModelResponse.vendor_details`.
    """

    def __init__(
        self,
        model_name: OpenAIModelName,
        *,
        provider: Literal['openrouter'] | OpenRouterProvider = 'openrouter',
        profile: ModelProfileSpec | None = None,
        system_prompt_role: OpenAISystemPromptRole | None = None,
    ):
        """Initialize an OpenRouter model.

        Args:
            model_name: The model to use, e.g. `'google/gemini-2.0-flash-exp:free'`.
            provider: The provider to use; narrowed to OpenRouter-compatible
                providers. Defaults to `'openrouter'`.
            profile: The model profile to use; defaults to the one selected
                by the provider.
            system_prompt_role: The role to use for the system prompt message,
                if it needs to be overridden.
        """
        super().__init__(model_name, provider=provider, profile=profile, system_prompt_role=system_prompt_role)

    @overload
    async def _completions_create(
        self,
        messages: list[ModelMessage],
        stream: Literal[True],
        model_settings: OpenAIModelSettings,
        model_request_parameters: ModelRequestParameters,
    ) -> AsyncStream[ChatCompletionChunk]: ...

    @overload
    async def _completions_create(
        self,
        messages: list[ModelMessage],
        stream: Literal[False],
        model_settings: OpenAIModelSettings,
        model_request_parameters: ModelRequestParameters,
    ) -> ChatCompletion: ...

    async def _completions_create(
        self,
        messages: list[ModelMessage],
        stream: bool,
        model_settings: OpenAIModelSettings,
        model_request_parameters: ModelRequestParameters,
    ) -> ChatCompletion | AsyncStream[ChatCompletionChunk]:
        """Create a completion, raising `ModelHTTPError` for relayed provider errors.

        OpenRouter returns HTTP 200 with an `error` object in the body when
        the upstream provider fails, so the base class does not raise; the
        error has to be detected here.
        """
        response = await super()._completions_create(
            messages=messages,
            stream=stream,
            model_settings=model_settings,
            model_request_parameters=model_request_parameters,
        )
        if error := getattr(response, 'error', None):
            parsed_error = OpenRouterErrorResponse.model_validate(error)
            raise ModelHTTPError(
                status_code=parsed_error.code, model_name=self.model_name, body=parsed_error.model_dump()
            )
        else:
            return response

    def _process_response(self, response: ChatCompletion) -> ModelResponse:
        """Process the response, recording the upstream provider in `vendor_details`."""
        # Rebuild as the OpenRouter-specific subclass so the extra `provider`
        # field is carried through. `model_construct` is the pydantic v2
        # spelling (the bare `construct` is a deprecated v1 alias); it skips
        # validation, same as before.
        response = OpenRouterChatCompletion.model_construct(**response.model_dump())
        model_response = super()._process_response(response=response)
        openrouter_provider: str | None = getattr(response, 'provider', None)
        if openrouter_provider:
            vendor_details: dict[str, Any] = model_response.vendor_details or {}
            vendor_details['provider'] = openrouter_provider
            model_response.vendor_details = vendor_details
        return model_response
50 changes: 50 additions & 0 deletions
50
tests/models/cassettes/test_openrouter/test_openrouter_errors_raised.yaml
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,50 @@ | ||
interactions: | ||
- request: | ||
headers: | ||
accept: | ||
- application/json | ||
accept-encoding: | ||
- gzip, deflate | ||
connection: | ||
- keep-alive | ||
content-length: | ||
- '164' | ||
content-type: | ||
- application/json | ||
host: | ||
- openrouter.ai | ||
method: POST | ||
parsed_body: | ||
messages: | ||
- content: Be helpful. | ||
role: system | ||
- content: Tell me a joke. | ||
role: user | ||
model: google/gemini-2.0-flash-exp:free | ||
n: 1 | ||
stream: false | ||
uri: https://openrouter.ai/api/v1/chat/completions | ||
response: | ||
headers: | ||
access-control-allow-origin: | ||
- '*' | ||
connection: | ||
- keep-alive | ||
content-length: | ||
- '242' | ||
content-type: | ||
- application/json | ||
vary: | ||
- Accept-Encoding | ||
parsed_body: | ||
error: | ||
code: 429 # Upstream LLM provider error | ||
message: Provider returned error | ||
metadata: | ||
provider_name: Google | ||
raw: google/gemini-2.0-flash-exp:free is temporarily rate-limited upstream; please retry shortly. | ||
user_id: user_2uRh0l3Yi3hdjBArTOSmLXWJBc4 | ||
status: | ||
code: 200 # Openrouter returns 200 OK | ||
message: OK | ||
version: 1 |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,27 @@ | ||
import pytest | ||
from inline_snapshot import snapshot | ||
|
||
from pydantic_ai import Agent, ModelHTTPError | ||
|
||
from ..conftest import try_import | ||
|
||
with try_import() as imports_successful: | ||
from pydantic_ai.models.openrouter import OpenRouterModel | ||
from pydantic_ai.providers.openrouter import OpenRouterProvider | ||
|
||
# Skip the module when the optional `openai` dependency is not installed;
# replay HTTP traffic from VCR cassettes; run async tests under anyio.
pytestmark = [
    pytest.mark.skipif(not imports_successful(), reason='openai not installed'),
    pytest.mark.vcr,
    pytest.mark.anyio,
]
|
||
|
||
async def test_openrouter_errors_raised(allow_model_requests: None, openrouter_api_key: str) -> None:
    """An upstream provider error relayed by OpenRouter surfaces as `ModelHTTPError`."""
    openrouter = OpenRouterProvider(api_key=openrouter_api_key)
    gemini = OpenRouterModel('google/gemini-2.0-flash-exp:free', provider=openrouter)
    agent = Agent(gemini, instructions='Be helpful.', retries=1)
    with pytest.raises(ModelHTTPError) as exc_info:
        await agent.run('Tell me a joke.')
    assert str(exc_info.value) == snapshot(
        "status_code: 429, model_name: google/gemini-2.0-flash-exp:free, body: {'code': 429, 'message': 'Provider returned error', 'metadata': {'provider_name': 'Google', 'raw': 'google/gemini-2.0-flash-exp:free is temporarily rate-limited upstream; please retry shortly.'}}"
    )
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Can we please override
__init__
to automatically use the OpenRouterProvider as well when this model is used?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
@DouweM I have added an `__init__` method with narrowed type annotations for `provider` that does nothing apart from calling `super().__init__`. Is this acceptable, or would you like me to add something like: