8000 llama_cpp client: commit the autogenerated API client · abetlen/llama-cpp-python@26fbf88 · GitHub
[go: up one dir, main page]

Skip to content

Commit 26fbf88

Browse files
committed
llama_cpp client: commit the autogenerated API client
Result of running ./bin/generate_api_client
1 parent 698d043 commit 26fbf88

24 files changed

+4817
-1
lines changed

.gitignore

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ _skbuild/
22

33
.envrc
44

5-
models/
5+
./models/
66

77
# Byte-compiled / optimized / DLL files
88
__pycache__/

llama_cpp/client/__init__.py

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.9.5, generator: @autorest/python@6.4.11)
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

from ._client import LlamaCppPythonAPI
from ._version import VERSION

# Surface the generated client's version at package level.
__version__ = VERSION

try:
    # Pull in any hand-written customizations shipped alongside the generated
    # code; a missing or partial _patch module is not an error here.
    from ._patch import *  # pylint: disable=unused-wildcard-import
    from ._patch import __all__ as _patch_all
except ImportError:
    _patch_all = []
from ._patch import patch_sdk as _patch_sdk

__all__ = ["LlamaCppPythonAPI"]
# Re-export patched names, skipping anything already listed above.
__all__ += [name for name in _patch_all if name not in __all__]

# Give the customization layer its last-resort hook at import time.
_patch_sdk()

llama_cpp/client/_client.py

Lines changed: 81 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,81 @@
1+
# coding=utf-8
2+
# --------------------------------------------------------------------------
3+
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.9.5, generator: @autorest/python@6.4.11)
4+
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
5+
# --------------------------------------------------------------------------
6+
7+
from copy import deepcopy
8+
from typing import Any
9+
10+
from azure.core import PipelineClient
11+
from azure.core.rest import HttpRequest, HttpResponse
12+
13+
from . import models as _models
14+
from ._configuration import LlamaCppPythonAPIConfiguration
15+
from ._serialization import Deserializer, Serializer
16+
from .operations import CreateOperations, GetOperations
17+
18+
19+
class LlamaCppPythonAPI:  # pylint: disable=client-accepts-api-version-keyword
    """LlamaCppPythonAPI.

    :ivar create: CreateOperations operations
    :vartype create: llama_cpp.client.operations.CreateOperations
    :ivar get: GetOperations operations
    :vartype get: llama_cpp.client.operations.GetOperations
    :keyword endpoint: Service URL. Required. Default value is "".
    :paramtype endpoint: str
    """

    def __init__(  # pylint: disable=missing-client-constructor-parameter-credential
        self, *, endpoint: str = "", **kwargs: Any
    ) -> None:
        # Build the pipeline configuration first, then the HTTP pipeline
        # that will carry every request made through this client.
        self._config = LlamaCppPythonAPIConfiguration(**kwargs)
        self._client: PipelineClient = PipelineClient(
            base_url=endpoint, config=self._config, **kwargs
        )

        # Every class exported by the models module takes part in
        # (de)serialization; collect them once and share both ways.
        client_models = {
            name: obj for name, obj in vars(_models).items() if isinstance(obj, type)
        }
        self._serialize = Serializer(client_models)
        self._serialize.client_side_validation = False
        self._deserialize = Deserializer(client_models)

        # Operation groups share the pipeline, config and (de)serializers.
        shared = (self._client, self._config, self._serialize, self._deserialize)
        self.create = CreateOperations(*shared)
        self.get = GetOperations(*shared)

    def send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse:
        """Runs the network request through the client's chained policies.

        >>> from azure.core.rest import HttpRequest
        >>> request = HttpRequest("GET", "https://www.example.org/")
        <HttpRequest [GET], url: 'https://www.example.org/'>
        >>> response = client.send_request(request)
        <HttpResponse: 200 OK>

        For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request

        :param request: The network request you want to make. Required.
        :type request: ~azure.core.rest.HttpRequest
        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
        :return: The response of your network call. Does not do error handling on your response.
        :rtype: ~azure.core.rest.HttpResponse
        """
        # Work on a copy so the caller's request object is never mutated
        # when its relative URL is expanded against the base endpoint.
        outgoing = deepcopy(request)
        outgoing.url = self._client.format_url(outgoing.url)
        return self._client.send_request(outgoing, **kwargs)

    def close(self) -> None:
        # Release the underlying pipeline's transport resources.
        self._client.close()

    def __enter__(self) -> "LlamaCppPythonAPI":
        self._client.__enter__()
        return self

    def __exit__(self, *exc_details: Any) -> None:
        self._client.__exit__(*exc_details)

llama_cpp/client/_configuration.py

Lines changed: 51 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,51 @@
1+
# coding=utf-8
2+
# --------------------------------------------------------------------------
3+
# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.9.5, generator: @autorest/python@6.4.11)
4+
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
5+
# --------------------------------------------------------------------------
6+
7+
from typing import Any
8+
9+
from azure.core.configuration import Configuration
10+
from azure.core.pipeline import policies
11+
12+
from ._version import VERSION
13+
14+
15+
class LlamaCppPythonAPIConfiguration(
    Configuration
):  # pylint: disable=too-many-instance-attributes
    """Configuration for LlamaCppPythonAPI.

    Note that all parameters used to create this instance are saved as instance
    attributes.
    """

    def __init__(self, **kwargs: Any) -> None:
        super(LlamaCppPythonAPIConfiguration, self).__init__(**kwargs)

        # Advertise this client in the User-Agent header unless the caller
        # already supplied a moniker of their own.
        kwargs.setdefault("sdk_moniker", "llama-cpp-api-client/{}".format(VERSION))
        self._configure(**kwargs)

    def _configure(self, **kwargs: Any) -> None:
        # A caller-supplied (truthy) policy wins; otherwise fall back to the
        # azure-core default built from the same keyword arguments.
        def _policy(key, default_factory):
            return kwargs.get(key) or default_factory(**kwargs)

        self.user_agent_policy = _policy("user_agent_policy", policies.UserAgentPolicy)
        self.headers_policy = _policy("headers_policy", policies.HeadersPolicy)
        self.proxy_policy = _policy("proxy_policy", policies.ProxyPolicy)
        self.logging_policy = _policy(
            "logging_policy", policies.NetworkTraceLoggingPolicy
        )
        self.http_logging_policy = _policy(
            "http_logging_policy", policies.HttpLoggingPolicy
        )
        self.retry_policy = _policy("retry_policy", policies.RetryPolicy)
        self.custom_hook_policy = _policy(
            "custom_hook_policy", policies.CustomHookPolicy
        )
        self.redirect_policy = _policy("redirect_policy", policies.RedirectPolicy)
        # No default here: stays None unless the caller provides one.
        self.authentication_policy = kwargs.get("authentication_policy")

llama_cpp/client/_patch.py

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
"""Customize generated code here.

Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
"""
from typing import List

# Add all objects you want publicly available to users at this package level.
__all__: List[str] = []
14+
15+
16+
def patch_sdk():
    """Do not remove from this file.

    `patch_sdk` is a last resort escape hatch that allows you to do customizations
    you can't accomplish using the techniques described in
    https://aka.ms/azsdk/python/dpcodegen/python/customize
    """
    # Intentionally a no-op: the package __init__ invokes this hook at import
    # time, and customizations are added here only when needed.

0 commit comments

Comments
 (0)
0