Release 0.0.0-alpha0
fern-api[bot] committed Oct 5, 2024
1 parent b6baad4 commit 9ee3ac6
Showing 6 changed files with 74 additions and 41 deletions.
10 changes: 7 additions & 3 deletions README.md
@@ -1,6 +1,6 @@
# Vapi Python Library

[![fern shield](https://img.shields.io/badge/%F0%9F%8C%BF-SDK%20generated%20by%20Fern-brightgreen)](https://github.com/fern-api/fern)
[![fern shield](https://img.shields.io/badge/%F0%9F%8C%BF-Built%20with%20Fern-brightgreen)](https://buildwithfern.com?utm_source=github&utm_medium=github&utm_campaign=readme&utm_source=https%3A%2F%2Fgithub.com%2Ffern-demo%2Fvapi-python-sdk)
[![pypi](https://img.shields.io/pypi/v/Vapi)](https://pypi.python.org/pypi/Vapi)

The Vapi Python library provides convenient access to the Vapi API from Python.
@@ -11,6 +11,10 @@ The Vapi Python library provides convenient access to the Vapi API from Python.
pip install Vapi
```

## Reference

A full reference for this library is available [here](./reference.md).

## Usage

Instantiate and use the client with the following:
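
The quickstart snippet itself is collapsed in this diff, so here is a minimal sketch of the usage the sentence above refers to. The `token` keyword and the `calls.create` method are taken from the hunks shown elsewhere in this commit; the argument values are placeholders, not the SDK's documented defaults.

```python
from vapi import Vapi

# Placeholder token; the client wrapper sends it as a Bearer Authorization header.
client = Vapi(token="YOUR_API_TOKEN")

# `calls.create` is the method used in the retry and timeout examples below;
# substitute the request parameters your call actually needs.
call = client.calls.create(
    # ...request parameters...
)
```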
@@ -95,7 +99,7 @@ A request is deemed retriable when any of the following HTTP status codes is returned:
Use the `max_retries` request option to configure this behavior.

```python
client.calls.create(..., {
client.calls.create(..., request_options={
"max_retries": 1
})
```
@@ -115,7 +119,7 @@ client = Vapi(


# Override timeout for a specific method
client.calls.create(..., {
client.calls.create(..., request_options={
"timeout_in_seconds": 1
})
```
22 changes: 11 additions & 11 deletions poetry.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "Vapi"
version = "0.0.9"
version = "0.0.0-alpha0"
description = ""
readme = "README.md"
authors = []
14 changes: 7 additions & 7 deletions src/vapi/core/client_wrapper.py
@@ -22,7 +22,7 @@ def get_headers(self) -> typing.Dict[str, str]:
headers: typing.Dict[str, str] = {
"X-Fern-Language": "Python",
"X-Fern-SDK-Name": "Vapi",
"X-Fern-SDK-Version": "0.0.9",
"X-Fern-SDK-Version": "0.0.0-alpha0",
}
headers["Authorization"] = f"Bearer {self._get_token()}"
return headers
@@ -52,9 +52,9 @@ def __init__(
super().__init__(token=token, base_url=base_url, timeout=timeout)
self.httpx_client = HttpClient(
httpx_client=httpx_client,
base_headers=self.get_headers(),
base_timeout=self.get_timeout(),
base_url=self.get_base_url(),
base_headers=self.get_headers,
base_timeout=self.get_timeout,
base_url=self.get_base_url,
)


@@ -70,7 +70,7 @@ def __init__(
super().__init__(token=token, base_url=base_url, timeout=timeout)
self.httpx_client = AsyncHttpClient(
httpx_client=httpx_client,
base_headers=self.get_headers(),
base_timeout=self.get_timeout(),
base_url=self.get_base_url(),
base_headers=self.get_headers,
base_timeout=self.get_timeout,
base_url=self.get_base_url,
)
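
The change above stops evaluating `get_headers()`, `get_timeout()`, and `get_base_url()` once at construction and instead hands the bound methods to the HTTP client, which calls them on every request. A minimal sketch of that lazy-configuration pattern, using illustrative names rather than the SDK's actual classes:

```python
import typing


class LazyConfigHttpClient:
    """Toy example: configuration arrives as callables and is re-read per request."""

    def __init__(self, get_headers: typing.Callable[[], typing.Dict[str, str]]) -> None:
        self._get_headers = get_headers

    def request(self) -> typing.Dict[str, str]:
        # Evaluated at request time, so later changes (e.g. a refreshed token) are picked up.
        return self._get_headers()


token = "old-token"
client = LazyConfigHttpClient(lambda: {"Authorization": f"Bearer {token}"})
print(client.request())  # {'Authorization': 'Bearer old-token'}

token = "new-token"
print(client.request())  # {'Authorization': 'Bearer new-token'}
```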
38 changes: 22 additions & 16 deletions src/vapi/core/http_client.py
@@ -152,17 +152,20 @@ def __init__(
self,
*,
httpx_client: httpx.Client,
base_timeout: typing.Optional[float],
base_headers: typing.Dict[str, str],
base_url: typing.Optional[str] = None,
base_timeout: typing.Callable[[], typing.Optional[float]],
base_headers: typing.Callable[[], typing.Dict[str, str]],
base_url: typing.Optional[typing.Callable[[], str]] = None,
):
self.base_url = base_url
self.base_timeout = base_timeout
self.base_headers = base_headers
self.httpx_client = httpx_client

def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str:
base_url = self.base_url if maybe_base_url is None else maybe_base_url
base_url = maybe_base_url
if self.base_url is not None and base_url is None:
base_url = self.base_url()

if base_url is None:
raise ValueError("A base_url is required to make this request, please provide one and try again.")
return base_url
@@ -187,7 +190,7 @@ def request(
timeout = (
request_options.get("timeout_in_seconds")
if request_options is not None and request_options.get("timeout_in_seconds") is not None
else self.base_timeout
else self.base_timeout()
)

json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
@@ -198,7 +201,7 @@ headers=jsonable_encoder(
headers=jsonable_encoder(
remove_none_from_dict(
{
**self.base_headers,
**self.base_headers(),
**(headers if headers is not None else {}),
**(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
}
@@ -271,7 +274,7 @@ def stream(
timeout = (
request_options.get("timeout_in_seconds")
if request_options is not None and request_options.get("timeout_in_seconds") is not None
else self.base_timeout
else self.base_timeout()
)

json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
@@ -282,7 +285,7 @@ headers=jsonable_encoder(
headers=jsonable_encoder(
remove_none_from_dict(
{
**self.base_headers,
**self.base_headers(),
**(headers if headers is not None else {}),
**(request_options.get("additional_headers", {}) if request_options is not None else {}),
}
@@ -321,17 +324,20 @@ def __init__(
self,
*,
httpx_client: httpx.AsyncClient,
base_timeout: typing.Optional[float],
base_headers: typing.Dict[str, str],
base_url: typing.Optional[str] = None,
base_timeout: typing.Callable[[], typing.Optional[float]],
base_headers: typing.Callable[[], typing.Dict[str, str]],
base_url: typing.Optional[typing.Callable[[], str]] = None,
):
self.base_url = base_url
self.base_timeout = base_timeout
self.base_headers = base_headers
self.httpx_client = httpx_client

def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str:
base_url = self.base_url if maybe_base_url is None else maybe_base_url
base_url = maybe_base_url
if self.base_url is not None and base_url is None:
base_url = self.base_url()

if base_url is None:
raise ValueError("A base_url is required to make this request, please provide one and try again.")
return base_url
@@ -356,7 +362,7 @@ async def request(
timeout = (
request_options.get("timeout_in_seconds")
if request_options is not None and request_options.get("timeout_in_seconds") is not None
else self.base_timeout
else self.base_timeout()
)

json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
@@ -368,7 +374,7 @@ headers=jsonable_encoder(
headers=jsonable_encoder(
remove_none_from_dict(
{
**self.base_headers,
**self.base_headers(),
**(headers if headers is not None else {}),
**(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
}
@@ -438,7 +444,7 @@ async def stream(
timeout = (
request_options.get("timeout_in_seconds")
if request_options is not None and request_options.get("timeout_in_seconds") is not None
else self.base_timeout
else self.base_timeout()
)

json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
Expand All @@ -449,7 +455,7 @@ async def stream(
headers=jsonable_encoder(
remove_none_from_dict(
{
**self.base_headers,
**self.base_headers(),
**(headers if headers is not None else {}),
**(request_options.get("additional_headers", {}) if request_options is not None else {}),
}
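
Both the sync and async clients now resolve the base URL the same way: a per-request `base_url` wins, otherwise the configured `base_url` callable is invoked, and if neither is available the request fails fast. A standalone sketch of that resolution order, with illustrative URLs and names:

```python
import typing


def resolve_base_url(
    maybe_base_url: typing.Optional[str],
    base_url_provider: typing.Optional[typing.Callable[[], str]],
) -> str:
    # A per-request override takes precedence over the configured provider.
    base_url = maybe_base_url
    if base_url_provider is not None and base_url is None:
        base_url = base_url_provider()
    if base_url is None:
        raise ValueError("A base_url is required to make this request, please provide one and try again.")
    return base_url


print(resolve_base_url(None, lambda: "https://api.example.com"))                     # falls back to the provider
print(resolve_base_url("https://override.example.com", lambda: "https://api.example.com"))  # override wins
```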
29 changes: 26 additions & 3 deletions src/vapi/core/pydantic_utilities.py
@@ -152,7 +152,7 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
)

else:
_fields_set = self.__fields_set__
_fields_set = self.__fields_set__.copy()

fields = _get_model_fields(self.__class__)
for name, field in fields.items():
@@ -162,9 +162,12 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
# If the default values are non-null act like they've been set
# This effectively allows exclude_unset to work like exclude_none where
# the latter passes through intentionally set none values.
if default != None:
if default is not None or ("exclude_unset" in kwargs and not kwargs["exclude_unset"]):
_fields_set.add(name)

if default is not None:
self.__fields_set__.add(name)

kwargs_with_defaults_exclude_unset_include_fields: typing.Any = {
"by_alias": True,
"exclude_unset": True,
@@ -177,13 +180,33 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
return convert_and_respect_annotation_metadata(object_=dict_dump, annotation=self.__class__, direction="write")


def _union_list_of_pydantic_dicts(
source: typing.List[typing.Any], destination: typing.List[typing.Any]
) -> typing.List[typing.Any]:
converted_list: typing.List[typing.Any] = []
for i, item in enumerate(source):
destination_value = destination[i] # type: ignore
if isinstance(item, dict):
converted_list.append(deep_union_pydantic_dicts(item, destination_value))
elif isinstance(item, list):
converted_list.append(_union_list_of_pydantic_dicts(item, destination_value))
else:
converted_list.append(item)
return converted_list


def deep_union_pydantic_dicts(
source: typing.Dict[str, typing.Any], destination: typing.Dict[str, typing.Any]
) -> typing.Dict[str, typing.Any]:
for key, value in source.items():
node = destination.setdefault(key, {})
if isinstance(value, dict):
node = destination.setdefault(key, {})
deep_union_pydantic_dicts(value, node)
# Note: we do not do this same processing for sets given we do not have sets of models
# and given the sets are unordered, the processing of the set and matching objects would
# be non-trivial.
elif isinstance(value, list):
destination[key] = _union_list_of_pydantic_dicts(value, node)
else:
destination[key] = value

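
The new `_union_list_of_pydantic_dicts` helper makes the deep merge recurse into lists element-wise instead of letting a source list overwrite the destination list wholesale. A small usage sketch of that behavior, assuming the collapsed tail of `deep_union_pydantic_dicts` returns the mutated `destination` as in the rest of the generated code:

```python
source = {"nested": [{"x": 1}], "plain": [1, 2], "a": 1}
destination = {"b": 2, "nested": [{"y": 2}], "plain": [9, 9]}

deep_union_pydantic_dicts(source, destination)

# `destination` is mutated in place: dicts inside lists are merged position by
# position, while scalar lists and scalar values are simply overwritten.
print(destination)
# {'b': 2, 'nested': [{'y': 2, 'x': 1}], 'plain': [1, 2], 'a': 1}
```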
