Skip to content

Commit

Permalink
SDK regeneration
Browse files Browse the repository at this point in the history
  • Loading branch information
fern-api[bot] committed Jan 15, 2025
1 parent 8c99227 commit 988356c
Show file tree
Hide file tree
Showing 7 changed files with 110 additions and 41 deletions.
2 changes: 0 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
[project]
name = "hume"
[tool.poetry]
name = "hume"
version = "0.7.6"
Expand Down
3 changes: 2 additions & 1 deletion src/hume/core/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from .api_error import ApiError
from .client_wrapper import AsyncClientWrapper, BaseClientWrapper, SyncClientWrapper
from .datetime_utils import serialize_datetime
from .file import File, convert_file_dict_to_httpx_tuples
from .file import File, convert_file_dict_to_httpx_tuples, with_content_type
from .http_client import AsyncHttpClient, HttpClient
from .jsonable_encoder import jsonable_encoder
from .pagination import AsyncPager, SyncPager
Expand Down Expand Up @@ -46,4 +46,5 @@
"universal_field_validator",
"universal_root_validator",
"update_forward_refs",
"with_content_type",
]
41 changes: 30 additions & 11 deletions src/hume/core/file.py
Original file line number Diff line number Diff line change
@@ -1,30 +1,30 @@
# This file was auto-generated by Fern from our API Definition.

import typing
from typing import IO, Dict, List, Mapping, Optional, Tuple, Union, cast

# File typing inspired by the flexibility of types within the httpx library
# https://github.com/encode/httpx/blob/master/httpx/_types.py
FileContent = typing.Union[typing.IO[bytes], bytes, str]
File = typing.Union[
FileContent = Union[IO[bytes], bytes, str]
File = Union[
# file (or bytes)
FileContent,
# (filename, file (or bytes))
typing.Tuple[typing.Optional[str], FileContent],
Tuple[Optional[str], FileContent],
# (filename, file (or bytes), content_type)
typing.Tuple[typing.Optional[str], FileContent, typing.Optional[str]],
Tuple[Optional[str], FileContent, Optional[str]],
# (filename, file (or bytes), content_type, headers)
typing.Tuple[
typing.Optional[str],
Tuple[
Optional[str],
FileContent,
typing.Optional[str],
typing.Mapping[str, str],
Optional[str],
Mapping[str, str],
],
]


def convert_file_dict_to_httpx_tuples(
d: typing.Dict[str, typing.Union[File, typing.List[File]]],
) -> typing.List[typing.Tuple[str, File]]:
d: Dict[str, Union[File, List[File]]],
) -> List[Tuple[str, File]]:
"""
The format we use is a list of tuples, where the first element is the
name of the file and the second is the file object. Typically HTTPX wants
Expand All @@ -41,3 +41,22 @@ def convert_file_dict_to_httpx_tuples(
else:
httpx_tuples.append((key, file_like))
return httpx_tuples


def with_content_type(*, file: File, content_type: str) -> File:
    """Return *file* with its content type set to *content_type*.

    Accepts any of the ``File`` union forms and normalizes the result so the
    content type occupies the third tuple slot:

    - bare content               -> ``(None, content, content_type)``
    - ``(filename, content)``    -> ``(filename, content, content_type)``
    - 3-tuple with old type      -> old content type replaced
    - 4-tuple with headers       -> old content type replaced, headers kept

    Raises:
        ValueError: If *file* is a tuple of an unsupported length.
    """
    if isinstance(file, tuple):
        if len(file) == 2:
            filename, content = cast(Tuple[Optional[str], FileContent], file)  # type: ignore
            return (filename, content, content_type)
        elif len(file) == 3:
            filename, content, _ = cast(Tuple[Optional[str], FileContent, Optional[str]], file)  # type: ignore
            return (filename, content, content_type)
        elif len(file) == 4:
            filename, content, _, headers = cast(  # type: ignore
                Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], file
            )
            return (filename, content, content_type, headers)
        else:
            raise ValueError(f"Unexpected tuple length: {len(file)}")
    # Bare content (bytes, str, or IO) gets no filename.
    return (None, file, content_type)
46 changes: 28 additions & 18 deletions src/hume/core/http_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -152,17 +152,20 @@ def __init__(
self,
*,
httpx_client: httpx.Client,
base_timeout: typing.Optional[float],
base_headers: typing.Dict[str, str],
base_url: typing.Optional[str] = None,
base_timeout: typing.Callable[[], typing.Optional[float]],
base_headers: typing.Callable[[], typing.Dict[str, str]],
base_url: typing.Optional[typing.Callable[[], str]] = None,
):
self.base_url = base_url
self.base_timeout = base_timeout
self.base_headers = base_headers
self.httpx_client = httpx_client

def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str:
    """Resolve the base URL for a request.

    An explicitly supplied ``maybe_base_url`` always wins; otherwise fall
    back to the client-level ``base_url`` factory, if one was configured.

    Raises:
        ValueError: If neither source yields a base URL.
    """
    # NOTE: removed a dead assignment left over from the diff — the old
    # single-line fallback was immediately overwritten by the new logic.
    base_url = maybe_base_url
    if base_url is None and self.base_url is not None:
        base_url = self.base_url()

    if base_url is None:
        raise ValueError("A base_url is required to make this request, please provide one and try again.")
    return base_url
Expand All @@ -187,7 +190,7 @@ def request(
timeout = (
request_options.get("timeout_in_seconds")
if request_options is not None and request_options.get("timeout_in_seconds") is not None
else self.base_timeout
else self.base_timeout()
)

json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
Expand All @@ -198,7 +201,7 @@ def request(
headers=jsonable_encoder(
remove_none_from_dict(
{
**self.base_headers,
**self.base_headers(),
**(headers if headers is not None else {}),
**(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
}
Expand All @@ -224,7 +227,9 @@ def request(
json=json_body,
data=data_body,
content=content,
files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None,
files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files))
if (files is not None and files is not omit)
else None,
timeout=timeout,
)

Expand Down Expand Up @@ -269,7 +274,7 @@ def stream(
timeout = (
request_options.get("timeout_in_seconds")
if request_options is not None and request_options.get("timeout_in_seconds") is not None
else self.base_timeout
else self.base_timeout()
)

json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
Expand All @@ -280,7 +285,7 @@ def stream(
headers=jsonable_encoder(
remove_none_from_dict(
{
**self.base_headers,
**self.base_headers(),
**(headers if headers is not None else {}),
**(request_options.get("additional_headers", {}) if request_options is not None else {}),
}
Expand All @@ -306,7 +311,9 @@ def stream(
json=json_body,
data=data_body,
content=content,
files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files)) if files is not None else None,
files=convert_file_dict_to_httpx_tuples(remove_none_from_dict(files))
if (files is not None and files is not omit)
else None,
timeout=timeout,
) as stream:
yield stream
Expand All @@ -317,17 +324,20 @@ def __init__(
self,
*,
httpx_client: httpx.AsyncClient,
base_timeout: typing.Optional[float],
base_headers: typing.Dict[str, str],
base_url: typing.Optional[str] = None,
base_timeout: typing.Callable[[], typing.Optional[float]],
base_headers: typing.Callable[[], typing.Dict[str, str]],
base_url: typing.Optional[typing.Callable[[], str]] = None,
):
self.base_url = base_url
self.base_timeout = base_timeout
self.base_headers = base_headers
self.httpx_client = httpx_client

def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str:
    """Resolve the base URL for a request.

    An explicitly supplied ``maybe_base_url`` always wins; otherwise fall
    back to the client-level ``base_url`` factory, if one was configured.

    Raises:
        ValueError: If neither source yields a base URL.
    """
    # NOTE: removed a dead assignment left over from the diff — the old
    # single-line fallback was immediately overwritten by the new logic.
    base_url = maybe_base_url
    if base_url is None and self.base_url is not None:
        base_url = self.base_url()

    if base_url is None:
        raise ValueError("A base_url is required to make this request, please provide one and try again.")
    return base_url
Expand All @@ -352,7 +362,7 @@ async def request(
timeout = (
request_options.get("timeout_in_seconds")
if request_options is not None and request_options.get("timeout_in_seconds") is not None
else self.base_timeout
else self.base_timeout()
)

json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
Expand All @@ -364,7 +374,7 @@ async def request(
headers=jsonable_encoder(
remove_none_from_dict(
{
**self.base_headers,
**self.base_headers(),
**(headers if headers is not None else {}),
**(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
}
Expand Down Expand Up @@ -434,7 +444,7 @@ async def stream(
timeout = (
request_options.get("timeout_in_seconds")
if request_options is not None and request_options.get("timeout_in_seconds") is not None
else self.base_timeout
else self.base_timeout()
)

json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
Expand All @@ -445,7 +455,7 @@ async def stream(
headers=jsonable_encoder(
remove_none_from_dict(
{
**self.base_headers,
**self.base_headers(),
**(headers if headers is not None else {}),
**(request_options.get("additional_headers", {}) if request_options is not None else {}),
}
Expand Down
37 changes: 30 additions & 7 deletions src/hume/core/pydantic_utilities.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,15 +97,15 @@ class Config:

@classmethod
def model_construct(
cls: type[Model], _fields_set: typing.Optional[typing.Set[str]] = None, **values: typing.Any
) -> Model:
cls: typing.Type["Model"], _fields_set: typing.Optional[typing.Set[str]] = None, **values: typing.Any
) -> "Model":
dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read")
return cls.construct(_fields_set, **dealiased_object)

@classmethod
def construct(
cls: type[Model], _fields_set: typing.Optional[typing.Set[str]] = None, **values: typing.Any
) -> Model:
cls: typing.Type["Model"], _fields_set: typing.Optional[typing.Set[str]] = None, **values: typing.Any
) -> "Model":
dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read")
if IS_PYDANTIC_V2:
return super().model_construct(_fields_set, **dealiased_object) # type: ignore # Pydantic v2
Expand Down Expand Up @@ -152,7 +152,7 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
)

else:
_fields_set = self.__fields_set__
_fields_set = self.__fields_set__.copy()

fields = _get_model_fields(self.__class__)
for name, field in fields.items():
Expand All @@ -162,9 +162,12 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
# If the default values are non-null act like they've been set
# This effectively allows exclude_unset to work like exclude_none where
# the latter passes through intentionally set none values.
if default != None:
if default is not None or ("exclude_unset" in kwargs and not kwargs["exclude_unset"]):
_fields_set.add(name)

if default is not None:
self.__fields_set__.add(name)

kwargs_with_defaults_exclude_unset_include_fields: typing.Any = {
"by_alias": True,
"exclude_unset": True,
Expand All @@ -177,13 +180,33 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
return convert_and_respect_annotation_metadata(object_=dict_dump, annotation=self.__class__, direction="write")


def _union_list_of_pydantic_dicts(
source: typing.List[typing.Any], destination: typing.List[typing.Any]
) -> typing.List[typing.Any]:
converted_list: typing.List[typing.Any] = []
for i, item in enumerate(source):
destination_value = destination[i] # type: ignore
if isinstance(item, dict):
converted_list.append(deep_union_pydantic_dicts(item, destination_value))
elif isinstance(item, list):
converted_list.append(_union_list_of_pydantic_dicts(item, destination_value))
else:
converted_list.append(item)
return converted_list


def deep_union_pydantic_dicts(
source: typing.Dict[str, typing.Any], destination: typing.Dict[str, typing.Any]
) -> typing.Dict[str, typing.Any]:
for key, value in source.items():
node = destination.setdefault(key, {})
if isinstance(value, dict):
node = destination.setdefault(key, {})
deep_union_pydantic_dicts(value, node)
# Note: we do not do this same processing for sets given we do not have sets of models
# and given the sets are unordered, the processing of the set and matching objects would
# be non-trivial.
elif isinstance(value, list):
destination[key] = _union_list_of_pydantic_dicts(value, node)
else:
destination[key] = value

Expand Down
18 changes: 18 additions & 0 deletions src/hume/core/serialization.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,24 @@ def convert_and_respect_annotation_metadata(
if typing_extensions.is_typeddict(clean_type) and isinstance(object_, typing.Mapping):
return _convert_mapping(object_, clean_type, direction)

if (
typing_extensions.get_origin(clean_type) == typing.Dict
or typing_extensions.get_origin(clean_type) == dict
or clean_type == typing.Dict
) and isinstance(object_, typing.Dict):
key_type = typing_extensions.get_args(clean_type)[0]
value_type = typing_extensions.get_args(clean_type)[1]

return {
key: convert_and_respect_annotation_metadata(
object_=value,
annotation=annotation,
inner_type=value_type,
direction=direction,
)
for key, value in object_.items()
}

# If you're iterating on a string, do not bother to coerce it to a sequence.
if not isinstance(object_, str):
if (
Expand Down
4 changes: 2 additions & 2 deletions src/hume/expression_measurement/batch/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -350,7 +350,7 @@ def start_inference_job_from_local_file(
self,
*,
file: typing.List[core.File],
json: typing.Optional[InferenceBaseRequest] = None,
json: typing.Optional[InferenceBaseRequest] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> str:
"""
Expand Down Expand Up @@ -766,7 +766,7 @@ async def start_inference_job_from_local_file(
self,
*,
file: typing.List[core.File],
json: typing.Optional[InferenceBaseRequest] = None,
json: typing.Optional[InferenceBaseRequest] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> str:
"""
Expand Down

0 comments on commit 988356c

Please sign in to comment.