Don't truncate numpy array dimensions below max #792

Open · wants to merge 1 commit into base: main
2 changes: 1 addition & 1 deletion docs/plugins/build_llms_txt.py
@@ -27,7 +27,7 @@ def on_page_content(html: str, page: Page, config: MkDocsConfig, files: Files) -

# The API reference generates HTML tables with line numbers, this strips the line numbers cell and goes back to a code block
for extra in soup.find_all('table', attrs={'class': 'highlighttable'}):
- extra.replace_with(BeautifulSoup(f'<pre>{extra.find('code').get_text()}</pre>', 'html.parser'))
+ extra.replace_with(BeautifulSoup(f'<pre>{extra.find("code").get_text()}</pre>', 'html.parser'))

with open(os.path.join(config.site_dir, 'llms.txt'), 'a', encoding='utf-8') as f:
f.write(MarkdownConverter().convert_soup(soup)) # type: ignore[reportUnknownMemberType]
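This hunk swaps the quotes nested inside the f-string from single to double, which also keeps the expression valid on Python versions before 3.12, where reusing the outer quote character inside an f-string is a syntax error. A minimal standalone sketch of the surrounding logic, assuming only `beautifulsoup4` is installed and using a simplified stand-in for the highlight table the API reference emits:

```python
from bs4 import BeautifulSoup

# Simplified stand-in for a pygments "highlighttable": one cell holds the
# line numbers, another holds the highlighted code.
html = (
    '<table class="highlighttable"><tr>'
    '<td class="linenos">1\n2</td>'
    '<td class="code"><pre><code>x = 1\nprint(x)</code></pre></td>'
    '</tr></table>'
)

soup = BeautifulSoup(html, 'html.parser')
for table in soup.find_all('table', attrs={'class': 'highlighttable'}):
    # Drop the line-number cell and keep only the code text as a plain <pre> block.
    table.replace_with(BeautifulSoup(f'<pre>{table.find("code").get_text()}</pre>', 'html.parser'))

print(soup)  # the table has been replaced by a bare <pre> block containing the code
```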
8 changes: 3 additions & 5 deletions logfire/_internal/config.py
@@ -318,14 +318,12 @@ def configure( # noqa: D417
collect_system_metrics = deprecated_kwargs.pop('collect_system_metrics', None) # type: ignore
if collect_system_metrics is False:
raise ValueError(
- 'The `collect_system_metrics` argument has been removed. '
- 'System metrics are no longer collected by default.'
+ 'The `collect_system_metrics` argument has been removed. System metrics are no longer collected by default.'
)

if collect_system_metrics is not None:
raise ValueError(
- 'The `collect_system_metrics` argument has been removed. '
- 'Use `logfire.instrument_system_metrics()` instead.'
+ 'The `collect_system_metrics` argument has been removed. Use `logfire.instrument_system_metrics()` instead.'
)

scrubbing_callback = deprecated_kwargs.pop('scrubbing_callback', None) # type: ignore
@@ -1332,7 +1330,7 @@ def use_existing_project(
[f'{index}. {item[0]}/{item[1]}' for index, item in project_choices.items()]
)
selected_project_key = Prompt.ask(
- f'Please select one of the following projects by number:\n' f'{project_choices_str}\n',
+ f'Please select one of the following projects by number:\n{project_choices_str}\n',
choices=list(project_choices.keys()),
default='1',
)
6 changes: 3 additions & 3 deletions logfire/_internal/integrations/llm_providers/anthropic.py
@@ -92,7 +92,7 @@ def is_async_client(
"""Returns whether or not the `client` class is async."""
if issubclass(client, (anthropic.Anthropic, anthropic.AnthropicBedrock)):
return False
- assert issubclass(
- client, (anthropic.AsyncAnthropic, anthropic.AsyncAnthropicBedrock)
- ), f'Expected Anthropic, AsyncAnthropic, AnthropicBedrock or AsyncAnthropicBedrock type, got: {client}'
+ assert issubclass(client, (anthropic.AsyncAnthropic, anthropic.AsyncAnthropicBedrock)), (
+ f'Expected Anthropic, AsyncAnthropic, AnthropicBedrock or AsyncAnthropicBedrock type, got: {client}'
+ )
return True
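A hedged usage sketch of the helper this hunk reformats, assuming the `anthropic` package is installed; the import path is inferred from the file path above:

```python
import anthropic

from logfire._internal.integrations.llm_providers.anthropic import is_async_client

# The helper takes the client *class* and reports whether it is one of the async variants.
assert is_async_client(anthropic.Anthropic) is False
assert is_async_client(anthropic.AsyncAnthropic) is True
```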
4 changes: 3 additions & 1 deletion logfire/_internal/json_encoder.py
@@ -135,8 +135,10 @@ def _numpy_array_encoder(o: Any, seen: set[int]) -> JsonValue:
o = o.A # type: ignore[reportUnknownMemberType]

for dimension in range(dimensions):
+ if shape[dimension] <= NUMPY_DIMENSION_MAX_SIZE:
+ continue
# In case of multiple dimensions, we limit the dimension size by the NUMPY_DIMENSION_MAX_SIZE.
- half = min(shape[dimension], NUMPY_DIMENSION_MAX_SIZE) // 2
+ half = NUMPY_DIMENSION_MAX_SIZE // 2
# Slicing and concatenating arrays along the specified axis
slices = [slice(None)] * dimensions
slices[dimension] = slice(0, half)
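A minimal standalone sketch (not the library's exact code) of the truncation behaviour after this change, assuming `NUMPY_DIMENSION_MAX_SIZE` is 10 as the expected test output below implies: an axis at or below the limit is now left whole, while a larger axis keeps its first and last five slices.

```python
import numpy as np

NUMPY_DIMENSION_MAX_SIZE = 10  # assumed value, implied by the test expectations below
HALF = NUMPY_DIMENSION_MAX_SIZE // 2


def truncate(array: np.ndarray) -> np.ndarray:
    """Shrink every oversized axis to its first and last HALF entries."""
    shape = array.shape  # sizes before truncation; each step only shrinks its own axis
    for axis, size in enumerate(shape):
        if size <= NUMPY_DIMENSION_MAX_SIZE:
            # The fix: an axis already within the limit is kept as-is instead of
            # being cut down to 2 * (size // 2).
            continue
        head = [slice(None)] * array.ndim
        tail = [slice(None)] * array.ndim
        head[axis] = slice(0, HALF)
        tail[axis] = slice(-HALF, None)
        array = np.concatenate((array[tuple(head)], array[tuple(tail)]), axis=axis)
    return array


# Matches the shape asserted in the new test: 13 -> 10, 3 stays 3, 11 -> 10.
assert truncate(np.arange(13 * 3 * 11).reshape(13, 3, 11)).shape == (10, 3, 10)
```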
2 changes: 1 addition & 1 deletion logfire/_internal/json_schema.py
@@ -328,7 +328,7 @@ def _numpy_schema(obj: Any, seen: set[int]) -> JsonDict:
return {
'type': 'array',
'x-python-datatype': 'ndarray',
- 'x-shape': to_json_value(obj.shape, seen), # type: ignore[reportUnknownMemberType]
+ 'x-shape': to_json_value(obj.shape, seen),
'x-dtype': str(obj.dtype), # type: ignore
}

1 change: 1 addition & 0 deletions pyproject.toml
@@ -179,6 +179,7 @@ docs = [

[tool.inline-snapshot]
default-flags = ["disable"]
+ format-command="ruff format --stdin-filename {filename}"

[tool.uv.sources]
logfire-api = { workspace = true }
2 changes: 1 addition & 1 deletion tests/test_console_exporter.py
@@ -776,7 +776,7 @@ def test_exception(exporter: TestExporter) -> None:
SimpleConsoleSpanExporter(output=out, colors='always').export(exporter.exported_spans)
assert out.getvalue().splitlines() == [
'\x1b[32m00:00:01.000\x1b[0m \x1b[31merror!!! test\x1b[0m',
- '\x1b[34m │ \x1b[0m\x1b[1;31mZeroDivisionError: ' '\x1b[0mdivision by zero',
+ '\x1b[34m │ \x1b[0m\x1b[1;31mZeroDivisionError: \x1b[0mdivision by zero',
'\x1b[97;49m \x1b[0m\x1b[35;49m│\x1b[0m\x1b[97;49m '
'\x1b[0m\x1b[97;49mTraceback\x1b[0m\x1b[97;49m '
'\x1b[0m\x1b[97;49m(\x1b[0m\x1b[97;49mmost\x1b[0m\x1b[97;49m '
82 changes: 81 additions & 1 deletion tests/test_json_args.py
@@ -823,7 +823,7 @@ class StrSubclass(str):
pytest.param(
[{str: bytes, int: float}],
"[{<class 'str'>: <class 'bytes'>, <class 'int'>: <class 'float'>}]",
'[{"<class \'str\'>":"<class \'bytes\'>","<class \'int\'>":"<class ' "'float'>\"}]",
'[{"<class \'str\'>":"<class \'bytes\'>","<class \'int\'>":"<class \'float\'>"}]',
{
'items': {
'properties': {
@@ -1235,3 +1235,83 @@ class Model:
}
]
)


def test_numpy_array_truncation(exporter: TestExporter):
logfire.info('hi', m=numpy.arange(13 * 3 * 11).reshape(13, 3, 11))

truncated = [
[
[0, 1, 2, 3, 4, 6, 7, 8, 9, 10],
[11, 12, 13, 14, 15, 17, 18, 19, 20, 21],
[22, 23, 24, 25, 26, 28, 29, 30, 31, 32],
],
[
[33, 34, 35, 36, 37, 39, 40, 41, 42, 43],
[44, 45, 46, 47, 48, 50, 51, 52, 53, 54],
[55, 56, 57, 58, 59, 61, 62, 63, 64, 65],
],
[
[66, 67, 68, 69, 70, 72, 73, 74, 75, 76],
[77, 78, 79, 80, 81, 83, 84, 85, 86, 87],
[88, 89, 90, 91, 92, 94, 95, 96, 97, 98],
],
[
[99, 100, 101, 102, 103, 105, 106, 107, 108, 109],
[110, 111, 112, 113, 114, 116, 117, 118, 119, 120],
[121, 122, 123, 124, 125, 127, 128, 129, 130, 131],
],
[
[132, 133, 134, 135, 136, 138, 139, 140, 141, 142],
[143, 144, 145, 146, 147, 149, 150, 151, 152, 153],
[154, 155, 156, 157, 158, 160, 161, 162, 163, 164],
],
[
[264, 265, 266, 267, 268, 270, 271, 272, 273, 274],
[275, 276, 277, 278, 279, 281, 282, 283, 284, 285],
[286, 287, 288, 289, 290, 292, 293, 294, 295, 296],
],
[
[297, 298, 299, 300, 301, 303, 304, 305, 306, 307],
[308, 309, 310, 311, 312, 314, 315, 316, 317, 318],
[319, 320, 321, 322, 323, 325, 326, 327, 328, 329],
],
[
[330, 331, 332, 333, 334, 336, 337, 338, 339, 340],
[341, 342, 343, 344, 345, 347, 348, 349, 350, 351],
[352, 353, 354, 355, 356, 358, 359, 360, 361, 362],
],
[
[363, 364, 365, 366, 367, 369, 370, 371, 372, 373],
[374, 375, 376, 377, 378, 380, 381, 382, 383, 384],
[385, 386, 387, 388, 389, 391, 392, 393, 394, 395],
],
[
[396, 397, 398, 399, 400, 402, 403, 404, 405, 406],
[407, 408, 409, 410, 411, 413, 414, 415, 416, 417],
[418, 419, 420, 421, 422, 424, 425, 426, 427, 428],
],
]
assert numpy.array(truncated).shape == (10, 3, 10)
assert exporter.exported_spans_as_dict() == snapshot(
[
{
'name': 'hi',
'context': {'trace_id': 1, 'span_id': 1, 'is_remote': False},
'parent': None,
'start_time': 1000000000,
'end_time': 1000000000,
'attributes': {
'logfire.span_type': 'log',
'logfire.level_num': 9,
'logfire.msg_template': 'hi',
'logfire.msg': 'hi',
'code.filepath': 'test_json_args.py',
'code.function': 'test_numpy_array_truncation',
'code.lineno': 123,
'm': IsJson(truncated),
'logfire.json_schema': '{"type":"object","properties":{"m":{"type":"array","x-python-datatype":"ndarray","x-shape":[13,3,11],"x-dtype":"int64"}}}',
},
}
]
)
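For reference, the expected array above follows directly from the truncation rule sketched after the `json_encoder.py` hunk: with an assumed limit of 10, axes of size 13 and 11 keep their first and last five slices (so index 5 of each length-11 row, and rows 5, 6 and 7 along the first axis, values 165 through 263, are dropped), while the middle axis of size 3 is untouched. A quick arithmetic check:

```python
NUMPY_DIMENSION_MAX_SIZE = 10  # assumed value, implied by the expected output above


def truncated_size(size: int) -> int:
    # An axis within the limit is kept whole; a larger axis keeps its first and
    # last NUMPY_DIMENSION_MAX_SIZE // 2 entries.
    return size if size <= NUMPY_DIMENSION_MAX_SIZE else 2 * (NUMPY_DIMENSION_MAX_SIZE // 2)


assert tuple(truncated_size(s) for s in (13, 3, 11)) == (10, 3, 10)
```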
4 changes: 2 additions & 2 deletions tests/test_tail_sampling.py
@@ -487,7 +487,7 @@ def test_span_levels():

def test_invalid_rates():
with inline_snapshot.extra.raises(
- snapshot('ValueError: Invalid sampling rates, ' 'must be 0.0 <= background_rate <= head <= 1.0')
+ snapshot('ValueError: Invalid sampling rates, must be 0.0 <= background_rate <= head <= 1.0')
):
logfire.SamplingOptions.level_or_duration(background_rate=-1)
with pytest.raises(ValueError):
@@ -502,7 +502,7 @@ def test_trace_sample_rate(config_kwargs: dict[str, Any]):
assert logfire.DEFAULT_LOGFIRE_INSTANCE.config.sampling.head == 0.123
assert len(warnings) == 1
assert str(warnings[0].message) == snapshot(
- 'The `trace_sample_rate` argument is deprecated. ' 'Use `sampling=logfire.SamplingOptions(head=...)` instead.'
+ 'The `trace_sample_rate` argument is deprecated. Use `sampling=logfire.SamplingOptions(head=...)` instead.'
)

