Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(python): openai instrumentator #35

Merged
merged 51 commits into from
Jan 11, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
51 commits
Select commit Hold shift + click to select a range
5ff89ef
feat: openai instrumentator
RogerHYang Jan 4, 2024
dc39934
ci default working directory
RogerHYang Jan 4, 2024
0ffecc7
clean up
RogerHYang Jan 4, 2024
b9e0654
clean up
RogerHYang Jan 4, 2024
31d3d1a
clean up
RogerHYang Jan 4, 2024
83d56eb
clean up
RogerHYang Jan 4, 2024
189f673
clean up
RogerHYang Jan 5, 2024
05a49fb
update README
RogerHYang Jan 5, 2024
525e65c
clean up
RogerHYang Jan 5, 2024
fa16b4a
Merge branch 'main' into openai-instrumentor
RogerHYang Jan 9, 2024
f395f58
fix indent
RogerHYang Jan 9, 2024
dbe7bc3
add OpenInferenceSpanKindValues
RogerHYang Jan 9, 2024
4cd42cd
use OpenInferenceSpanKindValues
RogerHYang Jan 9, 2024
d6c4c5c
Merge branch 'main' into openai-instrumentor
RogerHYang Jan 9, 2024
950ac9d
Merge branch 'main' into openai-instrumentor
RogerHYang Jan 9, 2024
bc27781
tox testing
RogerHYang Jan 9, 2024
9c2096e
tox testing
RogerHYang Jan 9, 2024
1fb1038
tox testing
RogerHYang Jan 9, 2024
0d142ad
wip
RogerHYang Jan 9, 2024
7753472
tox testing
RogerHYang Jan 9, 2024
4ed83fd
tox testing
RogerHYang Jan 9, 2024
8439043
tox testing
RogerHYang Jan 9, 2024
ec12409
tox testing
RogerHYang Jan 9, 2024
4e91512
Merge branch 'main' into openai-instrumentor
RogerHYang Jan 9, 2024
2457044
wip
RogerHYang Jan 9, 2024
b965721
wip
RogerHYang Jan 9, 2024
f17680b
wip
RogerHYang Jan 9, 2024
479b2d6
wip
RogerHYang Jan 9, 2024
09fb901
wip
RogerHYang Jan 9, 2024
f3211dd
wip
RogerHYang Jan 9, 2024
c533f9f
wip
RogerHYang Jan 9, 2024
7ed318b
wip
RogerHYang Jan 9, 2024
3c08576
wip
RogerHYang Jan 9, 2024
ff84687
wip
RogerHYang Jan 9, 2024
9acb9c2
wip
RogerHYang Jan 9, 2024
164ca95
wip
RogerHYang Jan 9, 2024
6d75955
wip
RogerHYang Jan 9, 2024
b70a56d
wip
RogerHYang Jan 9, 2024
e3c538d
wip
RogerHYang Jan 9, 2024
f344c32
wip
RogerHYang Jan 9, 2024
1593c6d
wip
RogerHYang Jan 9, 2024
444906b
wip
RogerHYang Jan 9, 2024
5fd32cc
wip
RogerHYang Jan 9, 2024
5e5546d
Merge branch 'main' into openai-instrumentor
RogerHYang Jan 9, 2024
76a3777
Merge branch 'main' into openai-instrumentor
RogerHYang Jan 10, 2024
9029fa8
wip
RogerHYang Jan 11, 2024
3c1b5a4
Merge branch 'main' into openai-instrumentor
RogerHYang Jan 11, 2024
e5ccc6d
wip
RogerHYang Jan 11, 2024
c20f488
wip
RogerHYang Jan 11, 2024
6bd830c
wip
RogerHYang Jan 11, 2024
50be6c8
wip
RogerHYang Jan 11, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 26 additions & 0 deletions .github/workflows/python-CI.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# Continuous integration for the python/ workspace.
name: Python CI

on:
  push:
    # NOTE(review): pushes to main are not path-filtered (only pull_request is),
    # so every push to main runs this workflow — confirm that is intended.
    branches: [main]
  pull_request:
    paths:
      - "python/**"

# Every `run:` step below executes from the python/ subdirectory.
defaults:
  run:
    working-directory: ./python

jobs:
  ci:
    name: CI Python
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      # Install both the oldest (3.8) and newest (3.11) supported interpreters
      # so tox can resolve each of its environments.
      - uses: actions/setup-python@v5
        with:
          python-version: |
            3.8
            3.11
      - run: pip install tox==4.11.4
      # Run all tox environments in parallel.
      - run: tox run-parallel
7 changes: 4 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,10 @@ OpenInference provides a set of instrumentations for popular machine learning SD

## Python

| Package | Description |
| --------------------------------------------------------------------------------------------- | --------------------------------------------- |
| [`openinference-semantic-conventions`](./python/openinference-semantic-conventions/README.md) | Semantic conventions for tracing of LLM Apps. |
| Package | Description |
|--------------------------------------------------------------------------------------------------------------------|-----------------------------------------------|
| [`openinference-semantic-conventions`](./python/openinference-semantic-conventions/README.md) | Semantic conventions for tracing of LLM Apps. |
| [`openinference-instrumentation-openai`](./python/instrumentation/openinference-instrumentation-openai/README.rst) | OpenInference Instrumentation for OpenAI SDK. |

## JavaScript

Expand Down
3 changes: 3 additions & 0 deletions python/dev-requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Development-only tooling (pinned for reproducible CI runs).
pytest == 7.4.4  # test runner
ruff == 0.1.11  # linter / formatter
mypy == 1.8.0  # static type checker
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
import openai
from openinference.instrumentation.openai import OpenAIInstrumentor
from opentelemetry import trace as trace_api
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk import trace as trace_sdk
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace.export import SimpleSpanProcessor

# Wire up a tracer provider that exports spans synchronously over OTLP/HTTP
# to a local collector listening on port 6006.
tracer_provider = trace_sdk.TracerProvider(resource=Resource(attributes={}))
tracer_provider.add_span_processor(
    SimpleSpanProcessor(
        span_exporter=OTLPSpanExporter(endpoint="http://127.0.0.1:6006/v1/traces"),
    )
)
trace_api.set_tracer_provider(tracer_provider=tracer_provider)

# Patch the OpenAI client so subsequent calls are traced.
OpenAIInstrumentor().instrument()


if __name__ == "__main__":
    client = openai.OpenAI()
    completion = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "Write a haiku."}],
        max_tokens=20,
    )
    print(completion.choices[0].message.content)
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
import asyncio

import openai
from openinference.instrumentation.openai import OpenAIInstrumentor
from opentelemetry import trace as trace_api
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk import trace as trace_sdk
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace.export import SimpleSpanProcessor

# Wire up a tracer provider that exports spans synchronously over OTLP/HTTP
# to a local collector listening on port 6006.
tracer_provider = trace_sdk.TracerProvider(resource=Resource(attributes={}))
tracer_provider.add_span_processor(
    SimpleSpanProcessor(
        span_exporter=OTLPSpanExporter(endpoint="http://127.0.0.1:6006/v1/traces"),
    )
)
trace_api.set_tracer_provider(tracer_provider=tracer_provider)

# Patch the OpenAI client so subsequent calls are traced.
OpenAIInstrumentor().instrument()


async def chat_completions(**kwargs):
    """Stream a chat completion, echoing each content delta to stdout."""
    client = openai.AsyncOpenAI()
    stream = await client.chat.completions.create(**kwargs)
    async for chunk in stream:
        if content := chunk.choices[0].delta.content:
            print(content, end="")
    print()


if __name__ == "__main__":
    asyncio.run(
        chat_completions(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Write a haiku."}],
            max_tokens=20,
            stream=True,
        ),
    )
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
import openai
from openinference.instrumentation.openai import OpenAIInstrumentor
from opentelemetry import trace as trace_api
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk import trace as trace_sdk
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace.export import SimpleSpanProcessor

# Wire up a tracer provider that exports spans synchronously over OTLP/HTTP
# to a local collector listening on port 6006.
tracer_provider = trace_sdk.TracerProvider(resource=Resource(attributes={}))
tracer_provider.add_span_processor(
    SimpleSpanProcessor(
        span_exporter=OTLPSpanExporter(endpoint="http://127.0.0.1:6006/v1/traces"),
    )
)
trace_api.set_tracer_provider(tracer_provider=tracer_provider)

# Patch the OpenAI client so subsequent calls are traced.
OpenAIInstrumentor().instrument()


if __name__ == "__main__":
    client = openai.OpenAI()
    embedding_response = client.embeddings.create(
        model="text-embedding-ada-002",
        input="hello world",
    )
    print(embedding_response.data[0].embedding)
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
from importlib import import_module

from openinference.instrumentation.openai import OpenAIInstrumentor
from opentelemetry import trace as trace_api
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor
from opentelemetry.sdk import trace as trace_sdk
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace.export import SimpleSpanProcessor

# Wire up a tracer provider that exports spans synchronously over OTLP/HTTP
# to a local collector listening on port 6006.
tracer_provider = trace_sdk.TracerProvider(resource=Resource(attributes={}))
tracer_provider.add_span_processor(
    SimpleSpanProcessor(
        span_exporter=OTLPSpanExporter(endpoint="http://127.0.0.1:6006/v1/traces"),
    )
)
trace_api.set_tracer_provider(tracer_provider=tracer_provider)

# Instrument both the httpx transport layer and the OpenAI client; spans from
# the two instrumentors nest within the same trace.
HTTPXClientInstrumentor().instrument()
OpenAIInstrumentor().instrument()


if __name__ == "__main__":
    # Import openai only after instrumentation is installed, demonstrating
    # that patching does not require the module to be imported up front.
    openai = import_module("openai")
    completion = openai.OpenAI().chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "Write a haiku."}],
        max_tokens=20,
    )
    print(completion.choices[0].message.content)
Original file line number Diff line number Diff line change
Expand Up @@ -10,30 +10,37 @@ readme = "README.rst"
license = "Apache-2.0"
requires-python = ">=3.8, <3.12"
authors = [
{ name = "OpenInference Authors", email = "[email protected]" },
{ name = "OpenInference Authors", email = "[email protected]" },
]
classifiers = [
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
]
dependencies = [
"opentelemetry-api",
"opentelemetry-instrumentation",
"opentelemetry-semantic-conventions",
"openinference-semantic-conventions",
"wrapt",
"opentelemetry-api",
"opentelemetry-instrumentation",
"opentelemetry-semantic-conventions",
"openinference-semantic-conventions",
"wrapt",
]

[project.optional-dependencies]
instruments = [
"openai >= 1.0.0",
]
test = [
"openai == 1.0.0",
"openai == 1.0.0",
"opentelemetry-sdk",
"opentelemetry-instrumentation-httpx",
"respx",
"numpy",
]

[project.urls]
Expand All @@ -44,8 +51,8 @@ path = "src/openinference/instrumentation/openai/version.py"

[tool.hatch.build.targets.sdist]
include = [
"/src",
"/tests",
"/src",
"/tests",
]

[tool.hatch.build.targets.wheel]
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
import logging
from importlib import import_module
from typing import Any, Collection

from openinference.instrumentation.openai._request import (
_AsyncRequest,
_Request,
)
from openinference.instrumentation.openai.package import _instruments
from openinference.instrumentation.openai.version import __version__
from opentelemetry import trace as trace_api
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor # type: ignore
from wrapt import wrap_function_wrapper

# Module-level logger; the NullHandler prevents "no handler" warnings when the
# host application has not configured logging.
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())

# Name of the module whose client classes are monkey-patched below.
_MODULE = "openai"


class OpenAIInstrumentor(BaseInstrumentor):  # type: ignore
    """
    An instrumentor for the openai package.

    Wraps ``OpenAI.request`` and ``AsyncOpenAI.request`` so that every request
    made through either client is traced, and restores the original methods on
    uninstrumentation.
    """

    __slots__ = (
        "_original_request",
        "_original_async_request",
    )

    def instrumentation_dependencies(self) -> Collection[str]:
        # Package specifiers that must be installed for instrumentation to apply.
        return _instruments

    def _instrument(self, **kwargs: Any) -> None:
        # Fall back to the globally registered provider when none is supplied.
        tracer_provider = kwargs.get("tracer_provider") or trace_api.get_tracer_provider()
        tracer = trace_api.get_tracer(__name__, __version__, tracer_provider)
        openai = import_module(_MODULE)
        # Keep references to the unwrapped methods so _uninstrument can restore them.
        self._original_request = openai.OpenAI.request
        self._original_async_request = openai.AsyncOpenAI.request
        for method_path, wrapper_cls in (
            ("OpenAI.request", _Request),
            ("AsyncOpenAI.request", _AsyncRequest),
        ):
            wrap_function_wrapper(
                module=_MODULE,
                name=method_path,
                wrapper=wrapper_cls(tracer=tracer, openai=openai),
            )

    def _uninstrument(self, **kwargs: Any) -> None:
        # Restore the pristine methods captured during _instrument.
        openai = import_module(_MODULE)
        openai.OpenAI.request = self._original_request
        openai.AsyncOpenAI.request = self._original_async_request
Loading