From e8e5a0dc7ccf2db19d7f81991ee0987f9c3ae375 Mon Sep 17 00:00:00 2001
From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com>
Date: Fri, 15 Mar 2024 16:37:44 -0400
Subject: [PATCH] release: 1.14.1 (#1239)

* docs(readme): assistant streaming (#1238)

* release: 1.14.1
---
 .release-please-manifest.json |   2 +-
 CHANGELOG.md                  |   8 ++
 helpers.md                    | 161 ++++++++++++++++++++++++++++++++++
 pyproject.toml                |   2 +-
 src/openai/_version.py        |   2 +-
 5 files changed, 172 insertions(+), 3 deletions(-)
 create mode 100644 helpers.md

diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index e72f11310e..a780111df4 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "1.14.0"
+  ".": "1.14.1"
 }
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1f0fc7556d..f7a80d39a6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,13 @@
 # Changelog
 
+## 1.14.1 (2024-03-15)
+
+Full Changelog: [v1.14.0...v1.14.1](https://github.com/openai/openai-python/compare/v1.14.0...v1.14.1)
+
+### Documentation
+
+* **readme:** assistant streaming ([#1238](https://github.com/openai/openai-python/issues/1238)) ([0fc30a2](https://github.com/openai/openai-python/commit/0fc30a23030b4ff60f27cd2f472517926ed0f300))
+
 ## 1.14.0 (2024-03-13)
 
 Full Changelog: [v1.13.4...v1.14.0](https://github.com/openai/openai-python/compare/v1.13.4...v1.14.0)
diff --git a/helpers.md b/helpers.md
new file mode 100644
index 0000000000..03fd5e76b7
--- /dev/null
+++ b/helpers.md
@@ -0,0 +1,161 @@
+# Streaming Helpers
+
+OpenAI supports streaming responses when interacting with the [Assistants API](#assistant-streaming-api).
+
+## Assistant Streaming API
+
+OpenAI supports streaming responses from Assistants. The SDK provides convenience wrappers around the API
+so you can subscribe to the types of events you are interested in, as well as receive accumulated responses.
+
+More information can be found in the documentation: [Assistant Streaming](https://platform.openai.com/docs/assistants/overview?lang=python)
+
+### An example of creating a run and subscribing to events
+
+You can subscribe to events by creating an event handler class and overriding the relevant event handlers.
+
+```python
+from typing_extensions import override
+from openai import AssistantEventHandler
+
+# First, we create an EventHandler class to define
+# how we want to handle the events in the response stream.
+
+class EventHandler(AssistantEventHandler):
+    @override
+    def on_text_created(self, text) -> None:
+        print("\nassistant > ", end="", flush=True)
+
+    @override
+    def on_text_delta(self, delta, snapshot):
+        print(delta.value, end="", flush=True)
+
+    def on_tool_call_created(self, tool_call):
+        print(f"\nassistant > {tool_call.type}\n", flush=True)
+
+    def on_tool_call_delta(self, delta, snapshot):
+        if delta.type == "code_interpreter":
+            if delta.code_interpreter.input:
+                print(delta.code_interpreter.input, end="", flush=True)
+            if delta.code_interpreter.outputs:
+                print("\n\noutput >", flush=True)
+                for output in delta.code_interpreter.outputs:
+                    if output.type == "logs":
+                        print(f"\n{output.logs}", flush=True)
+
+# Then, we use the `create_and_stream` SDK helper
+# with the `EventHandler` class to create the Run
+# and stream the response.
+
+with client.beta.threads.runs.create_and_stream(
+    thread_id=thread.id,
+    assistant_id=assistant.id,
+    instructions="Please address the user as Jane Doe. The user has a premium account.",
+    event_handler=EventHandler(),
+) as stream:
+    stream.until_done()
+```
+
+### Assistant Events
+
+The Assistants API emits events that you can subscribe to with the following handler methods.
+
+```python
+def on_event(self, event: AssistantStreamEvent)
+```
+
+This allows you to subscribe to all the possible raw events sent by the OpenAI streaming API.
+In many cases it will be more convenient to subscribe to a more specific set of events for your use case.
+
+More information on the types of events can be found here: [Events](https://platform.openai.com/docs/api-reference/assistants-streaming/events)
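+
+As a minimal sketch (not part of the SDK documentation above), the handler below overrides `on_event` to log
+every raw event by name. It assumes, per the Events reference, that each streamed event exposes an `event`
+name and a `data` payload, and it reuses the `client`, `thread` and `assistant` objects from the first example;
+the class name `RawEventLogger` is purely illustrative.
+
+```python
+from typing_extensions import override
+from openai import AssistantEventHandler
+
+
+class RawEventLogger(AssistantEventHandler):
+    @override
+    def on_event(self, event) -> None:
+        # `event.event` is the event name, e.g. "thread.message.delta".
+        print(event.event)
+        if event.event == "thread.run.failed":
+            # For run-level events, `event.data` is the Run object.
+            print(event.data.last_error)
+
+
+# `client`, `thread` and `assistant` are assumed to be set up as in the example above.
+with client.beta.threads.runs.create_and_stream(
+    thread_id=thread.id,
+    assistant_id=assistant.id,
+    event_handler=RawEventLogger(),
+) as stream:
+    stream.until_done()
+```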
+
+```python
+def on_run_step_created(self, run_step: RunStep)
+def on_run_step_delta(self, delta: RunStepDelta, snapshot: RunStep)
+def on_run_step_done(self, run_step: RunStep)
+```
+
+These events allow you to subscribe to the creation, delta and completion of a RunStep.
+
+For more information on how Runs and RunSteps work, see the documentation: [Runs and RunSteps](https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps)
+
+```python
+def on_message_created(self, message: Message)
+def on_message_delta(self, delta: MessageDelta, snapshot: Message)
+def on_message_done(self, message: Message)
+```
+
+These events allow you to subscribe to Message creation, delta and completion events. Messages can contain
+different types of content that can be sent from a model (and events are available for specific content types).
+For convenience, the delta event includes both the incremental update and an accumulated snapshot of the content.
+
+More information on messages can be found in the documentation: [Message](https://platform.openai.com/docs/api-reference/messages/object).
+
+```python
+def on_text_created(self, text: Text)
+def on_text_delta(self, delta: TextDelta, snapshot: Text)
+def on_text_done(self, text: Text)
+```
+
+These events allow you to subscribe to the creation, delta and completion of Text content (a specific type of message content).
+For convenience, the delta event includes both the incremental update and an accumulated snapshot of the content.
+
+```python
+def on_image_file_done(self, image_file: ImageFile)
+```
+
+Image files are not sent incrementally, so a single event is provided when an image file is available.
+
+```python
+def on_tool_call_created(self, tool_call: ToolCall)
+def on_tool_call_delta(self, delta: ToolCallDelta, snapshot: ToolCall)
+def on_tool_call_done(self, tool_call: ToolCall)
+```
+
+These events allow you to subscribe to the creation, delta and completion of a ToolCall.
+
+More information on tools can be found here: [Tools](https://platform.openai.com/docs/assistants/tools)
+
+```python
+def on_end(self)
+```
+
+The last event sent when a stream ends.
+
+```python
+def on_timeout(self)
+```
+
+This event is triggered if the request times out.
+
+```python
+def on_exception(self, exception: Exception)
+```
+
+This event is triggered if an exception occurs during streaming.
+
+### Assistant Methods
+
+The assistant streaming object also provides a few convenience methods:
+
+```python
+def current_event()
+def current_run()
+def current_message_snapshot()
+def current_run_step_snapshot()
+```
+
+These methods allow you to access additional context from within event handlers. In many cases
+the handlers should include all the information you need for processing, but if additional context is required it
+can be accessed.
+
+Note: The relevant context is not available in every situation; in those cases, these values will be undefined.
+
+```python
+def get_final_run(self)
+def get_final_run_steps(self)
+def get_final_messages(self)
+```
+
+These methods are provided for convenience to collect information at the end of a stream. Calling these methods
+will consume the stream until completion and then return the relevant accumulated objects.
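+
+For example, here is a minimal sketch (not part of the SDK documentation above) that collects the accumulated
+messages at the end of a run; it assumes `event_handler` can be omitted from `create_and_stream`, and it reuses
+the `client`, `thread` and `assistant` objects from the first example.
+
+```python
+# No event handler is registered here; we only want the accumulated results.
+with client.beta.threads.runs.create_and_stream(
+    thread_id=thread.id,
+    assistant_id=assistant.id,
+) as stream:
+    # Consumes the stream until completion, then returns the accumulated Message objects.
+    messages = stream.get_final_messages()
+
+for message in messages:
+    print(message.role)
+```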
diff --git a/pyproject.toml b/pyproject.toml
index 0856032512..d562977dbd 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "openai"
-version = "1.14.0"
+version = "1.14.1"
 description = "The official Python library for the openai API"
 readme = "README.md"
 license = "Apache-2.0"
diff --git a/src/openai/_version.py b/src/openai/_version.py
index 134799ff42..3f5331b8e0 100644
--- a/src/openai/_version.py
+++ b/src/openai/_version.py
@@ -1,4 +1,4 @@
 # File generated from our OpenAPI spec by Stainless.
 
 __title__ = "openai"
-__version__ = "1.14.0"  # x-release-please-version
+__version__ = "1.14.1"  # x-release-please-version