Module livekit.plugins.openai.beta

Sub-modules

livekit.plugins.openai.beta.assistant_llm

Classes

class AssistantCreateOptions (name: str, instructions: str, model: ChatModels, temperature: float | None = None)

Options for creating a new assistant on the fly. See AssistantOptions below for how these are passed to AssistantLLM.

@dataclass
class AssistantCreateOptions:
    name: str
    instructions: str
    model: ChatModels
    temperature: float | None = None
    # TODO: when we implement code_interpreter and file_search tools
    # tool_resources: ToolResources | None = None
    # tools: list[AssistantTools] = field(default_factory=list)

Class variables

var instructions : str
var model : Literal['gpt-4o', 'gpt-4o-2024-05-13', 'gpt-4o-mini', 'gpt-4o-mini-2024-07-18', 'gpt-4-turbo', 'gpt-4-turbo-2024-04-09', 'gpt-4-turbo-preview', 'gpt-4-0125-preview', 'gpt-4-1106-preview', 'gpt-4-vision-preview', 'gpt-4-1106-vision-preview', 'gpt-4', 'gpt-4-0314', 'gpt-4-0613', 'gpt-4-32k', 'gpt-4-32k-0314', 'gpt-4-32k-0613', 'gpt-3.5-turbo', 'gpt-3.5-turbo-16k', 'gpt-3.5-turbo-0301', 'gpt-3.5-turbo-0613', 'gpt-3.5-turbo-1106', 'gpt-3.5-turbo-16k-0613']
var name : str
var temperature : float | None
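
For orientation, a minimal sketch of constructing these options (the model string must be one of the Literal values listed above; the name and instructions here are illustrative):

from livekit.plugins.openai.beta import AssistantCreateOptions

# Options for an assistant created on the fly; temperature is optional.
opts = AssistantCreateOptions(
    name="support-agent",
    instructions="You are a concise, friendly support assistant.",
    model="gpt-4o",
    temperature=0.7,
)

Note that AssistantLLM._sync_openai (shown below) forwards temperature only when it is truthy, so a temperature of 0.0 is currently not sent to OpenAI.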
class AssistantLLM (*, assistant_opts: AssistantOptions, client: AsyncClient | None = None, api_key: str | None = None, base_url: str | None = None, on_file_uploaded: OnFileUploaded | None = None)

An llm.LLM implementation backed by the OpenAI Assistants API (beta). Conversation state is kept in an OpenAI thread, which is created or loaded according to the given AssistantOptions.

class AssistantLLM(llm.LLM):
    def __init__(
        self,
        *,
        assistant_opts: AssistantOptions,
        client: AsyncClient | None = None,
        api_key: str | None = None,
        base_url: str | None = None,
        on_file_uploaded: OnFileUploaded | None = None,
    ) -> None:
        super().__init__()

        test_ctx = llm.ChatContext()
        if not hasattr(test_ctx, "_metadata"):
            raise Exception(
                "This beta feature of 'livekit-plugins-openai' requires a newer version of 'livekit-agents'"
            )
        self._client = client or AsyncClient(
            api_key=api_key,
            base_url=base_url,
            http_client=httpx.AsyncClient(
                timeout=httpx.Timeout(timeout=30, connect=10, read=5, pool=5),
                follow_redirects=True,
                limits=httpx.Limits(
                    max_connections=1000,
                    max_keepalive_connections=100,
                    keepalive_expiry=120,
                ),
            ),
        )
        self._assistant_opts = assistant_opts
        self._running_fncs: MutableSet[asyncio.Task[Any]] = set()
        self._on_file_uploaded = on_file_uploaded
        self._tool_call_run_id_lookup = dict[str, str]()
        self._submitted_tool_calls = set[str]()

        self._sync_openai_task: asyncio.Task[AssistantLoadOptions] | None = None
        try:
            self._sync_openai_task = asyncio.create_task(self._sync_openai())
        except Exception:
            logger.error(
                "failed to create sync openai task. This can happen when instantiating without a running asyncio event loop (such has when running tests)"
            )
        self._done_futures = list[asyncio.Future[None]]()

    async def _sync_openai(self) -> AssistantLoadOptions:
        if self._assistant_opts.create_options:
            kwargs: Dict[str, Any] = {
                "model": self._assistant_opts.create_options.model,
                "name": self._assistant_opts.create_options.name,
                "instructions": self._assistant_opts.create_options.instructions,
                # "tools": [
                #     {"type": t} for t in self._assistant_opts.create_options.tools
                # ],
                # "tool_resources": self._assistant_opts.create_options.tool_resources,
            }
            # TODO when we implement code_interpreter and file_search tools
            # if self._assistant_opts.create_options.tool_resources:
            #     kwargs["tool_resources"] = (
            #         self._assistant_opts.create_options.tool_resources
            #     )
            if self._assistant_opts.create_options.temperature:
                kwargs["temperature"] = self._assistant_opts.create_options.temperature
            assistant = await self._client.beta.assistants.create(**kwargs)

            thread = await self._client.beta.threads.create()
            return AssistantLoadOptions(assistant_id=assistant.id, thread_id=thread.id)
        elif self._assistant_opts.load_options:
            if not self._assistant_opts.load_options.thread_id:
                thread = await self._client.beta.threads.create()
                self._assistant_opts.load_options.thread_id = thread.id
            return self._assistant_opts.load_options

        raise Exception("One of create_options or load_options must be set")

    def chat(
        self,
        *,
        chat_ctx: llm.ChatContext,
        fnc_ctx: llm.FunctionContext | None = None,
        temperature: float | None = None,
        n: int | None = None,
        parallel_tool_calls: bool | None = None,
    ):
        if n is not None:
            logger.warning("OpenAI Assistants does not support the 'n' parameter")

        if parallel_tool_calls is not None:
            logger.warning(
                "OpenAI Assistants does not support the 'parallel_tool_calls' parameter"
            )

        if not self._sync_openai_task:
            self._sync_openai_task = asyncio.create_task(self._sync_openai())

        return AssistantLLMStream(
            temperature=temperature,
            assistant_llm=self,
            sync_openai_task=self._sync_openai_task,
            client=self._client,
            chat_ctx=chat_ctx,
            fnc_ctx=fnc_ctx,
            on_file_uploaded=self._on_file_uploaded,
        )

    async def _register_tool_call(self, tool_call_id: str, run_id: str) -> None:
        self._tool_call_run_id_lookup[tool_call_id] = run_id

    async def _submit_tool_call_result(self, tool_call_id: str, result: str) -> None:
        if tool_call_id in self._submitted_tool_calls:
            return
        logger.debug(f"submitting tool call {tool_call_id} result")
        run_id = self._tool_call_run_id_lookup.get(tool_call_id)
        if not run_id:
            logger.error(f"tool call {tool_call_id} not found")
            return

        if not self._sync_openai_task:
            logger.error("sync_openai_task not set")
            return

        thread_id = (await self._sync_openai_task).thread_id
        if not thread_id:
            logger.error("thread_id not set")
            return
        tool_output = ToolOutput(output=result, tool_call_id=tool_call_id)
        await self._client.beta.threads.runs.submit_tool_outputs_and_poll(
            tool_outputs=[tool_output], run_id=run_id, thread_id=thread_id
        )
        self._submitted_tool_calls.add(tool_call_id)
        logger.debug(f"submitted tool call {tool_call_id} result")
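
To make the flow concrete, here is a hedged end-to-end sketch. It assumes the ChatContext.append(...) signature and the ChatChunk/Choice/ChoiceDelta stream shape from livekit-agents v0.x; the assistant name and prompt are illustrative:

import asyncio

from livekit.agents import llm
from livekit.plugins.openai.beta import (
    AssistantCreateOptions,
    AssistantLLM,
    AssistantOptions,
)


async def main() -> None:
    # Constructed inside an async context so the internal sync task can be
    # scheduled. With no client/api_key given, an AsyncClient is built
    # internally (reading OPENAI_API_KEY from the environment).
    assistant_llm = AssistantLLM(
        assistant_opts=AssistantOptions(
            create_options=AssistantCreateOptions(
                name="support-agent",
                instructions="You are a concise support assistant.",
                model="gpt-4o",
            )
        )
    )

    chat_ctx = llm.ChatContext()
    chat_ctx.append(role="user", text="What is LiveKit?")  # v0.x API (assumption)

    # chat() returns an AssistantLLMStream that can be iterated asynchronously.
    stream = assistant_llm.chat(chat_ctx=chat_ctx)
    async for chunk in stream:
        for choice in chunk.choices:  # stream shape assumed from livekit-agents v0.x
            if choice.delta.content:
                print(choice.delta.content, end="")


asyncio.run(main())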

Ancestors

livekit.agents.llm.LLM
abc.ABC

Methods

def chat(self, *, chat_ctx: llm.ChatContext, fnc_ctx: llm.FunctionContext | None = None, temperature: float | None = None, n: int | None = None, parallel_tool_calls: bool | None = None)

Start a completion against the assistant for the given chat context and return an AssistantLLMStream. The n and parallel_tool_calls parameters are not supported by OpenAI Assistants and are ignored with a warning.


class AssistantLoadOptions (assistant_id: str, thread_id: str | None)

Options for loading an existing assistant by id, optionally resuming an existing thread. If thread_id is None, a new thread is created on first use.

@dataclass
class AssistantLoadOptions:
    assistant_id: str
    thread_id: str | None

Class variables

var assistant_id : str
var thread_id : str | None
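
A brief sketch of the load path (the ids are hypothetical placeholders; per AssistantLLM._sync_openai above, a None thread_id triggers creation of a fresh thread):

from livekit.plugins.openai.beta import AssistantLoadOptions

# Resume an existing assistant and an existing conversation thread.
resume = AssistantLoadOptions(
    assistant_id="asst_abc123",   # hypothetical id
    thread_id="thread_abc123",    # hypothetical id
)

# Reuse the assistant but let _sync_openai() create a new thread.
fresh = AssistantLoadOptions(assistant_id="asst_abc123", thread_id=None)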
class AssistantOptions (create_options: AssistantCreateOptions | None = None, load_options: AssistantLoadOptions | None = None)

Options for creating (on-the-fly) or loading an assistant. Only one of create_options or load_options should be set.

@dataclass
class AssistantOptions:
    """Options for creating (on-the-fly) or loading an assistant. Only one of create_options or load_options should be set."""

    create_options: AssistantCreateOptions | None = None
    load_options: AssistantLoadOptions | None = None

Class variables

var create_options : AssistantCreateOptions | None
var load_options : AssistantLoadOptions | None
class OnFileUploadedInfo (type: "Literal['image']", original_file: llm.ChatImage, openai_file_object: FileObject)

Metadata describing a file that was uploaded to OpenAI on behalf of the chat context; passed to the OnFileUploaded callback.

@dataclass
class OnFileUploadedInfo:
    type: Literal["image"]
    original_file: llm.ChatImage
    openai_file_object: FileObject

Class variables

var openai_file_object : openai.types.file_object.FileObject
var original_file : llm.ChatImage
var type : Literal['image']
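
A hedged sketch of an on_file_uploaded handler, assuming OnFileUploaded is a plain callable that takes this dataclass and returns nothing:

from livekit.plugins.openai.beta import OnFileUploadedInfo


def on_file_uploaded(info: OnFileUploadedInfo) -> None:
    # Called after a ChatImage from the chat context is uploaded to OpenAI;
    # openai_file_object is the resulting openai FileObject.
    print(f"uploaded {info.type} as OpenAI file {info.openai_file_object.id}")

The handler is passed at construction time, e.g. AssistantLLM(assistant_opts=..., on_file_uploaded=on_file_uploaded).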