Module livekit.agents.llm

Sub-modules

livekit.agents.llm.chat_context
livekit.agents.llm.function_context
livekit.agents.llm.llm

Functions

def ai_callable(*, name: str | None = None, description: str | _UseDocMarker | None = None, auto_retry: bool = False) ‑> Callable
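
A minimal usage sketch (the weather function is illustrative, not part of the library): ai_callable marks a method of a FunctionContext subclass so the LLM can call it, and Annotated/TypeInfo supply the parameter description.

from typing import Annotated
from livekit.agents import llm

class AssistantFnc(llm.FunctionContext):
    @llm.ai_callable(description="Get the current weather for a location")
    async def get_weather(
        self,
        location: Annotated[str, llm.TypeInfo(description="The city to look up")],
    ) -> str:
        # hypothetical body; a real implementation would query a weather API
        return f"It is sunny in {location}."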

Classes

class CalledFunction (call_info: FunctionCallInfo, task: asyncio.Task[Any], result: Any | None = None, exception: BaseException | None = None)

@dataclass
class CalledFunction:
    call_info: FunctionCallInfo
    task: asyncio.Task[Any]
    result: Any | None = None
    exception: BaseException | None = None

Class variables

var call_info : FunctionCallInfo
var exception : BaseException | None
var result : Any | None
var task : asyncio.Task[Any]
class ChatAudio (frame: rtc.AudioFrame | list[rtc.AudioFrame])

@dataclass
class ChatAudio:
    frame: rtc.AudioFrame | list[rtc.AudioFrame]

Class variables

var frame : AudioFrame | list[AudioFrame]
class ChatChunk (choices: list[Choice] = <factory>)

@dataclass
class ChatChunk:
    choices: list[Choice] = field(default_factory=list)

Class variables

var choices : list[Choice]
class ChatContext (messages: list[ChatMessage] = <factory>)

@dataclass
class ChatContext:
    messages: list[ChatMessage] = field(default_factory=list)
    _metadata: dict[str, Any] = field(default_factory=dict, repr=False, init=False)

    def append(
        self, *, text: str = "", images: list[ChatImage] = [], role: ChatRole = "system"
    ) -> ChatContext:
        self.messages.append(ChatMessage.create(text=text, images=images, role=role))
        return self

    def copy(self) -> ChatContext:
        copied_chat_ctx = ChatContext(messages=[m.copy() for m in self.messages])
        copied_chat_ctx._metadata = self._metadata
        return copied_chat_ctx

Class variables

var messages : list[ChatMessage]

Methods

def append(self, *, text: str = '', images: list[ChatImage] = [], role: ChatRole = 'system') ‑> ChatContext
def copy(self) ‑> ChatContext
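
A short sketch of building a context; append() returns the context itself, so calls chain:

from livekit.agents import llm

chat_ctx = (
    llm.ChatContext()
    .append(role="system", text="You are a helpful assistant.")
    .append(role="user", text="What is the capital of France?")
)

snapshot = chat_ctx.copy()  # copies the message list; _metadata is shared by reference
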
class ChatImage (image: str | rtc.VideoFrame, inference_width: int | None = None, inference_height: int | None = None)

@dataclass
class ChatImage:
    image: str | rtc.VideoFrame
    inference_width: int | None = None
    inference_height: int | None = None
    _cache: dict[Any, Any] = field(default_factory=dict, repr=False, init=False)
    """_cache is used  by LLM implementations to store a processed version of the image
    for later use.
    """

Class variables

var image : str | VideoFrame
var inference_height : int | None
var inference_width : int | None
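
A hedged example of attaching an image; frame is assumed to be an rtc.VideoFrame captured elsewhere, and the inference dimensions are optional hints that LLM plugins may use to downscale the image before sending it to the model:

from livekit.agents import llm

img = llm.ChatImage(image=frame, inference_width=512, inference_height=512)
chat_ctx = llm.ChatContext().append(
    role="user", text="Describe this frame", images=[img]
)
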
class ChatMessage (role: ChatRole, id: str | None = None, name: str | None = None, content: ChatContent | list[ChatContent] | None = None, tool_calls: list[FunctionCallInfo] | None = None, tool_call_id: str | None = None, tool_exception: Exception | None = None)

@dataclass
class ChatMessage:
    role: ChatRole
    id: str | None = None  # used by the OAI realtime API
    name: str | None = None
    content: ChatContent | list[ChatContent] | None = None
    tool_calls: list[function_context.FunctionCallInfo] | None = None
    tool_call_id: str | None = None
    tool_exception: Exception | None = None
    _metadata: dict[str, Any] = field(default_factory=dict, repr=False, init=False)

    @staticmethod
    def create_tool_from_called_function(
        called_function: function_context.CalledFunction,
    ) -> "ChatMessage":
        if not called_function.task.done():
            raise ValueError("cannot create a tool result from a running ai function")

        tool_exception: Exception | None = None
        try:
            content = called_function.task.result()
        except BaseException as e:
            if isinstance(e, Exception):
                tool_exception = e
            content = f"Error: {e}"

        return ChatMessage(
            role="tool",
            name=called_function.call_info.function_info.name,
            content=content,
            tool_call_id=called_function.call_info.tool_call_id,
            tool_exception=tool_exception,
        )

    @staticmethod
    def create_tool_calls(
        called_functions: list[function_context.FunctionCallInfo],
    ) -> "ChatMessage":
        return ChatMessage(role="assistant", tool_calls=called_functions)

    @staticmethod
    def create(
        *, text: str = "", images: list[ChatImage] = [], role: ChatRole = "system"
    ) -> "ChatMessage":
        if len(images) == 0:
            return ChatMessage(role=role, content=text)
        else:
            content: list[ChatContent] = []
            if text:
                content.append(text)

            if len(images) > 0:
                content.extend(images)

            return ChatMessage(role=role, content=content)

    def copy(self):
        content = self.content
        if isinstance(content, list):
            content = content.copy()

        tool_calls = self.tool_calls
        if tool_calls is not None:
            tool_calls = tool_calls.copy()

        copied_msg = ChatMessage(
            role=self.role,
            name=self.name,
            content=content,
            tool_calls=tool_calls,
            tool_call_id=self.tool_call_id,
        )
        copied_msg._metadata = self._metadata
        return copied_msg

Class variables

var content : str | ChatImage | ChatAudio | list[str | ChatImage | ChatAudio] | None
var id : str | None
var name : str | None
var role : Literal['system', 'user', 'assistant', 'tool']
var tool_call_id : str | None
var tool_calls : list[FunctionCallInfo] | None
var tool_exception : Exception | None

Static methods

def create(*, text: str = '', images: list[ChatImage] = [], role: ChatRole = 'system') ‑> ChatMessage
def create_tool_calls(called_functions: list[FunctionCallInfo]) ‑> ChatMessage
def create_tool_from_called_function(called_function: CalledFunction) ‑> ChatMessage

Methods

def copy(self)
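
A sketch of replaying a tool call into chat history, assuming fnc_call is a FunctionCallInfo produced by an LLMStream and the code runs inside an async function:

from livekit.agents import llm

called = fnc_call.execute()
try:
    await called.task
except Exception:
    pass  # failures are recorded on the tool message below

chat_ctx.messages.append(llm.ChatMessage.create_tool_calls([fnc_call]))
chat_ctx.messages.append(llm.ChatMessage.create_tool_from_called_function(called))
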
class Choice (delta: ChoiceDelta, index: int = 0)

@dataclass
class Choice:
    delta: ChoiceDelta
    index: int = 0

Class variables

var delta : ChoiceDelta
var index : int
class ChoiceDelta (role: ChatRole, content: str | None = None, tool_calls: list[FunctionCallInfo] | None = None)

@dataclass
class ChoiceDelta:
    role: ChatRole
    content: str | None = None
    tool_calls: list[function_context.FunctionCallInfo] | None = None

Class variables

var content : str | None
var role : Literal['system', 'user', 'assistant', 'tool']
var tool_calls : list[FunctionCallInfo] | None
class FunctionArgInfo (name: str, description: str, type: type, default: Any, choices: tuple | None)

@dataclass(frozen=True)
class FunctionArgInfo:
    name: str
    description: str
    type: type
    default: Any
    choices: tuple | None

Class variables

var choices : tuple | None
var default : Any
var description : str
var name : str
var type : type
class FunctionCallInfo (tool_call_id: str, function_info: FunctionInfo, raw_arguments: str, arguments: dict[str, Any])

@dataclass(frozen=True)
class FunctionCallInfo:
    tool_call_id: str
    function_info: FunctionInfo
    raw_arguments: str
    arguments: dict[str, Any]

    def execute(self) -> CalledFunction:
        function_info = self.function_info
        func = functools.partial(function_info.callable, **self.arguments)
        if asyncio.iscoroutinefunction(function_info.callable):
            task = asyncio.create_task(func())
        else:
            task = asyncio.create_task(asyncio.to_thread(func))

        called_fnc = CalledFunction(call_info=self, task=task)

        def _on_done(fut):
            try:
                called_fnc.result = fut.result()
            except BaseException as e:
                called_fnc.exception = e

        task.add_done_callback(_on_done)
        return called_fnc

Class variables

var arguments : dict[str, typing.Any]
var function_info : FunctionInfo
var raw_arguments : str
var tool_call_id : str

Methods

def execute(self) ‑> CalledFunction
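
execute() schedules the call and returns immediately: coroutine functions run directly on the event loop, while sync callables are dispatched through asyncio.to_thread. A small sketch, assuming call_info is a FunctionCallInfo and the code runs inside an async function:

called = call_info.execute()
try:
    result = await called.task
except Exception as e:
    print("tool raised:", e)  # the same exception is also stored on called.exception
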
class FunctionContext
class FunctionContext:
    def __init__(self) -> None:
        self._fncs = dict[str, FunctionInfo]()

        for _, member in inspect.getmembers(self, predicate=inspect.ismethod):
            if hasattr(member, METADATA_ATTR):
                self._register_ai_function(member)

    def ai_callable(
        self,
        *,
        name: str | None = None,
        description: str | _UseDocMarker | None = None,
        auto_retry: bool = True,
    ) -> Callable:
        def deco(f):
            _set_metadata(f, name=name, desc=description, auto_retry=auto_retry)
            self._register_ai_function(f)
            return f  # return the function so the decorated name stays bound

        return deco

    def _register_ai_function(self, fnc: Callable) -> None:
        if not hasattr(fnc, METADATA_ATTR):
            logger.warning(f"function {fnc.__name__} does not have ai metadata")
            return

        metadata: _AIFncMetadata = getattr(fnc, METADATA_ATTR)
        fnc_name = metadata.name
        if fnc_name in self._fncs:
            raise ValueError(f"duplicate ai_callable name: {fnc_name}")

        sig = inspect.signature(fnc)

        # get_type_hints with include_extras=True is needed when using Annotated
        # (typing.get_args on the raw parameter annotation returns an empty tuple otherwise)
        type_hints = typing.get_type_hints(
            fnc, include_extras=True
        )  # Annotated[T, ...] -> T
        args = dict[str, FunctionArgInfo]()

        for name, param in sig.parameters.items():
            if param.kind not in (
                inspect.Parameter.POSITIONAL_OR_KEYWORD,
                inspect.Parameter.KEYWORD_ONLY,
            ):
                raise ValueError(f"{fnc_name}: unsupported parameter kind {param.kind}")

            inner_th, type_info = _extract_types(type_hints[name])

            if not is_type_supported(inner_th):
                raise ValueError(
                    f"{fnc_name}: unsupported type {inner_th} for parameter {name}"
                )

            desc = type_info.description if type_info else ""
            choices = type_info.choices if type_info else None

            is_optional, optional_inner = _is_optional_type(inner_th)
            if is_optional:
                # when the type is optional, only the inner type is relevant
                # the argument info for default would be None
                inner_th = optional_inner

            if issubclass(inner_th, enum.Enum) and not choices:
                # the enum must be a str or int (and at least one value)
                # this is verified by is_type_supported
                choices = tuple([item.value for item in inner_th])
                inner_th = type(choices[0])

            args[name] = FunctionArgInfo(
                name=name,
                description=desc,
                type=inner_th,
                default=param.default,
                choices=choices,
            )

        self._fncs[metadata.name] = FunctionInfo(
            name=metadata.name,
            description=metadata.description,
            auto_retry=metadata.auto_retry,
            callable=fnc,
            arguments=args,
        )

    @property
    def ai_functions(self) -> dict[str, FunctionInfo]:
        return self._fncs

Instance variables

prop ai_functions : dict[str, FunctionInfo]

Methods

def ai_callable(self, *, name: str | None = None, description: str | _UseDocMarker | None = None, auto_retry: bool = True) ‑> Callable
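
A minimal sketch of registering a function on an existing context rather than subclassing (the add function is illustrative):

from livekit.agents import llm

fnc_ctx = llm.FunctionContext()

@fnc_ctx.ai_callable(name="add", description="Add two integers")
def add(a: int, b: int) -> int:
    return a + b

assert "add" in fnc_ctx.ai_functions
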
class FunctionInfo (name: str, description: str, auto_retry: bool, callable: Callable, arguments: dict[str, FunctionArgInfo])

@dataclass(frozen=True)
class FunctionInfo:
    name: str
    description: str
    auto_retry: bool
    callable: Callable
    arguments: dict[str, FunctionArgInfo]

Class variables

var arguments : dict[str, FunctionArgInfo]
var auto_retry : bool
var callable : Callable
var description : str
var name : str
class LLM

Abstract base class for LLM implementations. Concrete plugins implement chat() to return an LLMStream.

class LLM(abc.ABC):
    @abc.abstractmethod
    def chat(
        self,
        *,
        chat_ctx: ChatContext,
        fnc_ctx: function_context.FunctionContext | None = None,
        temperature: float | None = None,
        n: int | None = None,
        parallel_tool_calls: bool | None = None,
    ) -> "LLMStream": ...

Ancestors

  • abc.ABC

Subclasses

  • livekit.plugins.anthropic.llm.LLM
  • AssistantLLM
  • livekit.plugins.openai.llm.LLM

Methods

def chat(self, *, chat_ctx: ChatContext, fnc_ctx: FunctionContext | None = None, temperature: float | None = None, n: int | None = None, parallel_tool_calls: bool | None = None) ‑> LLMStream
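
A usage sketch, assuming the OpenAI plugin is installed and chat_ctx is a prepared ChatContext; chat() returns an LLMStream that is consumed with async iteration:

from livekit.plugins import openai

model = openai.LLM()
stream = model.chat(chat_ctx=chat_ctx)
async for chunk in stream:
    for choice in chunk.choices:
        if choice.delta.content:
            print(choice.delta.content, end="", flush=True)
await stream.aclose()
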
class LLMStream (*, chat_ctx: ChatContext, fnc_ctx: FunctionContext | None)

Abstract base class for a streamed LLM response. Iterate it asynchronously to receive ChatChunks; any function calls the model requests are collected and can be run with execute_functions().

class LLMStream(abc.ABC):
    def __init__(
        self, *, chat_ctx: ChatContext, fnc_ctx: function_context.FunctionContext | None
    ) -> None:
        self._function_calls_info: list[function_context.FunctionCallInfo] = []
        self._tasks = set[asyncio.Task[Any]]()
        self._chat_ctx = chat_ctx
        self._fnc_ctx = fnc_ctx

    @property
    def function_calls(self) -> list[function_context.FunctionCallInfo]:
        """List of called functions from this stream."""
        return self._function_calls_info

    @property
    def chat_ctx(self) -> ChatContext:
        """The initial chat context of this stream."""
        return self._chat_ctx

    @property
    def fnc_ctx(self) -> function_context.FunctionContext | None:
        """The function context of this stream."""
        return self._fnc_ctx

    def execute_functions(self) -> list[function_context.CalledFunction]:
        """Execute all functions concurrently of this stream."""
        called_functions: list[function_context.CalledFunction] = []
        for fnc_info in self._function_calls_info:
            called_fnc = fnc_info.execute()
            self._tasks.add(called_fnc.task)
            called_fnc.task.add_done_callback(self._tasks.remove)
            called_functions.append(called_fnc)

        return called_functions

    async def aclose(self) -> None:
        await utils.aio.gracefully_cancel(*self._tasks)

    def __aiter__(self) -> AsyncIterator[ChatChunk]:
        return self

    @abc.abstractmethod
    async def __anext__(self) -> ChatChunk: ...

Ancestors

  • abc.ABC

Subclasses

  • livekit.plugins.anthropic.llm.LLMStream
  • AssistantLLMStream
  • livekit.plugins.openai.llm.LLMStream

Instance variables

prop chat_ctx : ChatContext

The initial chat context of this stream.

prop fnc_ctx : FunctionContext | None

The function context of this stream.

prop function_calls : list[FunctionCallInfo]

List of function calls collected from this stream.


Methods

async def aclose(self) ‑> None
def execute_functions(self) ‑> list[CalledFunction]

Execute all of this stream's function calls concurrently.
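
A sketch of draining tool calls once the stream has finished, assuming the model requested one or more functions:

import asyncio

calls = stream.execute_functions()  # schedules every collected call
if calls:
    await asyncio.gather(*(c.task for c in calls), return_exceptions=True)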

class TypeInfo (description: str, choices: tuple | list[Any] = ())

@dataclass(frozen=True, init=False)
class TypeInfo:
    description: str
    choices: tuple

    def __init__(self, description: str, choices: tuple | list[Any] = tuple()) -> None:
        object.__setattr__(self, "description", description)

        if isinstance(choices, list):
            choices = tuple(choices)

        object.__setattr__(self, "choices", choices)

Class variables

var choices : tuple
var description : str
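
TypeInfo is attached to a parameter through typing.Annotated to give the model a description and an allowed set of values. A brief sketch (set_mode and fnc_ctx are illustrative):

from typing import Annotated
from livekit.agents import llm

fnc_ctx = llm.FunctionContext()

@fnc_ctx.ai_callable(description="Set the thermostat mode")
def set_mode(
    mode: Annotated[
        str,
        llm.TypeInfo(description="Target mode", choices=["heat", "cool", "off"]),
    ],
) -> None:
    ...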