
anthropic

Integration with Anthropic's API.

Anthropic

Bases: Model

Thin wrapper around the anthropic.Anthropic client.

This wrapper converts the input and output types specified by the user at a higher level into arguments for the anthropic.Anthropic client.
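
A minimal quick-start sketch, not taken from the library itself: it assumes the anthropic package is installed, ANTHROPIC_API_KEY is set in the environment, and the model name shown is purely illustrative.

import anthropic
from outlines.models.anthropic import from_anthropic

client = anthropic.Anthropic()  # reads ANTHROPIC_API_KEY from the environment
model = from_anthropic(client, "claude-3-5-haiku-latest")  # illustrative model name

# max_tokens is required by Anthropic's Messages API and is forwarded
# unchanged as an inference kwarg.
print(model.generate("Write a haiku about thin wrappers.", max_tokens=256))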

Source code in outlines/models/anthropic.py
class Anthropic(Model):
    """Thin wrapper around the `anthropic.Anthropic` client.

    This wrapper is used to convert the input and output types specified by the
    users at a higher level to arguments to the `anthropic.Anthropic` client.

    """
    def __init__(
        self, client: "AnthropicClient", model_name: Optional[str] = None
    ):
        """
        Parameters
        ----------
        client
            An `anthropic.Anthropic` client.
        model_name
            The name of the model to use.

        """
        self.client = client
        self.model_name = model_name
        self.type_adapter = AnthropicTypeAdapter()

    def generate(
        self,
        model_input: Union[str, Vision],
        output_type: Optional[Any] = None,
        **inference_kwargs: Any,
    ) -> str:
        """Generate text using Anthropic.

        Parameters
        ----------
        model_input
            The prompt based on which the model will generate a response.
        output_type
            As structured generation is not supported by Anthropic, the value
            of this argument must be `None`. Otherwise, an error will be
            raised at runtime.
        **inference_kwargs
            Additional keyword arguments to pass to the client.

        Returns
        -------
        str
            The response generated by the model.

        """
        messages = self.type_adapter.format_input(model_input)

        if output_type is not None:
            raise NotImplementedError(
                f"The type {output_type} is not available with Anthropic."
            )

        if (
            "model" not in inference_kwargs
            and self.model_name is not None
        ):
            inference_kwargs["model"] = self.model_name

        completion = self.client.messages.create(
            **messages,
            **inference_kwargs,
        )
        return completion.content[0].text

    def generate_stream(
        self,
        model_input: Union[str, Vision],
        output_type: Optional[Any] = None,
        **inference_kwargs: Any,
    ) -> Iterator[str]:
        """Stream text using Anthropic.

        Parameters
        ----------
        model_input
            The prompt based on which the model will generate a response.
        output_type
            As structured generation is not supported by Anthropic, the value
            of this argument must be `None`. Otherwise, an error will be
            raised at runtime.
        **inference_kwargs
            Additional keyword arguments to pass to the client.

        Returns
        -------
        Iterator[str]
            An iterator that yields the text generated by the model.

        """
        messages = self.type_adapter.format_input(model_input)

        if output_type is not None:
            raise NotImplementedError(
                f"The type {output_type} is not available with Anthropic."
            )

        if (
            "model" not in inference_kwargs
            and self.model_name is not None
        ):
            inference_kwargs["model"] = self.model_name

        stream = self.client.messages.create(
            **messages,
            stream=True,
            **inference_kwargs,
        )

        for chunk in stream:
            if (
                chunk.type == "content_block_delta"
                and chunk.delta.type == "text_delta"
            ):
                yield chunk.delta.text

__init__(client, model_name=None)

Parameters:

client (Anthropic, required)
    An anthropic.Anthropic client.
model_name (Optional[str], default None)
    The name of the model to use.
Source code in outlines/models/anthropic.py
def __init__(
    self, client: "AnthropicClient", model_name: Optional[str] = None
):
    """
    Parameters
    ----------
    client
        An `anthropic.Anthropic` client.
    model_name
        The name of the model to use.

    """
    self.client = client
    self.model_name = model_name
    self.type_adapter = AnthropicTypeAdapter()

generate(model_input, output_type=None, **inference_kwargs)

Generate text using Anthropic.

Parameters:

model_input (Union[str, Vision], required)
    The prompt based on which the model will generate a response.
output_type (Optional[Any], default None)
    As structured generation is not supported by Anthropic, the value of this argument must be None. Otherwise, an error will be raised at runtime.
**inference_kwargs (Any, default {})
    Additional keyword arguments to pass to the client.

Returns:

str
    The response generated by the model.

Source code in outlines/models/anthropic.py
def generate(
    self,
    model_input: Union[str, Vision],
    output_type: Optional[Any] = None,
    **inference_kwargs: Any,
) -> str:
    """Generate text using Anthropic.

    Parameters
    ----------
    model_input
        The prompt based on which the model will generate a response.
    output_type
        As structured generation is not supported by Anthropic, the value
        of this argument must be `None`. Otherwise, an error will be
        raised at runtime.
    **inference_kwargs
        Additional keyword arguments to pass to the client.

    Returns
    -------
    str
        The response generated by the model.

    """
    messages = self.type_adapter.format_input(model_input)

    if output_type is not None:
        raise NotImplementedError(
            f"The type {output_type} is not available with Anthropic."
        )

    if (
        "model" not in inference_kwargs
        and self.model_name is not None
    ):
        inference_kwargs["model"] = self.model_name

    completion = self.client.messages.create(
        **messages,
        **inference_kwargs,
    )
    return completion.content[0].text
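
A hedged example of calling generate with per-call inference kwargs; the model names and parameter values are illustrative, and max_tokens is required by Anthropic's Messages API.

import anthropic
from outlines.models.anthropic import Anthropic  # the Outlines wrapper, not the SDK client

model = Anthropic(anthropic.Anthropic(), model_name="claude-3-5-haiku-latest")

result = model.generate(
    "Summarize the plot of Hamlet in two sentences.",
    max_tokens=300,      # required by the Messages API, forwarded as-is
    temperature=0.2,     # any other Messages API argument can be passed the same way
    model="claude-3-5-sonnet-latest",  # overrides the model_name set at construction
)
print(result)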

generate_stream(model_input, output_type=None, **inference_kwargs)

Stream text using Anthropic.

Parameters:

model_input (Union[str, Vision], required)
    The prompt based on which the model will generate a response.
output_type (Optional[Any], default None)
    As structured generation is not supported by Anthropic, the value of this argument must be None. Otherwise, an error will be raised at runtime.
**inference_kwargs (Any, default {})
    Additional keyword arguments to pass to the client.

Returns:

Iterator[str]
    An iterator that yields the text generated by the model.

Source code in outlines/models/anthropic.py
def generate_stream(
    self,
    model_input: Union[str, Vision],
    output_type: Optional[Any] = None,
    **inference_kwargs: Any,
) -> Iterator[str]:
    """Stream text using Anthropic.

    Parameters
    ----------
    model_input
        The prompt based on which the model will generate a response.
    output_type
        As structured generation is not supported by Anthropic, the value
        of this argument must be `None`. Otherwise, an error will be
        raised at runtime.
    **inference_kwargs
        Additional keyword arguments to pass to the client.

    Returns
    -------
    Iterator[str]
        An iterator that yields the text generated by the model.

    """
    messages = self.type_adapter.format_input(model_input)

    if output_type is not None:
        raise NotImplementedError(
            f"The type {output_type} is not available with Anthropic."
        )

    if (
        "model" not in inference_kwargs
        and self.model_name is not None
    ):
        inference_kwargs["model"] = self.model_name

    stream = self.client.messages.create(
        **messages,
        stream=True,
        **inference_kwargs,
    )

    for chunk in stream:
        if (
            chunk.type == "content_block_delta"
            and chunk.delta.type == "text_delta"
        ):
            yield chunk.delta.text
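
A short streaming sketch under the same assumptions as above (anthropic installed, API key set, illustrative model name): the iterator yields text deltas as they arrive.

import anthropic
from outlines.models.anthropic import Anthropic

model = Anthropic(anthropic.Anthropic(), model_name="claude-3-5-haiku-latest")

for chunk in model.generate_stream("Tell a one-paragraph story.", max_tokens=500):
    print(chunk, end="", flush=True)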

AnthropicTypeAdapter

Bases: ModelTypeAdapter

Type adapter for the Anthropic model.

AnthropicTypeAdapter is responsible for preparing the arguments to Anthropic's messages.create method: the input (prompt and possibly image). Anthropic does not support defining the output type, so format_output_type is not implemented.
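
For illustration, formatting a plain string produces the messages dictionary that is unpacked into messages.create (a sketch based on the source below):

from outlines.models.anthropic import AnthropicTypeAdapter

adapter = AnthropicTypeAdapter()
print(adapter.format_input("Hello!"))
# {'messages': [{'role': 'user', 'content': 'Hello!'}]}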

Source code in outlines/models/anthropic.py
class AnthropicTypeAdapter(ModelTypeAdapter):
    """Type adapter for the `Anthropic` model.

    `AnthropicTypeAdapter` is responsible for preparing the arguments to
    Anthropic's `messages.create` method: the input (prompt and possibly
    image).
    Anthropic does not support defining the output type, so
    `format_output_type` is not implemented.

    """

    def format_input(self, model_input: Union[str, Vision]) -> dict:
        """Generate the `messages` argument to pass to the client.

        Parameters
        ----------
        model_input
            The input provided by the user.

        Returns
        -------
        dict
            The `messages` argument to pass to the client.

        """
        if isinstance(model_input, str):
            return self.format_str_model_input(model_input)
        elif isinstance(model_input, Vision):
            return self.format_vision_model_input(model_input)
        raise TypeError(
            f"The input type {input} is not available with Anthropic. "
            "The only available types are `str` and `Vision`."
        )

    def format_str_model_input(self, model_input: str) -> dict:
        return {
            "messages": [
                {
                    "role": "user",
                    "content": model_input,
                }
            ]
        }

    def format_vision_model_input(self, model_input: Vision) -> dict:
        return {
            "messages": [
                {
                    "role": "user",
                    "content": [
                        {
                            "type": "image",
                            "source": {
                                "type": "base64",
                                "media_type": model_input.image_format,
                                "data": model_input.image_str,
                            },
                        },
                        {"type": "text", "text": model_input.prompt},
                    ],
                }
            ]
        }

    def format_output_type(self, output_type):
        """Not implemented for Anthropic."""
        if output_type is None:
            return {}
        else:
            raise NotImplementedError(
                f"The output type {output_type} is not available with "
                "Anthropic."
            )

format_input(model_input)

Generate the messages argument to pass to the client.

Parameters:

model_input (Union[str, Vision], required)
    The input provided by the user.

Returns:

dict
    The messages argument to pass to the client.

Source code in outlines/models/anthropic.py
def format_input(self, model_input: Union[str, Vision]) -> dict:
    """Generate the `messages` argument to pass to the client.

    Parameters
    ----------
    model_input
        The input provided by the user.

    Returns
    -------
    dict
        The `messages` argument to pass to the client.

    """
    if isinstance(model_input, str):
        return self.format_str_model_input(model_input)
    elif isinstance(model_input, Vision):
        return self.format_vision_model_input(model_input)
    raise TypeError(
        f"The input type {input} is not available with Anthropic. "
        "The only available types are `str` and `Vision`."
    )

format_output_type(output_type)

Not implemented for Anthropic.

Source code in outlines/models/anthropic.py
def format_output_type(self, output_type):
    """Not implemented for Anthropic."""
    if output_type is None:
        return {}
    else:
        raise NotImplementedError(
            f"The output type {output_type} is not available with "
            "Anthropic."
        )

from_anthropic(client, model_name=None)

Create an Outlines Anthropic model instance from an anthropic.Anthropic client instance.

Parameters:

client (Anthropic, required)
    An anthropic.Anthropic client instance.
model_name (Optional[str], default None)
    The name of the model to use.

Returns:

Anthropic
    An Outlines Anthropic model instance.

Source code in outlines/models/anthropic.py
def from_anthropic(
    client: "AnthropicClient", model_name: Optional[str] = None
) -> Anthropic:
    """Create an Outlines `Anthropic` model instance from an
    `anthropic.Anthropic` client instance.

    Parameters
    ----------
    client
        An `anthropic.Anthropic` client instance.
    model_name
        The name of the model to use.

    Returns
    -------
    Anthropic
        An Outlines `Anthropic` model instance.

    """
    return Anthropic(client, model_name)
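
A usage sketch with an illustrative model name: when no model_name is given to from_anthropic, the model must instead be passed to each generation call.

import anthropic
from outlines.models.anthropic import from_anthropic

model = from_anthropic(anthropic.Anthropic())  # no default model name
text = model.generate(
    "Name three prime numbers.",
    model="claude-3-5-haiku-latest",  # illustrative; needed here because no model_name was set
    max_tokens=64,
)
print(text)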