Skip to content

OpenAI Backend

OpenAIBackendMixin

Bases: Backend

Backend for interacting with OpenAI's API.

Attributes:

Name Type Description
api_key

The API key for the OpenAI service.

client

The client for the OpenAI service.

serialized

The serializer for the OpenAI backend.

response_format

The format for the response.

Source code in mbodied/agents/backends/openai_backend.py
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
class OpenAIBackendMixin(Backend):
    """Backend for interacting with OpenAI's API.

    Attributes:
        api_key: The API key for the OpenAI service.
        client: The client for the OpenAI service.
        serialized: The serializer for the OpenAI backend.
        response_format: The format for the response.
    """

    # Default system prompt prepended when the caller supplies no context.
    INITIAL_CONTEXT = [
        Message(role="system", content="You are a robot with advanced spatial reasoning."),
    ]
    DEFAULT_MODEL = "gpt-4o"

    def __init__(
        self,
        api_key: str | None = None,
        client: Any | None = None,
        response_format: str | None = None,
        aclient: bool = False,
        **kwargs,
    ):
        """Initializes the OpenAIBackend with the given API key and client.

        Args:
            api_key: The API key for the OpenAI service. Falls back to the
                OPENAI_API_KEY or MBODI_API_KEY environment variables.
            client: An optional pre-built client for the OpenAI service.
            response_format: The format for the response.
            aclient: Whether to also build the asynchronous client.
            **kwargs: Additional keyword arguments forwarded to the client
                constructor when one is built here.
        """
        self.api_key = api_key or os.getenv("OPENAI_API_KEY") or os.getenv("MBODI_API_KEY")
        self.client = client
        if self.client is None:
            # Lazy import: the openai package is only required when no client is injected.
            from openai import AsyncOpenAI, OpenAI

            # "model_src" is not a valid OpenAI client kwarg; drop it before construction.
            kwargs.pop("model_src", None)
            # "any_key" is a placeholder so construction succeeds without a real key
            # (presumably for key-less/local endpoints -- confirm).
            self.client = OpenAI(api_key=self.api_key or "any_key", **kwargs)
            if aclient:
                self.aclient = AsyncOpenAI(api_key=self.api_key or "any_key", **kwargs)

        self.serialized = OpenAISerializer
        self.response_format = response_format

    @backoff.on_exception(
        backoff.expo,
        ERRORS,
        max_tries=3,
        on_backoff=lambda details: print(f"Backing off {details['wait']:.1f} seconds after {details['tries']} tries."),  # noqa
    )
    def predict(
        self, message: Message, context: List[Message] | None = None, model: Any | None = None, **kwargs
    ) -> str:
        """Create a completion based on the given message and context.

        Retries up to 3 times with exponential backoff on the errors in ERRORS.

        Args:
            message (Message): The message to process.
            context (Optional[List[Message]]): The context of messages.
            model (Optional[Any]): The model used for processing the messages.
            **kwargs: Additional arguments for the completions API; may override
                the default temperature and max_tokens.

        Returns:
            str: The result of the completion.
        """
        # An empty context list also falls back to the defaults (falsy check).
        context = context or self.INITIAL_CONTEXT
        model = model or self.DEFAULT_MODEL
        serialized_messages = [self.serialized(msg).serialize() for msg in context + [message]]

        # setdefault lets callers override these via **kwargs; previously
        # temperature/max_tokens were passed explicitly alongside **kwargs,
        # so supplying either in kwargs raised a duplicate-keyword TypeError.
        kwargs.setdefault("temperature", 0)
        kwargs.setdefault("max_tokens", 1000)
        completion = self.client.chat.completions.create(
            model=model,
            messages=serialized_messages,
            **kwargs,
        )
        return completion.choices[0].message.content

    def stream(self, message: Message, context: List[Message] | None = None, model: str = "gpt-4o", **kwargs):
        """Streams a completion for the given messages using the OpenAI API standard.

        Args:
            message: Message to be sent to the completion API.
            context: The context of the messages.
            model: The model to be used for the completion.
            **kwargs: Additional arguments for the completions API; may override
                the default temperature.

        Yields:
            str: The next chunk of completion text ("" for empty delta chunks).
        """
        model = model or self.DEFAULT_MODEL
        context = context or self.INITIAL_CONTEXT
        serialized_messages = [self.serialized(msg).serialize() for msg in context + [message]]
        # See predict: setdefault avoids a duplicate-keyword TypeError when the
        # caller passes temperature in **kwargs.
        kwargs.setdefault("temperature", 0)
        stream = self.client.chat.completions.create(
            messages=serialized_messages,
            model=model,
            stream=True,
            **kwargs,
        )
        for chunk in stream:
            yield chunk.choices[0].delta.content or ""

    async def astream(self, message: Message, context: List[Message] | None = None, model: str = "gpt-4o", **kwargs):
        """Streams a completion asynchronously for the given messages using the OpenAI API standard.

        Args:
            message: Message to be sent to the completion API.
            context: The context of the messages.
            model: The model to be used for the completion.
            **kwargs: Additional arguments for the completions API; may override
                the default temperature.

        Yields:
            str: The next chunk of completion text ("" for empty delta chunks).

        Raises:
            AttributeError: If the asynchronous client was not initialized.
        """
        if not hasattr(self, "aclient"):
            raise AttributeError("AsyncOpenAI client not initialized. Pass in aclient=True to the constructor.")
        model = model or self.DEFAULT_MODEL
        context = context or self.INITIAL_CONTEXT
        serialized_messages = [self.serialized(msg).serialize() for msg in context + [message]]
        kwargs.setdefault("temperature", 0)
        stream = await self.aclient.chat.completions.create(
            messages=serialized_messages,
            model=model,
            stream=True,
            **kwargs,
        )
        async for chunk in stream:
            yield chunk.choices[0].delta.content or ""

__init__(api_key=None, client=None, response_format=None, aclient=False, **kwargs)

Initializes the OpenAIBackend with the given API key and client.

Parameters:

Name Type Description Default
api_key str | None

The API key for the OpenAI service.

None
client Any | None

An optional client for the OpenAI service.

None
response_format str

The format for the response.

None
aclient

Whether to use the asynchronous client.

False
**kwargs

Additional keyword arguments.

{}
Source code in mbodied/agents/backends/openai_backend.py
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
def __init__(
    self,
    api_key: str | None = None,
    client: Any | None = None,
    response_format: str | None = None,
    aclient: bool = False,
    **kwargs,
):
    """Initializes the OpenAIBackend with the given API key and client.

    Args:
        api_key: The API key for the OpenAI service. Falls back to the
            OPENAI_API_KEY or MBODI_API_KEY environment variables.
        client: An optional pre-built client for the OpenAI service.
        response_format: The format for the response.
        aclient: Whether to also build the asynchronous client.
        **kwargs: Additional keyword arguments forwarded to the client
            constructor when one is built here.
    """
    self.api_key = api_key or os.getenv("OPENAI_API_KEY") or os.getenv("MBODI_API_KEY")
    self.client = client
    if self.client is None:
        # Lazy import: the openai package is only required when no client is injected.
        from openai import AsyncOpenAI, OpenAI

        # "model_src" is not a valid OpenAI client kwarg; drop it before construction.
        kwargs.pop("model_src", None)
        # "any_key" is a placeholder so construction succeeds without a real key
        # (presumably for key-less/local endpoints -- confirm).
        self.client = OpenAI(api_key=self.api_key or "any_key", **kwargs)
        if aclient:
            self.aclient = AsyncOpenAI(api_key=self.api_key or "any_key", **kwargs)

    # Serializer class used to convert Message objects to OpenAI chat format.
    self.serialized = OpenAISerializer
    self.response_format = response_format

astream(message, context=None, model='gpt-4o', **kwargs) async

Streams a completion asynchronously for the given messages using the OpenAI API standard.

Parameters:

Name Type Description Default
message Message

Message to be sent to the completion API.

required
context List[Message]

The context of the messages.

None
model str

The model to be used for the completion.

'gpt-4o'
**kwargs

Additional keyword arguments.

{}
Source code in mbodied/agents/backends/openai_backend.py
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
async def astream(self, message: Message, context: List[Message] = None, model: str = "gpt-4o", **kwargs):
    """Asynchronously stream completion text chunks from the OpenAI chat API.

    Args:
        message: Message to be sent to the completion API.
        context: The context of the messages.
        model: The model to be used for the completion.
        **kwargs: Additional keyword arguments.

    Yields:
        str: The next chunk of completion text ("" for empty delta chunks).

    Raises:
        AttributeError: If the asynchronous client was not initialized.
    """
    if not hasattr(self, "aclient"):
        raise AttributeError("AsyncOpenAI client not initialized. Pass in aclient=True to the constructor.")
    chosen_model = model or self.DEFAULT_MODEL
    history = context or self.INITIAL_CONTEXT
    payload = [self.serialized(item).serialize() for item in [*history, message]]
    response = await self.aclient.chat.completions.create(
        messages=payload,
        model=chosen_model,
        temperature=0,
        stream=True,
        **kwargs,
    )
    async for piece in response:
        yield piece.choices[0].delta.content or ""

predict(message, context=None, model=None, **kwargs)

Create a completion based on the given message and context.

Parameters:

Name Type Description Default
message Message

The message to process.

required
context Optional[List[Message]]

The context of messages.

None
model Optional[Any]

The model used for processing the messages.

None
**kwargs

Additional keyword arguments.

{}

Returns:

Name Type Description
str str

The result of the completion.

Source code in mbodied/agents/backends/openai_backend.py
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
@backoff.on_exception(
    backoff.expo,
    ERRORS,
    max_tries=3,
    on_backoff=lambda details: print(f"Backing off {details['wait']:.1f} seconds after {details['tries']} tries."),  # noqa
)
def predict(
    self, message: Message, context: List[Message] | None = None, model: Any | None = None, **kwargs
) -> str:
    """Create a completion based on the given message and context.

    Retries up to 3 times with exponential backoff on the errors in ERRORS.

    Args:
        message (Message): The message to process.
        context (Optional[List[Message]]): The context of messages.
        model (Optional[Any]): The model used for processing the messages.
        **kwargs: Additional keyword arguments.

    Returns:
        str: The result of the completion.
    """
    # An empty context list also falls back to the defaults (falsy check).
    context = context or self.INITIAL_CONTEXT
    model = model or self.DEFAULT_MODEL
    serialized_messages = [self.serialized(msg).serialize() for msg in context + [message]]

    # NOTE(review): temperature/max_tokens are fixed here; passing either via
    # **kwargs raises a duplicate-keyword TypeError -- consider kwargs.setdefault.
    completion = self.client.chat.completions.create(
        model=model,
        messages=serialized_messages,
        temperature=0,
        max_tokens=1000,
        **kwargs,
    )
    return completion.choices[0].message.content

stream(message, context=None, model='gpt-4o', **kwargs)

Streams a completion for the given messages using the OpenAI API standard.

Parameters:

Name Type Description Default
message Message

Message to be sent to the completion API.

required
context List[Message]

The context of the messages.

None
model str

The model to be used for the completion.

'gpt-4o'
**kwargs

Additional keyword arguments.

{}
Source code in mbodied/agents/backends/openai_backend.py
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
def stream(self, message: Message, context: List[Message] = None, model: str = "gpt-4o", **kwargs):
    """Stream completion text chunks from the OpenAI chat API.

    Args:
        message: Message to be sent to the completion API.
        context: The context of the messages.
        model: The model to be used for the completion.
        **kwargs: Additional keyword arguments.

    Yields:
        str: The next chunk of completion text ("" for empty delta chunks).
    """
    chosen_model = model or self.DEFAULT_MODEL
    history = context or self.INITIAL_CONTEXT
    payload = [self.serialized(item).serialize() for item in [*history, message]]
    response = self.client.chat.completions.create(
        messages=payload,
        model=chosen_model,
        temperature=0,
        stream=True,
        **kwargs,
    )
    for piece in response:
        yield piece.choices[0].delta.content or ""

OpenAISerializer

Bases: Serializer

Serializer for OpenAI-specific data formats.

Source code in mbodied/agents/backends/openai_backend.py
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
class OpenAISerializer(Serializer):
    """Serializer for OpenAI-specific data formats."""

    @classmethod
    def serialize_image(cls, image: Image) -> dict[str, Any]:
        """Serialize an image into an OpenAI image_url content part.

        Args:
            image: The image to be serialized.

        Returns:
            A dictionary representing the serialized image.
        """
        url_part = {"url": image.url}
        return {"type": "image_url", "image_url": url_part}

    @classmethod
    def serialize_text(cls, text: str) -> dict[str, Any]:
        """Serialize a plain string into an OpenAI text content part.

        Args:
            text: The text to be serialized.

        Returns:
            A dictionary representing the serialized text.
        """
        return dict(type="text", text=text)

serialize_image(image) classmethod

Serializes an image to the OpenAI format.

Parameters:

Name Type Description Default
image Image

The image to be serialized.

required

Returns:

Type Description
dict[str, Any]

A dictionary representing the serialized image.

Source code in mbodied/agents/backends/openai_backend.py
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
@classmethod
def serialize_image(cls, image: Image) -> dict[str, Any]:
    """Serialize an image into an OpenAI image_url content part.

    Args:
        image: The image to be serialized.

    Returns:
        A dictionary representing the serialized image.
    """
    url_part = {"url": image.url}
    return {"type": "image_url", "image_url": url_part}

serialize_text(text) classmethod

Serializes a text string to the OpenAI format.

Parameters:

Name Type Description Default
text str

The text to be serialized.

required

Returns:

Type Description
dict[str, Any]

A dictionary representing the serialized text.

Source code in mbodied/agents/backends/openai_backend.py
57
58
59
60
61
62
63
64
65
66
67
@classmethod
def serialize_text(cls, text: str) -> dict[str, Any]:
    """Serialize a plain string into an OpenAI text content part.

    Args:
        text: The text to be serialized.

    Returns:
        A dictionary representing the serialized text.
    """
    return dict(type="text", text=text)