
openai

dandy.llm.request.openai

OpenaiRequestMessage

Bases: RequestMessage

content_as_str

Source code in dandy/llm/request/openai.py
def content_as_str(self) -> str:
    # Text content: return the text itself.
    if self.content[0]['type'] == 'text':
        return self.content[0]['text']

    # Image content: return only the base64 payload after the data URI prefix.
    if self.content[0]['type'] == 'image_url':
        return self.content[0]['image_url']['url'].split(';base64,')[1]

    # Fallback: return the raw content coerced to a string.
    return str(self.content)
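
A minimal usage sketch (an illustration, not library code): it assumes OpenaiRequestMessage can be imported from dandy.llm.request.openai, that 'user' is a valid RoleLiteralStr value, and that content takes the same list-of-parts shape that add_message builds below.

from dandy.llm.request.openai import OpenaiRequestMessage

# Text content: the text itself is returned.
text_message = OpenaiRequestMessage(
    role='user',
    content=[{'type': 'text', 'text': 'Describe this repository.'}],
)
assert text_message.content_as_str() == 'Describe this repository.'

# Image content: only the base64 payload after ';base64,' is returned.
image_message = OpenaiRequestMessage(
    role='user',
    content=[{'type': 'image_url', 'image_url': {'url': 'data:image/png;base64,iVBORw0KGgo='}}],
)
assert image_message.content_as_str() == 'iVBORw0KGgo='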

OpenaiRequestBody

Bases: BaseRequestBody

stream = False (class attribute, instance attribute)

response_format = {'type': 'json_schema', 'json_schema': {'name': 'response', 'strict': False, 'schema': ...}} (class attribute, instance attribute)

max_completion_tokens = None (class attribute, instance attribute)

seed = None (class attribute, instance attribute)

temperature = None (class attribute, instance attribute)

token_usage (property)

add_message

Source code in dandy/llm/request/openai.py
def add_message(
    self,
    role: RoleLiteralStr,
    content: str,
    images: List[str] | None = None,
    prepend: bool = False,
) -> None:
    # Every message starts with a single text content part.
    message_content: List[dict] = [
        {
            'type': 'text',
            'text': content,
        }
    ]

    # Base64-encoded images are appended as data-URI image_url parts.
    if images is not None:
        for image in images:
            message_content.append(
                {
                    'type': 'image_url',
                    'image_url': {
                        'url': f'data:{get_image_mime_type_from_base64_string(image)};base64,{image}'
                    },
                }
            )

    openai_request_message = OpenaiRequestMessage(
        role=role,
        content=message_content,
    )

    # Either insert at the front of the conversation or append to the end.
    if prepend:
        self.messages.insert(0, openai_request_message)
    else:
        self.messages.append(openai_request_message)
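
A usage sketch, assuming body is an already-constructed OpenaiRequestBody (its required constructor fields come from BaseRequestBody and are not shown here), that 'user' and 'system' are valid RoleLiteralStr values, and that base64_encoded_png is a hypothetical variable holding a base64-encoded image string.

# Append a plain user message.
body.add_message(role='user', content='Summarize the release notes.')

# Append a user message that also carries a base64-encoded image.
body.add_message(
    role='user',
    content='What does this diagram show?',
    images=[base64_encoded_png],
)

# Prepend a system message so it becomes the first message in the request.
body.add_message(role='system', content='You are a concise assistant.', prepend=True)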

get_context_length

Source code in dandy/llm/request/openai.py
def get_context_length(self) -> int:
    # Context length is not configurable on the OpenAI request body; always returns 0.
    return 0

get_max_completion_tokens

Source code in dandy/llm/request/openai.py
def get_max_completion_tokens(self) -> int | None:
    return self.max_completion_tokens

get_seed

Source code in dandy/llm/request/openai.py
def get_seed(self) -> int | None:
    return self.seed

get_temperature

Source code in dandy/llm/request/openai.py
def get_temperature(self) -> float | None:
    return self.temperature
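
get_max_completion_tokens, get_seed and get_temperature simply return the corresponding attributes, all of which default to None. A short sketch, assuming an existing OpenaiRequestBody instance named body whose fields can be assigned directly:

assert body.get_seed() is None  # default

body.seed = 42
body.temperature = 0.2
body.max_completion_tokens = 512

assert body.get_seed() == 42
assert body.get_temperature() == 0.2
assert body.get_max_completion_tokens() == 512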

set_format_to_json_schema

Source code in dandy/llm/request/openai.py
def set_format_to_json_schema(self, json_schema: dict) -> None:
    # Slot the schema into the default structured-output response format.
    self.response_format['json_schema']['schema'] = json_schema

set_format_to_text

Source code in dandy/llm/request/openai.py
def set_format_to_text(self) -> None:
    # Replace the structured-output format with a plain text response format.
    self.response_format = {'type': 'text'}
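
A sketch of switching between structured and plain-text output, assuming an existing body instance; the schema below is a hand-written example, not one produced by the library.

# Structured output: the schema is slotted into the default response_format.
body.set_format_to_json_schema(
    {
        'type': 'object',
        'properties': {'answer': {'type': 'string'}},
        'required': ['answer'],
    }
)
# body.response_format['json_schema']['schema'] now holds the schema above.

# Plain-text output: response_format is replaced entirely.
body.set_format_to_text()
# body.response_format == {'type': 'text'}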

to_dict

Source code in dandy/llm/request/openai.py
def to_dict(self) -> dict:
    model_dict = self.model_dump()

    # Flatten each message's list of content parts into one
    # {'role': ..., 'content': ...} entry per part.
    formatted_messages = []
    for message in model_dict['messages']:
        for content in message['content']:
            if content['type'] == 'text':
                formatted_messages.append(
                    {
                        'role': message['role'],
                        'content': content['text'],
                    }
                )
            elif content['type'] == 'image_url':
                # Image parts keep only the base64 payload of the data URI.
                formatted_messages.append(
                    {
                        'role': message['role'],
                        'content': content['image_url']['url'].split(';base64,')[1],
                    }
                )

    model_dict['messages'] = formatted_messages

    return model_dict
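
A sketch of the resulting shape, assuming body holds the messages added in the add_message example above; each content part becomes its own entry in the flattened messages list, and every other field comes straight from model_dump().

request_dict = body.to_dict()

# request_dict['messages'] might look like:
# [
#     {'role': 'system', 'content': 'You are a concise assistant.'},
#     {'role': 'user', 'content': 'Summarize the release notes.'},
#     {'role': 'user', 'content': 'What does this diagram show?'},
#     {'role': 'user', 'content': '<base64 image payload>'},
# ]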