# generated by datamodel-codegen:
#   filename:  openaiInputOutputPreprocessing.json
#   timestamp: 2025-08-04T17:32:50+00:00

from __future__ import annotations

from enum import Enum
from typing import Any, List, Literal, Optional, Union

from pydantic import BaseModel, ConfigDict, RootModel


class PreprocessingCreationParameter(BaseModel):
    model_config = ConfigDict(
        extra="allow",
    )
    prompt: str


class OpenaiReasoningEffort(Enum):
    low = "low"
    medium = "medium"
    high = "high"


class OpenaiUserMessage(BaseModel):
    model_config = ConfigDict(
        extra="allow",
    )
    type: Literal["user"]
    content: str


class OpenaiAssistantReasoningMessage(BaseModel):
    model_config = ConfigDict(
        extra="allow",
    )
    type: Literal["assistantReasoning"]
    content: str


class OpenaiAssistantToolCallMessage(BaseModel):
    model_config = ConfigDict(
        extra="allow",
    )
    type: Literal["assistantToolCall"]
    toolName: str
    toolParameter: str


class OpenaiAssistantCommentaryPreambleMessage(BaseModel):
    model_config = ConfigDict(
        extra="allow",
    )
    type: Literal["assistantCommentaryPreamble"]
    content: str


class OpenaiAssistantFinalMessage(BaseModel):
    model_config = ConfigDict(
        extra="allow",
    )
    type: Literal["assistantFinal"]
    content: str


class OpenaiToolResultMessage(BaseModel):
    model_config = ConfigDict(
        extra="allow",
    )
    type: Literal["toolResult"]
    toolName: str
    toolResult: str


class OpenaiMessage(
    RootModel[
        Union[
            OpenaiUserMessage,
            OpenaiAssistantReasoningMessage,
            OpenaiAssistantToolCallMessage,
            OpenaiAssistantCommentaryPreambleMessage,
            OpenaiAssistantFinalMessage,
            OpenaiToolResultMessage,
        ]
    ]
):
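    """A single chat message; exactly one variant matches via its literal ``type`` field."""
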
    root: Union[
        OpenaiUserMessage,
        OpenaiAssistantReasoningMessage,
        OpenaiAssistantToolCallMessage,
        OpenaiAssistantCommentaryPreambleMessage,
        OpenaiAssistantFinalMessage,
        OpenaiToolResultMessage,
    ]


class OpenaiToolDefinition(BaseModel):
    model_config = ConfigDict(
        extra="allow",
    )
    name: str
    description: str
    parametersJsonSchema: Optional[Any] = None


class OpenaiChat(BaseModel):
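    """A complete chat: developer instructions, reasoning effort, tool definitions, and message history."""
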
    model_config = ConfigDict(
        extra="allow",
    )
    developerInstructions: str
    reasoningEffort: OpenaiReasoningEffort
    toolDefinitions: List[OpenaiToolDefinition]
    messages: List[OpenaiMessage]


class OpenaiInputOutputPreprocessingRpcGetStopTokensParameter(BaseModel):
    model_config = ConfigDict(
        extra="allow",
    )
    encodingName: str


class OpenaiInputOutputPreprocessingRpcGetStopTokensReturns(BaseModel):
    model_config = ConfigDict(
        extra="allow",
    )
    stopTokens: List[float]


class PseudoOpenaiInputOutputPreprocessingRpcGetStopTokens(BaseModel):
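    """Parameter/returns pair for the ``rpcGetStopTokens`` operation."""
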
    model_config = ConfigDict(
        extra="allow",
    )
    parameter: OpenaiInputOutputPreprocessingRpcGetStopTokensParameter
    returns: OpenaiInputOutputPreprocessingRpcGetStopTokensReturns


class OpenaiInputOutputPreprocessingRpcRenderChatToTextParameter(BaseModel):
    model_config = ConfigDict(
        extra="allow",
    )
    encodingName: str
    chat: OpenaiChat


class OpenaiInputOutputPreprocessingRpcRenderChatToTextReturns(BaseModel):
    model_config = ConfigDict(
        extra="allow",
    )
    text: str


class PseudoOpenaiInputOutputPreprocessingRpcRenderChatToText(BaseModel):
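    """Parameter/returns pair for the ``rpcRenderChatToText`` operation."""
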
    model_config = ConfigDict(
        extra="allow",
    )
    parameter: OpenaiInputOutputPreprocessingRpcRenderChatToTextParameter
    returns: OpenaiInputOutputPreprocessingRpcRenderChatToTextReturns


class PseudoOpenaiInputOutputPreprocessing(BaseModel):
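    """Groups the parameter/returns shapes of the openaiInputOutputPreprocessing RPCs."""
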
    model_config = ConfigDict(
        extra="allow",
    )
    rpcGetStopTokens: PseudoOpenaiInputOutputPreprocessingRpcGetStopTokens
    rpcRenderChatToText: PseudoOpenaiInputOutputPreprocessingRpcRenderChatToText


class Model(BaseModel):
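    """Top-level wrapper exposing the openaiInputOutputPreprocessing interface."""
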
    model_config = ConfigDict(
        extra="allow",
    )
    openaiInputOutputPreprocessing: PseudoOpenaiInputOutputPreprocessing
