Skip to content
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
validate_model_info,
)
from autogen_core.tools import BaseTool, Tool, ToolSchema
from pydantic import BaseModel
from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase
from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior
from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings
Expand All @@ -37,6 +38,20 @@
logger = logging.getLogger(EVENT_LOGGER_NAME)


def ensure_serializable(data: BaseModel) -> BaseModel:
    """Return *data*, rebuilt if needed so it is JSON-serializable.

    Workaround for https://github.com/pydantic/pydantic/issues/7713, see
    https://github.com/pydantic/pydantic/issues/7713#issuecomment-2604574418
    """
    try:
        json.dumps(data)
    except TypeError:
        # Coerce nested objects into plain dicts via `vars`, round-trip the
        # result through JSON, then rebuild an instance of the same type.
        serialized = json.dumps(data, default=lambda obj: vars(obj))  # type: ignore
        return type(data)(**json.loads(serialized))
    return data


class SKChatCompletionAdapter(ChatCompletionClient):
"""
SKChatCompletionAdapter is an adapter that allows using Semantic Kernel model clients
Expand Down Expand Up @@ -472,7 +487,7 @@ async def create(
logger.info(
LLMCallEvent(
messages=[msg.model_dump() for msg in chat_history],
response=result[0].model_dump(),
response=ensure_serializable(result[0]).model_dump(),
Comment thread
lspinheiro marked this conversation as resolved.
prompt_tokens=prompt_tokens,
completion_tokens=completion_tokens,
)
Expand Down