I am currently trying to use Semantic Kernel with a Databricks-hosted Sonnet 3.7 model, and I have run into a problem: when I create an agent with plugins, the plugins are never executed. It seems the kernel does not support function calling with such models.
I created a class:
class DatabricksSonnetChatCompletion(ChatCompletionClientBase, BaseModel):
    """Chat-completion connector for a Databricks-hosted Claude Sonnet endpoint.

    NOTE(review): Semantic Kernel drives function calling through the base-class
    methods ``get_chat_message_contents`` / ``_inner_get_chat_message_contents``,
    not through a custom ``complete_chat_async`` — if the kernel never calls
    this method, plugins will never execute regardless of the payload. Verify
    the correct override name against the installed semantic-kernel version.
    """

    ai_model_id: str       # model identifier reported to the kernel
    service_id: str        # key used when registering this service with the kernel
    endpoint_url: str      # Databricks model-serving endpoint URL
    databricks_token: str  # bearer token for the endpoint

    async def complete_chat_async(self, messages, settings=None, **kwargs) -> ChatMessageContent:
        """Send *messages* to the Databricks endpoint and return the assistant reply.

        Forwards OpenAI-style ``functions`` definitions from *kwargs* and maps a
        ``function_call`` in the response to a ``FunctionCallContent`` item.

        Raises:
            requests.HTTPError: if the endpoint returns a non-2xx status.
        """
        import asyncio  # local import so the class stays self-contained

        payload = {
            "messages": [{"role": m.role.value, "content": m.content} for m in messages],
            "temperature": settings.extension_data.get("temperature", 0.2) if settings else 0.2,
            "max_tokens": settings.extension_data.get("max_tokens", 1024) if settings else 1024,
        }

        # Forward function definitions if the caller provided any.
        functions = kwargs.get("functions")
        if functions:
            payload["functions"] = functions
            payload["function_call"] = "auto"  # let the model decide

        headers = {
            "Authorization": f"Bearer {self.databricks_token}",
            "Content-Type": "application/json",
        }

        # FIX: requests.post is blocking; calling it directly inside an async
        # method stalls the event loop. Run it in a worker thread, and add a
        # timeout so a hung endpoint cannot block forever.
        response = await asyncio.to_thread(
            requests.post,
            self.endpoint_url,
            headers=headers,
            json=payload,
            timeout=60,
        )
        response.raise_for_status()
        message = response.json()["choices"][0]["message"]

        # FIX: ChatMessageContent does not accept a ``function_call=`` keyword.
        # Semantic Kernel expects function/tool calls as *items* on the message
        # (items=[FunctionCallContent(...)]); the original kwarg was silently
        # dropped or raised, which is why plugin calls never surfaced.
        # NOTE(review): recent SK versions may also require an ``id`` on
        # FunctionCallContent for auto-invocation — confirm against your version.
        func_call = message.get("function_call")
        if func_call:
            return ChatMessageContent(
                role=AuthorRole.ASSISTANT,
                items=[
                    FunctionCallContent(
                        name=func_call.get("name"),
                        arguments=func_call.get("arguments"),
                    )
                ],
            )

        # Plain assistant reply (empty string if the endpoint sent no content).
        return ChatMessageContent(
            role=AuthorRole.ASSISTANT,
            content=message.get("content", ""),
        )
However, even with this class registered as the chat service, the plugins are still never invoked — the model's function calls are not executed by the kernel.