branch: externals/llm
commit 6d6c88c20a2b485ade7bac53bdfb44ed8f91c279
Author: Andrew Hyatt <ahy...@gmail.com>
Commit: GitHub <nore...@github.com>

    Fix Gemini streaming tool calls (#205)
    
    I had used the wrong identifier for the tool result plist internally,
    which is now fixed.
    
    This fixes https://github.com/ahyatt/llm/issues/203.
---
 NEWS.org      | 1 +
 llm-vertex.el | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/NEWS.org b/NEWS.org
index 0e00dc2e1a..e667fffa8c 100644
--- a/NEWS.org
+++ b/NEWS.org
@@ -2,6 +2,7 @@
 - Add thinking control to Gemini / Vertex.
 - Change default Vertex, Gemini model to Gemini 2.5 Pro.
 - Add Gemini 2.5 Flash model
+- Fix Vertex / Gemini streaming tool calls
 * Version 0.27.0
 - Add =thinking= option to control the amount of thinking that happens for reasoning models.
 - Fix incorrectly low default Claude max tokens
diff --git a/llm-vertex.el b/llm-vertex.el
index 852592ad87..8f95de0db7 100644
--- a/llm-vertex.el
+++ b/llm-vertex.el
@@ -294,7 +294,7 @@ which is necessary to properly set some paremeters."
            (if-let ((response (llm-provider-chat-extract-result provider element)))
                (funcall receiver `(:text ,response))
              (when-let ((fc (llm-provider-extract-tool-uses provider element)))
-               (funcall receiver `(:tool-call ,fc))))))))
+               (funcall receiver `(:tool-uses ,fc))))))))
 
 (cl-defmethod llm-provider-collect-streaming-tool-uses ((_ llm-google) data)
   (car data))
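
As a minimal sketch (not part of the patch), this is roughly how a receiver like the one the handler above funcalls might consume these plists. The :text and :tool-uses keys come straight from the diff; the function name below is hypothetical:

(defun my/receiver (chunk)
  (cond
   ;; Partial text arrives as (:text "...").
   ((plist-get chunk :text)
    (message "partial text: %s" (plist-get chunk :text)))
   ;; Tool calls now arrive as (:tool-uses ...); before this fix the
   ;; handler sent :tool-call, so streaming tool calls never reached
   ;; consumers looking for :tool-uses.
   ((plist-get chunk :tool-uses)
    (message "tool uses: %S" (plist-get chunk :tool-uses)))))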
