@@ -682,3 +682,113 @@ def test_max_tool_calls_with_builtin_tools(openai_client, client_with_models, text_model_id):
 
     # Verify we have a valid max_tool_calls field
     assert response_3.max_tool_calls == max_tool_calls[1]
+
+
+@pytest.mark.skip(reason="Tool calling is not reliable.")
+def test_parallel_tool_calls_true(openai_client, client_with_models, text_model_id):
+    """Test handling of parallel_tool_calls=True with function tools in responses."""
+    if isinstance(client_with_models, LlamaStackAsLibraryClient):
+        pytest.skip("OpenAI responses are not supported when testing with library client yet.")
+
+    client = openai_client
+    parallel_tool_calls = True
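+    # parallel_tool_calls=True should let the model emit both tool calls in a
+    # single turn, so one prompt covering two cities yields two function calls.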
695+
696+ tools = [
697+ {
698+ "type" : "function" ,
699+ "name" : "get_weather" ,
700+ "description" : "Get weather information for a specified location" ,
701+ "parameters" : {
702+ "type" : "object" ,
703+ "properties" : {
704+ "location" : {
705+ "type" : "string" ,
706+ "description" : "The city name (e.g., 'New York', 'London')" ,
707+ },
708+ },
709+ },
710+ }
711+ ]
712+
713+ # First create a response that triggers function tools
714+ response = client .responses .create (
715+ model = text_model_id ,
716+ input = "Get the weather in New York and in Paris" ,
717+ tools = tools ,
718+ stream = False ,
719+ parallel_tool_calls = parallel_tool_calls ,
720+ )
721+
722+ # Verify we got two function calls
723+ assert len (response .output ) == 2
724+ assert response .output [0 ].type == "function_call"
725+ assert response .output [0 ].name == "get_weather"
726+ assert response .output [0 ].status == "completed"
727+ assert response .output [1 ].type == "function_call"
728+ assert response .output [1 ].name == "get_weather"
+    assert response.output[1].status == "completed"
+
+    # Verify we have a valid parallel_tool_calls field
+    assert response.parallel_tool_calls == parallel_tool_calls
+
+
+@pytest.mark.skip(reason="Tool calling is not reliable.")
+def test_parallel_tool_calls_false(openai_client, client_with_models, text_model_id):
+    """Test handling of parallel_tool_calls=False with function tools in responses."""
+    if isinstance(client_with_models, LlamaStackAsLibraryClient):
+        pytest.skip("OpenAI responses are not supported when testing with library client yet.")
+
+    client = openai_client
+    parallel_tool_calls = False
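+    # parallel_tool_calls=False should restrict the model to at most one tool
+    # call per response, so the two-city request is resolved over two turns.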
743+
744+ tools = [
745+ {
746+ "type" : "function" ,
747+ "name" : "get_weather" ,
748+ "description" : "Get weather information for a specified location" ,
749+ "parameters" : {
750+ "type" : "object" ,
751+ "properties" : {
752+ "location" : {
753+ "type" : "string" ,
754+ "description" : "The city name (e.g., 'New York', 'London')" ,
755+ },
756+ },
757+ },
758+ }
759+ ]
760+
761+ # First create a response that triggers function tools
762+ response = client .responses .create (
763+ model = text_model_id ,
764+ input = "Get the weather in New York and in Paris" ,
765+ tools = tools ,
766+ stream = False ,
767+ parallel_tool_calls = parallel_tool_calls ,
768+ )
769+
770+ # Verify we got the first function call
771+ assert len (response .output ) == 1
772+ assert response .output [0 ].type == "function_call"
773+ assert response .output [0 ].name == "get_weather"
774+ assert response .output [0 ].status == "completed"
775+
776+ # Verify we have a valid parallel_tool_calls field
777+ assert response .parallel_tool_calls == parallel_tool_calls
778+
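+    # Feed the first call's output back as a function_call_output item so the
+    # model can issue the follow-up call for the remaining city.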
+    response2 = client.responses.create(
+        model=text_model_id,
+        input=[
+            {"role": "user", "content": "Check the weather in Paris and New York."},
+            {"call_id": response.output[0].call_id, "type": "function_call_output", "output": "18 c"},
+        ],
+        tools=tools,
+        stream=False,
+        parallel_tool_calls=parallel_tool_calls,
+    )
+
+    # Verify we got the second function call
+    assert len(response2.output) == 1
+    assert response2.output[0].type == "function_call"
+    assert response2.output[0].name == "get_weather"
+    assert response2.output[0].status == "completed"