@@ -23,10 +23,10 @@ final class ServerPromptTemplateIntegrationTests: XCTestCase {
   }
 
   func testGenerateContentWithText() async throws {
-    let model = FirebaseAI.firebaseAI(backend: .vertexAI()).templateGenerativeModel()
+    let model = FirebaseAI.firebaseAI(backend: .vertexAI(location: "global")).templateGenerativeModel()
     let userName = "paul"
     let response = try await model.generateContent(
-      template: "greeting",
+      template: "greeting2",
       variables: [
         "name": userName,
         "language": "Spanish",
@@ -40,7 +40,7 @@ final class ServerPromptTemplateIntegrationTests: XCTestCase {
     let model = FirebaseAI.firebaseAI(backend: .vertexAI()).templateGenerativeModel()
     let userName = "paul"
     let stream = try model.generateContentStream(
-      template: "greeting",
+      template: "greeting.prompt",
       variables: [
         "name": userName,
         "language": "English",
@@ -59,7 +59,7 @@ final class ServerPromptTemplateIntegrationTests: XCTestCase {
     let imagenModel = FirebaseAI.firebaseAI(backend: .vertexAI()).templateImagenModel()
     let imagenPrompt = "A cat picture"
     let response = try await imagenModel.generateImages(
-      template: "generate_images",
+      template: "generate_images.prompt",
       variables: [
         "prompt": imagenPrompt,
       ]
@@ -74,7 +74,7 @@ final class ServerPromptTemplateIntegrationTests: XCTestCase {
     let base64Image = imageBytes.base64EncodedString()
 
     let response = try await model.generateContent(
-      template: "media",
+      template: "media.prompt",
       variables: [
         "imageData": [
           "isInline": true,
@@ -96,7 +96,7 @@ final class ServerPromptTemplateIntegrationTests: XCTestCase {
     let base64Image = imageBytes.base64EncodedString()
 
     let stream = try model.generateContentStream(
-      template: "media",
+      template: "media.prompt",
       variables: [
         "imageData": [
           "isInline": true,
@@ -123,7 +123,7 @@ final class ServerPromptTemplateIntegrationTests: XCTestCase {
       ModelContent(role: "user", parts: "Hello!"),
       ModelContent(role: "model", parts: "Hi there! How can I help?"),
     ]
-    let chatSession = model.startChat(template: "chat_history", history: initialHistory)
+    let chatSession = model.startChat(template: "chat_history.prompt", history: initialHistory)
 
     let userMessage = "What's the weather like?"
 
@@ -142,7 +142,7 @@ final class ServerPromptTemplateIntegrationTests: XCTestCase {
       ModelContent(role: "user", parts: "Hello!"),
       ModelContent(role: "model", parts: "Hi there! How can I help?"),
     ]
-    let chatSession = model.startChat(template: "chat_history", history: initialHistory)
+    let chatSession = model.startChat(template: "chat_history.prompt", history: initialHistory)
 
     let userMessage = "What's the weather like?"
 
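For orientation, here is a minimal usage sketch of the templated text API these tests exercise. It is not part of the diff; the template name, variables, and the `text` accessor on the response are assumptions based only on the calls shown above.

```swift
import FirebaseAI

// Minimal sketch, assuming the template API surface shown in the tests above.
// The template name, variables, and `response.text` accessor are assumptions.
func greet(userName: String) async throws -> String? {
  // Template-backed model, created the same way as in the tests.
  let model = FirebaseAI.firebaseAI(backend: .vertexAI()).templateGenerativeModel()
  let response = try await model.generateContent(
    template: "greeting.prompt", // server-side prompt template, mirrored from the tests
    variables: [
      "name": userName,          // substituted into the template
      "language": "Spanish",
    ]
  )
  return response.text
}
```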