diff --git a/genai/live/live-audio-with-txt.js b/genai/live/live-audio-with-txt.js
new file mode 100644
index 0000000000..e6c257862d
--- /dev/null
+++ b/genai/live/live-audio-with-txt.js
@@ -0,0 +1,122 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// [START googlegenaisdk_live_audio_with_txt]
+
+'use strict';
+
+const {GoogleGenAI, Modality} = require('@google/genai');
+const fs = require('fs');
+
+const GOOGLE_CLOUD_PROJECT = process.env.GOOGLE_CLOUD_PROJECT;
+const GOOGLE_CLOUD_LOCATION = process.env.GOOGLE_CLOUD_LOCATION || 'global';
+
+async function generateLiveConversation(
+  projectId = GOOGLE_CLOUD_PROJECT,
+  location = GOOGLE_CLOUD_LOCATION
+) {
+  const client = new GoogleGenAI({
+    vertexai: true,
+    project: projectId,
+    location: location,
+  });
+
+  const voiceName = 'Aoede';
+  const modelId = 'gemini-2.0-flash-live-preview-04-09';
+  const config = {
+    responseModalities: [Modality.AUDIO],
+    speechConfig: {
+      voiceConfig: {
+        prebuiltVoiceConfig: {
+          voiceName: voiceName,
+        },
+      },
+    },
+  };
+
+  const responseQueue = [];
+
+  async function waitMessage() {
+    while (responseQueue.length === 0) {
+      await new Promise(resolve => setTimeout(resolve, 100));
+    }
+    return responseQueue.shift();
+  }
+
+  async function handleTurn() {
+    const audioChunks = [];
+    let done = false;
+
+    while (!done) {
+      const message = await waitMessage();
+
+      const serverContent = message.serverContent;
+      if (
+        serverContent &&
+        serverContent.modelTurn &&
+        serverContent.modelTurn.parts
+      ) {
+        for (const part of serverContent.modelTurn.parts) {
+          if (part && part.inlineData && part.inlineData.data) {
+            audioChunks.push(Buffer.from(part.inlineData.data));
+          }
+        }
+      }
+
+      if (serverContent && serverContent.turnComplete) {
+        done = true;
+      }
+    }
+
+    return audioChunks;
+  }
+
+  const session = await client.live.connect({
+    model: modelId,
+    config: config,
+    callbacks: {
+      onmessage: msg => responseQueue.push(msg),
+      onerror: e => console.error('Error:', e.message),
+    },
+  });
+
+  const textInput = 'Hello? Gemini, are you there?';
+  console.log('> ', textInput, '\n');
+
+  await session.sendClientContent({
+    turns: [{role: 'user', parts: [{text: textInput}]}],
+  });
+
+  const audioChunks = await handleTurn();
+
+  session.close();
+
+  if (audioChunks.length > 0) {
+    const audioBuffer = Buffer.concat(audioChunks);
+    fs.writeFileSync('response.raw', audioBuffer);
+    console.log('Received audio answer (saved to response.raw)');
+  }
+
+  // Example output:
+  // > Hello? Gemini, are you there?
+  // Received audio answer (saved to response.raw)
+
+  return audioChunks;
+}
+
+// [END googlegenaisdk_live_audio_with_txt]
+
+module.exports = {
+  generateLiveConversation,
+};
diff --git a/genai/live/live-ground-ragengine-with-txt.js b/genai/live/live-ground-ragengine-with-txt.js
new file mode 100644
index 0000000000..04c83f34fc
--- /dev/null
+++ b/genai/live/live-ground-ragengine-with-txt.js
@@ -0,0 +1,125 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// [START googlegenaisdk_live_ground_ragengine_with_txt]
+
+'use strict';
+
+const {GoogleGenAI, Modality} = require('@google/genai');
+
+const GOOGLE_CLOUD_PROJECT = process.env.GOOGLE_CLOUD_PROJECT;
+const GOOGLE_CLOUD_LOCATION = process.env.GOOGLE_CLOUD_LOCATION || 'global';
+
+// (DEVELOPER) Replace this with the resource name of your memory corpus.
+const MEMORY_CORPUS =
+  'projects/cloud-ai-devrel-softserve/locations/us-central1/ragCorpora/2305843009213693952';
+
+async function generateLiveRagTextResponse(
+  memoryCorpus = MEMORY_CORPUS,
+  projectId = GOOGLE_CLOUD_PROJECT,
+  location = GOOGLE_CLOUD_LOCATION
+) {
+  const client = new GoogleGenAI({
+    vertexai: true,
+    project: projectId,
+    location: location,
+  });
+
+  const modelId = 'gemini-2.0-flash-live-preview-04-09';
+
+  // RAG store config
+  const ragStore = {
+    ragResources: [
+      {
+        ragCorpus: memoryCorpus, // Use a memory corpus if you want to store context
+      },
+    ],
+    storeContext: true, // Store the conversation context in your memory corpus
+  };
+
+  const config = {
+    responseModalities: [Modality.TEXT],
+    tools: [
+      {
+        retrieval: {
+          vertexRagStore: ragStore,
+        },
+      },
+    ],
+  };
+
+  const responseQueue = [];
+
+  async function waitMessage() {
+    while (responseQueue.length === 0) {
+      await new Promise(resolve => setTimeout(resolve, 100));
+    }
+    return responseQueue.shift();
+  }
+
+  async function handleTurn() {
+    const turns = [];
+    let done = false;
+    while (!done) {
+      const message = await waitMessage();
+      turns.push(message);
+      if (message.serverContent && message.serverContent.turnComplete) {
+        done = true;
+      }
+    }
+    return turns;
+  }
+
+  const session = await client.live.connect({
+    model: modelId,
+    config: config,
+    callbacks: {
+      onmessage: msg => responseQueue.push(msg),
+      onerror: e => console.error('Error:', e.message),
+    },
+  });
+
+  const textInput = 'What are the newest Gemini models?';
+  console.log('> ', textInput, '\n');
+
+  await session.sendClientContent({
+    turns: [{role: 'user', parts: [{text: textInput}]}],
+  });
+
+  const turns = await handleTurn();
+  const response = [];
+
+  for (const turn of turns) {
+    if (turn.text) {
+      response.push(turn.text);
+    }
+  }
+
+  console.log(response.join(''));
+
+  // Example output:
+  // > What are the newest Gemini models?
+  // In December 2023, Google launched Gemini, their "most capable and general model". It's multimodal, meaning it understands and combines different types of information like text, code, audio, images, and video.
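+
+  // Close the session once the turn is complete to release the connection.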
+  session.close();
+
+  return response;
+}
+
+// [END googlegenaisdk_live_ground_ragengine_with_txt]
+
+module.exports = {
+  generateLiveRagTextResponse,
+};
diff --git a/genai/live/live-structured-ouput-with-txt.js b/genai/live/live-structured-ouput-with-txt.js
new file mode 100644
index 0000000000..f77bba8f98
--- /dev/null
+++ b/genai/live/live-structured-ouput-with-txt.js
@@ -0,0 +1,93 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// [START googlegenaisdk_live_structured_output_with_txt]
+
+'use strict';
+const {OpenAI} = require('openai');
+const {GoogleAuth} = require('google-auth-library');
+
+const GOOGLE_CLOUD_PROJECT = process.env.GOOGLE_CLOUD_PROJECT;
+const GOOGLE_CLOUD_LOCATION =
+  process.env.GOOGLE_CLOUD_LOCATION || 'us-central1';
+
+const CalendarEventSchema = {
+  type: 'object',
+  properties: {
+    name: {type: 'string'},
+    date: {type: 'string'},
+    participants: {
+      type: 'array',
+      items: {type: 'string'},
+    },
+  },
+  required: ['name', 'date', 'participants'],
+};
+
+async function generateStructuredTextResponse(
+  projectId = GOOGLE_CLOUD_PROJECT,
+  location = GOOGLE_CLOUD_LOCATION
+) {
+  const auth = new GoogleAuth({
+    scopes: ['https://www.googleapis.com/auth/cloud-platform'],
+  });
+  const client = await auth.getClient();
+  const tokenResponse = await client.getAccessToken();
+
+  const token = tokenResponse.token;
+
+  const ENDPOINT_ID = 'openapi';
+  const baseURL = `https://${location}-aiplatform.googleapis.com/v1/projects/${projectId}/locations/${location}/endpoints/${ENDPOINT_ID}`;
+
+  const openAI = new OpenAI({
+    apiKey: token,
+    baseURL: baseURL,
+  });
+
+  const completion = await openAI.chat.completions.create({
+    model: 'google/gemini-2.0-flash-001',
+    messages: [
+      {role: 'system', content: 'Extract the event information.'},
+      {
+        role: 'user',
+        content: 'Alice and Bob are going to a science fair on Friday.',
+      },
+    ],
+    response_format: {
+      type: 'json_schema',
+      json_schema: {
+        name: 'CalendarEvent',
+        schema: CalendarEventSchema,
+      },
+    },
+  });
+
+  const response = completion.choices[0].message.content;
+  console.log(response);
+
+  // Example expected output:
+  // {
+  //   "name": "science fair",
+  //   "date": "Friday",
+  //   "participants": ["Alice", "Bob"]
+  // }
+
+  return response;
+}
+
+// [END googlegenaisdk_live_structured_output_with_txt]
+
+module.exports = {
+  generateStructuredTextResponse,
+};
diff --git a/genai/live/live-transcribe-with-audio.js b/genai/live/live-transcribe-with-audio.js
new file mode 100644
index 0000000000..dcc23c1f0c
--- /dev/null
+++ b/genai/live/live-transcribe-with-audio.js
@@ -0,0 +1,110 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// [START googlegenaisdk_live_transcribe_with_audio]
+
+'use strict';
+
+const {GoogleGenAI, Modality} = require('@google/genai');
+
+const GOOGLE_CLOUD_PROJECT = process.env.GOOGLE_CLOUD_PROJECT;
+const GOOGLE_CLOUD_LOCATION = process.env.GOOGLE_CLOUD_LOCATION || 'global';
+
+async function generateLiveAudioTranscription(
+  projectId = GOOGLE_CLOUD_PROJECT,
+  location = GOOGLE_CLOUD_LOCATION
+) {
+  const client = new GoogleGenAI({
+    vertexai: true,
+    project: projectId,
+    location: location,
+  });
+
+  const modelId = 'gemini-live-2.5-flash-preview-native-audio';
+  const config = {
+    responseModalities: [Modality.AUDIO],
+    inputAudioTranscription: {},
+    outputAudioTranscription: {},
+  };
+
+  const responseQueue = [];
+
+  async function waitMessage() {
+    while (responseQueue.length === 0) {
+      await new Promise(resolve => setTimeout(resolve, 100));
+    }
+    return responseQueue.shift();
+  }
+
+  async function handleTurn() {
+    const turns = [];
+    let done = false;
+    const outputMessage = [];
+    while (!done) {
+      const message = await waitMessage();
+      turns.push(message);
+
+      const serverContent = message.serverContent;
+      if (serverContent && serverContent.modelTurn) {
+        console.log('Model turn:', serverContent.modelTurn);
+      }
+      if (serverContent && serverContent.inputTranscription) {
+        console.log('Input transcript:', serverContent.inputTranscription.text);
+      }
+      if (
+        serverContent &&
+        serverContent.outputTranscription &&
+        serverContent.outputTranscription.text
+      ) {
+        outputMessage.push(serverContent.outputTranscription.text);
+      }
+      if (serverContent && serverContent.turnComplete) {
+        done = true;
+      }
+    }
+    console.log('Output transcript:', outputMessage.join(''));
+    return turns;
+  }
+
+  const session = await client.live.connect({
+    model: modelId,
+    config: config,
+    callbacks: {
+      onmessage: msg => responseQueue.push(msg),
+      onerror: e => console.error('Error:', e.message),
+    },
+  });
+
+  const inputTxt = 'Hello? Gemini, are you there?';
+  console.log('> ', inputTxt, '\n');
+
+  await session.sendClientContent({
+    turns: [{role: 'user', parts: [{text: inputTxt}]}],
+  });
+
+  const turns = await handleTurn();
+
+  // Example output:
+  // > Hello? Gemini, are you there?
+  // Yes, I'm here. What would you like to talk about?
+
+  session.close();
+  return turns;
+}
+
+// [END googlegenaisdk_live_transcribe_with_audio]
+
+module.exports = {
+  generateLiveAudioTranscription,
+};
diff --git a/genai/live/live-txt-with-audio.js b/genai/live/live-txt-with-audio.js
new file mode 100644
index 0000000000..92a5fdd594
--- /dev/null
+++ b/genai/live/live-txt-with-audio.js
@@ -0,0 +1,111 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// [START googlegenaisdk_live_txt_with_audio]
+
+'use strict';
+
+const {GoogleGenAI, Modality} = require('@google/genai');
+const fetch = require('node-fetch');
+const GOOGLE_CLOUD_PROJECT = process.env.GOOGLE_CLOUD_PROJECT;
+const GOOGLE_CLOUD_LOCATION = process.env.GOOGLE_CLOUD_LOCATION || 'global';
+
+async function generateLiveConversation(
+  projectId = GOOGLE_CLOUD_PROJECT,
+  location = GOOGLE_CLOUD_LOCATION
+) {
+  const client = new GoogleGenAI({
+    vertexai: true,
+    project: projectId,
+    location: location,
+  });
+
+  const modelId = 'gemini-2.0-flash-live-preview-04-09';
+  const config = {
+    responseModalities: [Modality.TEXT],
+  };
+
+  const responseQueue = [];
+
+  async function waitMessage() {
+    while (responseQueue.length === 0) {
+      await new Promise(resolve => setTimeout(resolve, 100));
+    }
+    return responseQueue.shift();
+  }
+
+  async function handleTurn() {
+    const turns = [];
+    let done = false;
+    while (!done) {
+      const message = await waitMessage();
+      turns.push(message);
+      if (message.serverContent && message.serverContent.turnComplete) {
+        done = true;
+      }
+    }
+    return turns;
+  }
+
+  const session = await client.live.connect({
+    model: modelId,
+    config: config,
+    callbacks: {
+      onmessage: msg => responseQueue.push(msg),
+      onerror: e => console.error('Error:', e.message),
+    },
+  });
+
+  const audioUrl =
+    'https://storage.googleapis.com/generativeai-downloads/data/16000.wav';
+
+  console.log('> Answer to this audio URL:', audioUrl);
+
+  const res = await fetch(audioUrl);
+  if (!res.ok) throw new Error(`Failed to fetch audio: ${res.status}`);
+  const arrayBuffer = await res.arrayBuffer();
+  const audioBytes = Buffer.from(arrayBuffer).toString('base64');
+
+  await session.sendRealtimeInput({
+    media: {
+      data: audioBytes,
+      mimeType: 'audio/pcm;rate=16000',
+    },
+  });
+
+  const turns = await handleTurn();
+
+  const response = [];
+  for (const turn of turns) {
+    if (turn.text) {
+      response.push(turn.text);
+    }
+  }
+
+  console.log('Final response:', response.join(''));
+
+  // Example output:
+  // > Answer to this audio URL: https://storage.googleapis.com/generativeai-downloads/data/16000.wav
+  // Final response: Yes, I can hear you. How are you doing today?
+
+  session.close();
+
+  return response;
+}
+
+// [END googlegenaisdk_live_txt_with_audio]
+
+module.exports = {
+  generateLiveConversation,
+};
diff --git a/genai/package.json b/genai/package.json
index 9d94f3811b..fb2e402300 100644
--- a/genai/package.json
+++ b/genai/package.json
@@ -15,15 +15,18 @@
   "dependencies": {
     "@google/genai": "1.20.0",
     "axios": "^1.6.2",
+    "google-auth-library": "^10.3.0",
     "luxon": "^3.7.1",
+    "node-fetch": "^2.7.0",
+    "openai": "^5.19.1",
     "supertest": "^7.0.0"
   },
   "devDependencies": {
     "c8": "^10.0.0",
     "chai": "^4.5.0",
     "mocha": "^10.0.0",
+    "proxyquire": "^2.1.3",
     "sinon": "^18.0.0",
-    "uuid": "^10.0.0",
-    "proxyquire": "^2.1.3"
+    "uuid": "^10.0.0"
   }
 }
diff --git a/genai/test/live-audio-with-txt.test.js b/genai/test/live-audio-with-txt.test.js
new file mode 100644
index 0000000000..9dca58c3fa
--- /dev/null
+++ b/genai/test/live-audio-with-txt.test.js
@@ -0,0 +1,30 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+const {assert} = require('chai');
+const {describe, it} = require('mocha');
+
+const projectId = process.env.CAIP_PROJECT_ID;
+const sample = require('../live/live-audio-with-txt');
+
+describe('live-audio-with-txt', () => {
+  it('should generate audio content in a live session conversation from a text prompt', async function () {
+    this.timeout(180000);
+    const output = await sample.generateLiveConversation(projectId);
+    console.log('Generated output:', output);
+    assert(output.length > 0);
+  });
+});
diff --git a/genai/test/live-ground-ragengine-with-txt.test.js b/genai/test/live-ground-ragengine-with-txt.test.js
new file mode 100644
index 0000000000..c98fa71908
--- /dev/null
+++ b/genai/test/live-ground-ragengine-with-txt.test.js
@@ -0,0 +1,67 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+const {assert} = require('chai');
+const {describe, it} = require('mocha');
+const proxyquire = require('proxyquire');
+
+const {delay} = require('./util');
+
+describe('live-ground-ragengine-with-txt', () => {
+  it('should return text from mocked RAG session', async function () {
+    const fakeSession = {
+      sendClientContent: async () => {},
+      close: async () => {},
+    };
+
+    const mockClient = {
+      live: {
+        connect: async (opts = {}) => {
+          setImmediate(() =>
+            opts.callbacks.onmessage({
+              text: 'In December 2023, Google launched Gemini...',
+              serverContent: {turnComplete: false},
+            })
+          );
+          setImmediate(() =>
+            opts.callbacks.onmessage({
+              text: 'Mock final message.',
+              serverContent: {turnComplete: true},
+            })
+          );
+
+          return fakeSession;
+        },
+      },
+    };
+
+    const sample = proxyquire('../live/live-ground-ragengine-with-txt', {
+      '@google/genai': {
+        GoogleGenAI: function () {
+          return mockClient;
+        },
+        Modality: {TEXT: 'TEXT'},
+      },
+    });
+
+    this.timeout(10000);
+    this.retries(4);
+    await delay(this.test);
+    const output = await sample.generateLiveRagTextResponse();
+    console.log('Generated output:', output);
+    assert(output.length > 0);
+  });
+});
diff --git a/genai/test/live-structured-ouput-with-txt.test.js b/genai/test/live-structured-ouput-with-txt.test.js
new file mode 100644
index 0000000000..b26e1e3092
--- /dev/null
+++ b/genai/test/live-structured-ouput-with-txt.test.js
@@ -0,0 +1,30 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+const {assert} = require('chai');
+const {describe, it} = require('mocha');
+
+const projectId = process.env.CAIP_PROJECT_ID;
+const sample = require('../live/live-structured-ouput-with-txt');
+
+describe('live-structured-ouput-with-txt', () => {
+  it('should extract structured information from text input using the model', async function () {
+    this.timeout(18000);
+    const output = await sample.generateStructuredTextResponse(projectId);
+    console.log('Generated output:', output);
+    assert(output.length > 0);
+  });
+});
diff --git a/genai/test/live-transcribe-with-audio.test.js b/genai/test/live-transcribe-with-audio.test.js
new file mode 100644
index 0000000000..250dafb5e5
--- /dev/null
+++ b/genai/test/live-transcribe-with-audio.test.js
@@ -0,0 +1,30 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+const {assert} = require('chai');
+const {describe, it} = require('mocha');
+
+const projectId = process.env.CAIP_PROJECT_ID;
+const sample = require('../live/live-transcribe-with-audio');
+
+describe('live-transcribe-with-audio', () => {
+  it('should transcribe audio input into text using the live model', async function () {
+    this.timeout(180000);
+    const output = await sample.generateLiveAudioTranscription(projectId);
+    console.log('Generated output:', output);
+    assert(output.length > 0);
+  });
+});
diff --git a/genai/test/live-txt-with-audio.test.js b/genai/test/live-txt-with-audio.test.js
new file mode 100644
index 0000000000..c7de558118
--- /dev/null
+++ b/genai/test/live-txt-with-audio.test.js
@@ -0,0 +1,88 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
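+
+// Mocks node-fetch and @google/genai via proxyquire, so no network calls are made.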
+
+'use strict';
+
+const {assert} = require('chai');
+const {describe, it} = require('mocha');
+
+const projectId = process.env.CAIP_PROJECT_ID;
+const {delay} = require('./util');
+
+const proxyquire = require('proxyquire');
+
+describe('live-txt-with-audio', () => {
+  it('should generate text content in a live session from audio input', async function () {
+    const fakeFetch = async () => ({
+      ok: true,
+      arrayBuffer: async () => Buffer.from('fake audio'),
+    });
+
+    const fakeClient = {
+      live: {
+        connect: async (opts = {}) => {
+          console.log('Mock is called');
+
+          if (
+            opts &&
+            opts.callbacks &&
+            typeof opts.callbacks.onmessage === 'function'
+          ) {
+            setImmediate(() =>
+              opts.callbacks.onmessage({
+                text: 'Yes, I can hear you.',
+                serverContent: {
+                  turnComplete: false,
+                },
+              })
+            );
+
+            setImmediate(() =>
+              opts.callbacks.onmessage({
+                text: 'Here is the final response.',
+                serverContent: {
+                  turnComplete: true,
+                },
+              })
+            );
+          }
+
+          return {
+            sendRealtimeInput: async () => {},
+            close: async () => {},
+          };
+        },
+      },
+    };
+
+    const sample = proxyquire('../live/live-txt-with-audio', {
+      'node-fetch': fakeFetch,
+      '@google/genai': {
+        GoogleGenAI: function () {
+          return fakeClient;
+        },
+        Modality: {TEXT: 'TEXT'},
+      },
+    });
+
+    this.timeout(180000);
+    this.retries(4);
+    await delay(this.test);
+    const output = await sample.generateLiveConversation(projectId);
+    console.log('Generated output:', output);
+    assert(output.length > 0);
+  });
+});