@@ -20,52 +20,59 @@ <h1>Screen and Speech Recorder</h1>
2020
// MediaRecorder instance for the current capture session (created on Start).
let mediaRecorder;
// Blob chunks delivered by ondataavailable; assembled into a 'video/webm' Blob on stop.
let recordedChunks = [];
/**
 * Capture the screen plus the synthesized speech as one MediaStream.
 *
 * NOTE(review): the Web Speech API exposes no way to pipe
 * `speechSynthesis` output into a WebAudio graph. The previous code
 * built a MediaStreamSource from the destination's *own* stream —
 * nothing ever wrote into that destination, so the recorded audio
 * track was always silent. The reliable approach is to request audio
 * from getDisplayMedia itself: when the user shares a tab/screen
 * "with audio", the spoken utterance (played through the system
 * output) is captured alongside the video.
 *
 * @returns {Promise<MediaStream|null>} combined video+audio stream,
 *   or null when the user cancels / capture fails.
 */
async function getScreenAndAudioStream() {
  try {
    // Ask for system/tab audio together with the video so the TTS
    // playback is actually captured. The user must tick the browser's
    // "share audio" option; if they don't (or the platform doesn't
    // support it), the stream simply has no audio track.
    const screenStream = await navigator.mediaDevices.getDisplayMedia({
      video: true,
      audio: true,
    });

    // Start speaking; the audio goes to the default output, which the
    // shared-audio track above picks up.
    const utterance = new SpeechSynthesisUtterance(textToRead.value);
    window.speechSynthesis.speak(utterance);

    // Wrap the tracks in a fresh MediaStream (keeps the original
    // return shape: one stream combining video and audio tracks).
    return new MediaStream(screenStream.getTracks());
  } catch (error) {
    // User dismissed the picker, or capture is unsupported/denied.
    console.error("Error accessing display media:", error);
    return null;
  }
}
4947
// Start recording: acquire the combined stream, wire up the recorder,
// and flip the button states.
startButton.addEventListener('click', async () => {
  const stream = await getScreenAndAudioStream();
  if (!stream) return; // user cancelled or capture failed

  // Prefer webm but fall back to the browser default rather than
  // throwing on platforms that don't support it (e.g. Safari).
  const options = MediaRecorder.isTypeSupported('video/webm')
    ? { mimeType: 'video/webm' }
    : {};
  mediaRecorder = new MediaRecorder(stream, options);

  mediaRecorder.ondataavailable = (event) => {
    if (event.data.size > 0) {
      recordedChunks.push(event.data);
    } else {
      console.warn("No data available from ondataavailable event.");
    }
  };

  mediaRecorder.onstop = () => {
    if (recordedChunks.length === 0) {
      console.error("No recorded data available.");
      return;
    }
    const blob = new Blob(recordedChunks, { type: 'video/webm' });
    // Revoke the previous preview URL before replacing it; otherwise
    // every recording session leaks one Blob in memory.
    if (video.src) URL.revokeObjectURL(video.src);
    video.src = URL.createObjectURL(blob);
    // play() returns a promise; handle autoplay-policy rejections
    // instead of leaving an unhandled promise rejection.
    video.play().catch((err) => console.warn("Playback failed:", err));
    recordedChunks = [];
  };

  // If the user ends screen sharing from the browser's own UI, stop
  // the recorder too — otherwise onstop never fires and the preview
  // is never built.
  stream.getVideoTracks()[0]?.addEventListener('ended', () => {
    if (mediaRecorder.state !== 'inactive') mediaRecorder.stop();
  });

  mediaRecorder.start();
  console.log("Media recorder started, stream active:", stream.active);
  startButton.disabled = true;
  stopButton.disabled = false;
});
0 commit comments