Commit b2cfd91

Merge pull request #4 from liberate-org/fix/torch-hub-fix
Fix/torch hub fix
2 parents 322c0fc + d144f52

File tree

2 files changed: +8 -5 lines changed

vocode/streaming/input_device/silero_vad.py

Lines changed: 4 additions & 2 deletions

@@ -22,15 +22,17 @@ def _load_model(self, use_onnx: bool = False) -> torch.nn.Module:
                 repo_or_dir='silero-vad',
                 model='silero_vad',
                 source='local',
-                onnx=use_onnx
+                onnx=use_onnx,
+                trust_repo=True
             )
         except FileNotFoundError:
             self.logger.warning("Could not find local VAD model, downloading from GitHub!")
             model, _ = torch.hub.load(
                 repo_or_dir='snakers4/silero-vad',
                 model='silero_vad',
                 source='github',
                 onnx=use_onnx,
+                trust_repo=True
             )
         return model
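For background on the new keyword: since PyTorch 1.12, torch.hub.load warns, and in some configurations prompts interactively, before running code from a hub repository that has not been marked as trusted, which can stall a non-interactive service at model-load time. Passing trust_repo=True marks the repo as trusted up front. A minimal standalone sketch of the loading pattern the diff settles on (same repo names and entrypoint as above; wrapping it in a free function is purely for illustration):

    import torch

    def load_silero_vad(use_onnx: bool = False) -> torch.nn.Module:
        # Prefer a local checkout of the silero-vad repo; fall back to GitHub.
        try:
            model, _ = torch.hub.load(
                repo_or_dir='silero-vad',
                model='silero_vad',
                source='local',
                onnx=use_onnx,
                trust_repo=True,  # skip the "trust this repo?" confirmation
            )
        except FileNotFoundError:
            model, _ = torch.hub.load(
                repo_or_dir='snakers4/silero-vad',
                model='silero_vad',
                source='github',
                onnx=use_onnx,
                trust_repo=True,
            )
        return model

The onnx keyword is forwarded by torch.hub.load to the silero-vad hub entrypoint, so it still selects between the PyTorch and ONNX variants of the model; trust_repo itself only exists on PyTorch 1.12 and later.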

vocode/streaming/streaming_conversation.py

Lines changed: 4 additions & 3 deletions

@@ -320,6 +320,7 @@ async def process(self, item: InterruptibleAgentResponseEvent[AgentResponse]):
                 is_interruptible=item.is_interruptible,
                 agent_response_tracker=item.agent_response_tracker,
             )
+            self.conversation.mark_last_agent_response()
         except asyncio.CancelledError:
             pass

@@ -385,7 +386,6 @@ async def process(
                 await self.conversation.terminate()
             except asyncio.TimeoutError:
                 pass
-            self.conversation.mark_last_agent_response()
         except asyncio.CancelledError:
             pass

@@ -712,6 +712,7 @@ async def send_speech_to_output(
                 "Sent chunk {} with size {}".format(chunk_idx, len(chunk_result.chunk))
             )
             self.mark_last_action_timestamp()
+            self.mark_last_agent_response()
             chunk_idx += 1
             seconds_spoken += seconds_per_chunk
             if transcript_message:

@@ -790,7 +791,7 @@ async def check_if_human_should_be_prompted(self):
             if self.last_agent_response and self.last_final_transcript_from_human:
                 last_human_touchpoint = time.time() - self.last_final_transcript_from_human
                 last_agent_touchpoint = time.time() - self.last_agent_response
-                if last_human_touchpoint >= reengage_timeout and last_agent_touchpoint >= reengage_timeout:
+                if (last_human_touchpoint >= reengage_timeout) and (last_agent_touchpoint >= reengage_timeout):
                     reengage_statement = random.choice(reengage_options)
                     self.logger.debug(f"Prompting user with {reengage_statement}: no interaction has happened in {reengage_timeout} seconds")
                     self.chunk_size = (

@@ -812,7 +813,7 @@ async def check_if_human_should_be_prompted(self):
                     agent_response_tracker=asyncio.Event(),
                 )
                 self.mark_last_agent_response()
-                await asyncio.sleep(1)
+                await asyncio.sleep(2.5)
             else:
                 await asyncio.sleep(1)
         self.logger.debug("stopped check if human should be prompted")
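The mark_last_agent_response call sites and the re-engagement hunks above drive the same idle-detection logic: the conversation keeps a timestamp for the agent's last output and for the human's last final transcript, and only prompts the human once both have been quiet for reengage_timeout seconds. A minimal standalone sketch of that check, using the attribute names from the diff; the surrounding loop, the say helper, and the active flag are hypothetical stand-ins for the worker's real plumbing:

    import asyncio
    import random
    import time

    async def check_if_human_should_be_prompted(state, reengage_timeout: float, reengage_options: list) -> None:
        # Illustrative loop: re-engage only when BOTH sides have been silent
        # for at least reengage_timeout seconds, then back off before rechecking.
        while state.active:  # hypothetical "conversation still running" flag
            if state.last_agent_response and state.last_final_transcript_from_human:
                last_human_touchpoint = time.time() - state.last_final_transcript_from_human
                last_agent_touchpoint = time.time() - state.last_agent_response
                if (last_human_touchpoint >= reengage_timeout) and (last_agent_touchpoint >= reengage_timeout):
                    await state.say(random.choice(reengage_options))  # hypothetical send helper
                    state.last_agent_response = time.time()  # what mark_last_agent_response() records
                    await asyncio.sleep(2.5)  # longer back-off right after prompting
                else:
                    await asyncio.sleep(1)
            else:
                await asyncio.sleep(1)

The longer 2.5 second sleep after a prompt gives the freshly updated agent timestamp time to take effect before the next check, which matches the intent of the final hunk.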
