Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 7 additions & 2 deletions app/lib/services/notifications/notification_service_fcm.dart
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ import 'package:omi/services/notifications/action_item_notification_handler.dart
import 'package:omi/services/notifications/important_conversation_notification_handler.dart';
import 'package:omi/services/notifications/merge_notification_handler.dart';
import 'package:omi/services/notifications/notification_interface.dart';
import 'package:omi/services/voice_playback/omi_voice_playback_service.dart';
import 'package:omi/utils/analytics/intercom.dart';
import 'package:omi/utils/logger.dart';

Expand Down Expand Up @@ -238,14 +239,18 @@ class _FCMNotificationService implements NotificationInterface {
_serverMessageStreamController.add(ServerMessage.fromJson(data));
}
if (noti != null && _shouldShowForegroundNotificationOnFCMMessageReceived()) {
_showForegroundNotification(noti: noti, payload: payload);
if (!OmiVoicePlaybackService.instance.isSpeaking) {
_showForegroundNotification(noti: noti, payload: payload);
}
}
return;
}

// Announcement likes
if (noti != null && _shouldShowForegroundNotificationOnFCMMessageReceived()) {
_showForegroundNotification(noti: noti, layout: NotificationLayout.BigText);
if (!OmiVoicePlaybackService.instance.isSpeaking) {
_showForegroundNotification(noti: noti, layout: NotificationLayout.BigText);
}
return;
}
});
Expand Down
82 changes: 58 additions & 24 deletions app/lib/services/voice_playback/omi_voice_playback_service.dart
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ class OmiVoicePlaybackService {
OmiVoicePlaybackService._();
static final OmiVoicePlaybackService instance = OmiVoicePlaybackService._();

final AudioPlayer _player = AudioPlayer();
final AudioPlayer _player = AudioPlayer(handleInterruptions: false);
final FlutterTts _fallbackTts = FlutterTts();

bool _initialized = false;
Expand All @@ -42,6 +42,7 @@ class OmiVoicePlaybackService {
bool _synthesizing = false;
bool _isPlayingQueue = false;
bool _sessionActive = false;
bool _pausedByInterruption = false;

bool get isSpeaking => _sessionActive && (_isPlayingQueue || _audioQueue.isNotEmpty || _synthesizing);

Expand All @@ -51,15 +52,19 @@ class OmiVoicePlaybackService {

try {
final session = await AudioSession.instance;
await session.configure(const AudioSessionConfiguration.speech());
session.interruptionEventStream.listen((event) {
if (event.begin) {
_player.pause();
} else {
// Don't auto-resume — the reply is stale after an interruption.
interrupt();
}
});
await session.configure(
const AudioSessionConfiguration(
avAudioSessionCategory: AVAudioSessionCategory.playback,
avAudioSessionMode: AVAudioSessionMode.voicePrompt,
androidAudioAttributes: AndroidAudioAttributes(
contentType: AndroidAudioContentType.speech,
usage: AndroidAudioUsage.assistant,
),
androidAudioFocusGainType: AndroidAudioFocusGainType.gain,
androidWillPauseWhenDucked: false,
),
);
session.interruptionEventStream.listen(_onInterruption);
// Stop immediately when headphones are unplugged mid-playback so the
// reply doesn't suddenly blast out of the phone speaker in public.
session.becomingNoisyEventStream.listen((_) {
Expand Down Expand Up @@ -102,8 +107,8 @@ class OmiVoicePlaybackService {
}
}

if (_activeMessageId == messageId) {
// Same response already in-flight; no-op.
if (_activeMessageId == messageId && isSpeaking) {
// Same response actively in-flight; no-op (rapid-double-call guard).
return;
}

Expand Down Expand Up @@ -145,15 +150,12 @@ class OmiVoicePlaybackService {

/// Called on every streamed text update. [fullText] is the cumulative AI
/// response so far. [isFinal] means this is the last chunk.
void updateStreamingResponse({
required String messageId,
required String fullText,
required bool isFinal,
}) {
void updateStreamingResponse({required String messageId, required String fullText, required bool isFinal}) {
if (SharedPreferencesUtil().voiceResponseMode == 0) return;
if (_activeMessageId != messageId) {
Logger.log(
'OmiVoicePlayback: updateStreamingResponse skipped — activeId=$_activeMessageId != incoming=$messageId');
'OmiVoicePlayback: updateStreamingResponse skipped — activeId=$_activeMessageId != incoming=$messageId',
);
return;
}
Logger.log('OmiVoicePlayback: updateStreamingResponse len=${fullText.length} isFinal=$isFinal spoken=$_spoken');
Expand Down Expand Up @@ -195,6 +197,7 @@ class OmiVoicePlaybackService {
_audioQueue.clear();
_synthesizing = false;
_isPlayingQueue = false;
_pausedByInterruption = false;
try {
await _player.stop();
} catch (_) {}
Expand All @@ -204,6 +207,41 @@ class OmiVoicePlaybackService {
await _deactivateSession();
}

/// Handles audio-session interruption events (phone call, Siri, another app
/// taking audio focus) for the in-flight voice reply.
///
/// On a pause-type interruption we pause playback and record that *we* paused
/// in [_pausedByInterruption]; when the interruption ends we resume only if
/// that flag is set, so an interruption arriving while we were already idle
/// never starts playback. Duck events are ignored (the OS lowers our volume
/// itself and restores it). An unknown interruption aborts the reply via
/// [interrupt], since the response is likely stale afterwards.
void _onInterruption(AudioInterruptionEvent event) {
  debugPrint(
    'OmiVoicePlayback: interruption begin=${event.begin} type=${event.type} '
    'activeId=$_activeMessageId isSpeaking=$isSpeaking pausedByInt=$_pausedByInterruption',
  );
  if (event.begin) {
    switch (event.type) {
      case AudioInterruptionType.duck:
        // OS handles the volume dip; nothing to do.
        break;
      case AudioInterruptionType.pause:
        if (_player.playing) {
          _pausedByInterruption = true;
          // Player may be idle or mid source-swap; pause() can then throw
          // synchronously, and its Future is fire-and-forget — guard it.
          try {
            _player.pause();
          } catch (_) {}
        }
        break;
      case AudioInterruptionType.unknown:
        // Can't tell what happened — safest to drop the (now stale) reply.
        interrupt();
        break;
    }
  } else {
    switch (event.type) {
      case AudioInterruptionType.duck:
        break;
      case AudioInterruptionType.pause:
        if (_pausedByInterruption) {
          _pausedByInterruption = false;
          // _clearInFlightState() may have run between the begin and end
          // events (or the audio source was swapped), leaving the player in
          // an idle/error state; play() can then throw. The call is
          // unawaited, so a synchronous try/catch is the only place the
          // error can be contained — without it the exception disappears
          // into an unhandled, unawaited Future.
          try {
            _player.play();
          } catch (_) {}
        }
        break;
      case AudioInterruptionType.unknown:
        break;
    }
  }
}

// ---------------------------------------------------------------------------
// Internals
// ---------------------------------------------------------------------------
Expand All @@ -213,6 +251,7 @@ class OmiVoicePlaybackService {
_audioQueue.clear();
_synthesizing = false;
_isPlayingQueue = false;
_pausedByInterruption = false;
try {
await _player.stop();
} catch (_) {}
Expand Down Expand Up @@ -347,12 +386,7 @@ class OmiVoicePlaybackService {

/// Returns the index at which to cut the next chunk, or null if we should
/// wait for more text. Mirrors `FloatingBarVoicePlaybackService.nextChunkBoundary`.
int? _nextChunkBoundary(
String text, {
required int start,
required bool isFirstChunk,
required bool isFinal,
}) {
int? _nextChunkBoundary(String text, {required int start, required bool isFirstChunk, required bool isFinal}) {
final remaining = text.length - start;
final minChars = isFirstChunk ? _firstChunkMinChars : _chunkMinChars;
final idealChars = isFirstChunk ? _firstChunkIdealChars : _chunkIdealChars;
Expand Down
Loading