diff --git a/app/lib/backend/schema/conversation.dart b/app/lib/backend/schema/conversation.dart
index 113b41182c..957e272af7 100644
--- a/app/lib/backend/schema/conversation.dart
+++ b/app/lib/backend/schema/conversation.dart
@@ -23,7 +23,7 @@ class CreateConversationResponse {
   }
 }
 
-enum ConversationSource { friend, workflow, openglass, screenpipe, sdcard }
+enum ConversationSource { friend, workflow, openglass, screenpipe, sdcard, phone }
 
 class ConversationExternalData {
   final String text;
@@ -193,6 +193,7 @@ class ServerConversation {
     if (source == ConversationSource.screenpipe) return 'Screenpipe';
     if (source == ConversationSource.openglass) return 'Openglass';
     if (source == ConversationSource.sdcard) return 'SD Card';
+    if (source == ConversationSource.phone) return 'Phone Recording';
     if (discarded) return 'Discarded';
     return structured.category.substring(0, 1).toUpperCase() + structured.category.substring(1);
   }
diff --git a/app/lib/pages/capture/voice_recording_page.dart b/app/lib/pages/capture/voice_recording_page.dart
new file mode 100644
index 0000000000..cd148979a6
--- /dev/null
+++ b/app/lib/pages/capture/voice_recording_page.dart
@@ -0,0 +1,184 @@
+import 'dart:async';
+import 'dart:io';
+
+import 'package:flutter/material.dart';
+import 'package:flutter_sound/flutter_sound.dart';
+import 'package:friend_private/backend/schema/conversation.dart';
+import 'package:friend_private/pages/conversation_detail/page.dart';
+import 'package:friend_private/providers/capture_provider.dart';
+import 'package:path_provider/path_provider.dart';
+import 'package:permission_handler/permission_handler.dart';
+import 'package:provider/provider.dart';
+import 'package:uuid/uuid.dart';
+
+/// Full-screen page that records audio from the phone microphone and hands
+/// the resulting WAV file to [CaptureProvider.processPhoneRecording].
+class VoiceRecordingPage extends StatefulWidget {
+  const VoiceRecordingPage({Key? key}) : super(key: key);
+
+  @override
+  State<VoiceRecordingPage> createState() => _VoiceRecordingPageState();
+}
+
+class _VoiceRecordingPageState extends State<VoiceRecordingPage> {
+  final FlutterSoundRecorder _recorder = FlutterSoundRecorder();
+  bool _isRecording = false;
+  String _recordingFilePath = '';
+  Timer? _recordingTimer;
+  int _recordingDuration = 0; // elapsed seconds of the current recording
+  final scaffoldKey = GlobalKey<ScaffoldState>();
+
+  @override
+  void initState() {
+    super.initState();
+    _initRecorder();
+  }
+
+  /// Requests microphone permission and opens the recorder session.
+  Future<void> _initRecorder() async {
+    final status = await Permission.microphone.request();
+    if (status != PermissionStatus.granted) {
+      if (!mounted) return; // context is invalid after the async gap if disposed
+      ScaffoldMessenger.of(context).showSnackBar(
+        const SnackBar(
+          content: Text('Microphone permission is required to record audio'),
+        ),
+      );
+      return;
+    }
+
+    await _recorder.openRecorder();
+    _recorder.setSubscriptionDuration(const Duration(milliseconds: 500));
+  }
+
+  Future<void> _startRecording() async {
+    try {
+      // Record into a uniquely named WAV file in the temp directory.
+      final tempDir = await getTemporaryDirectory();
+      _recordingFilePath = '${tempDir.path}/voice_recording_${const Uuid().v4()}.wav';
+
+      // Start the recording
+      await _recorder.startRecorder(
+        toFile: _recordingFilePath,
+        codec: Codec.pcm16WAV,
+      );
+
+      // Tick once per second to drive the elapsed-time display.
+      _recordingTimer = Timer.periodic(const Duration(seconds: 1), (timer) {
+        setState(() {
+          _recordingDuration++;
+        });
+      });
+
+      setState(() {
+        _recordingDuration = 0; // reset so a second recording starts at 00:00
+        _isRecording = true;
+      });
+    } catch (e) {
+      debugPrint('Error starting recording: $e');
+    }
+  }
+
+  Future<void> _stopRecording() async {
+    try {
+      await _recorder.stopRecorder();
+      _recordingTimer?.cancel();
+
+      setState(() {
+        _isRecording = false;
+      });
+
+      if (!mounted) return; // guard context use after the stopRecorder() await
+      // Hand the finished file to the provider for transcription.
+      final captureProvider = Provider.of<CaptureProvider>(context, listen: false);
+      await captureProvider.processPhoneRecording(_recordingFilePath);
+
+      if (!mounted) return; // guard Navigator use after the processing await
+      if (captureProvider.inProgressConversation != null) {
+        // Navigate to the conversation detail page
+        Navigator.pushReplacement(
+          context,
+          MaterialPageRoute(
+            builder: (context) => ConversationDetailPage(
+              conversation: captureProvider.inProgressConversation!,
+            ),
+          ),
+        );
+      }
+    } catch (e) {
+      debugPrint('Error stopping recording: $e');
+    }
+  }
+
+  /// Formats [seconds] as zero-padded mm:ss.
+  String _formatDuration(int seconds) {
+    final minutes = seconds ~/ 60;
+    final remainingSeconds = seconds % 60;
+    return '${minutes.toString().padLeft(2, '0')}:${remainingSeconds.toString().padLeft(2, '0')}';
+  }
+
+  @override
+  void dispose() {
+    _recordingTimer?.cancel();
+    _recorder.closeRecorder();
+    super.dispose();
+  }
+
+  @override
+  Widget build(BuildContext context) {
+    return Scaffold(
+      key: scaffoldKey,
+      backgroundColor: Theme.of(context).colorScheme.primary,
+      appBar: AppBar(
+        backgroundColor: Theme.of(context).colorScheme.primary,
+        title: Row(
+          mainAxisSize: MainAxisSize.max,
+          mainAxisAlignment: MainAxisAlignment.spaceBetween,
+          crossAxisAlignment: CrossAxisAlignment.center,
+          children: [
+            IconButton(
+              onPressed: () {
+                Navigator.pop(context);
+              },
+              icon: const Icon(Icons.arrow_back_rounded, size: 24.0),
+            ),
+            const SizedBox(width: 4),
+            const Text("🎙️"),
+            const SizedBox(width: 4),
+            const Expanded(child: Text("Voice Recording")),
+          ],
+        ),
+      ),
+      body: Center(
+        child: Column(
+          mainAxisAlignment: MainAxisAlignment.center,
+          children: [
+            if (_isRecording)
+              Text(
+                _formatDuration(_recordingDuration),
+                style: const TextStyle(fontSize: 48, color: Colors.white),
+              ),
+            const SizedBox(height: 40),
+            Container(
+              height: 80,
+              width: 80,
+              decoration: BoxDecoration(
+                color: _isRecording ? Colors.red : Colors.white,
+                shape: BoxShape.circle,
+              ),
+              child: IconButton(
+                icon: Icon(
+                  _isRecording ? Icons.stop : Icons.mic,
+                  size: 40,
+                  color: _isRecording ? Colors.white : Colors.red,
+                ),
+                onPressed: _isRecording ? _stopRecording : _startRecording,
+              ),
+            ),
+            const SizedBox(height: 40),
+            Text(
+              _isRecording ? "Tap to stop recording" : "Tap to start recording",
+              style: const TextStyle(fontSize: 16, color: Colors.white),
+            ),
+          ],
+        ),
+      ),
+    );
+  }
+}
diff --git a/app/lib/pages/home/page.dart b/app/lib/pages/home/page.dart
index 8a08138e6b..b48ae72f1a 100644
--- a/app/lib/pages/home/page.dart
+++ b/app/lib/pages/home/page.dart
@@ -40,6 +40,7 @@ import 'package:upgrader/upgrader.dart';
 
 import '../conversations/sync_page.dart';
 import 'widgets/battery_info_widget.dart';
+import 'package:friend_private/pages/capture/voice_recording_page.dart';
 
 class HomePageWrapper extends StatefulWidget {
   final String? navigateToRoute;
@@ -453,6 +454,26 @@ class _HomePageState extends State<HomePage> with WidgetsBindingObserver, Ticker
               }
             },
           ),
+          Positioned(
+            right: 20,
+            bottom: 80,
+            child: FloatingActionButton(
+              heroTag: 'voice_recording_button',
+              backgroundColor: Colors.red,
+              onPressed: () {
+                Navigator.of(context).push(
+                  MaterialPageRoute(
+                    builder: (context) => const VoiceRecordingPage(),
+                  ),
+                );
+              },
+              child: const Icon(
+                Icons.mic,
+                color: Colors.white,
+                size: 24,
+              ),
+            ),
+          ),
         ],
       ),
     ),
diff --git a/app/lib/providers/capture_provider.dart b/app/lib/providers/capture_provider.dart
index 0231fdff04..364aabcdd1 100644
--- a/app/lib/providers/capture_provider.dart
+++ b/app/lib/providers/capture_provider.dart
@@ -1,5 +1,6 @@
 import 'dart:async';
 import 'dart:convert';
+import 'dart:io';
 import 'dart:typed_data';
 
 import 'package:flutter/material.dart';
@@ -911,4 +912,88 @@ class CaptureProvider extends ChangeNotifier
     sdCardReady = value;
     notifyListeners();
   }
+
+  /// Streams a finished on-phone recording through the transcript websocket.
+  Future<void> processPhoneRecording(String filePath) async {
+    try {
+      debugPrint('Processing phone recording from: $filePath');
+      // Reset any previous state
+      await _resetStateVariables();
+
+      // Ensure the transcript service is ready
+      if (!_transcriptServiceReady) {
+        await _initiateWebsocket(force: true);
+      }
+
+      // Read the audio file
+      final audioFile = File(filePath);
+      if (!await audioFile.exists()) {
+        debugPrint('Audio file does not exist: $filePath');
+        return;
+      }
+
+      final audioBytes = await audioFile.readAsBytes();
+
+      // Create a conversation if not already created
+      if (_inProgressConversation == null) {
+        await _createConversation(ConversationSource.phone);
+      }
+
+      // Send the file in fixed-size chunks to mimic live audio streaming.
+      const chunkSize = 4096; // bytes per websocket frame
+      for (var i = 0; i < audioBytes.length; i += chunkSize) {
+        final end = (i + chunkSize < audioBytes.length) ? i + chunkSize : audioBytes.length;
+        final chunk = audioBytes.sublist(i, end);
+
+        // Send the chunk to the websocket
+        _socket?.send(chunk);
+
+        // Small delay to simulate streaming
+        await Future.delayed(const Duration(milliseconds: 10));
+      }
+
+      // NOTE(review): fixed 2s wait assumes transcription finishes by then — verify
+      await Future.delayed(const Duration(seconds: 2));
+
+      // Finalize the conversation
+      if (_inProgressConversation != null) {
+        await _finalizeConversation();
+      }
+
+      notifyListeners();
+    } catch (e) {
+      debugPrint('Error processing phone recording: $e');
+    }
+  }
+
+  /// Creates a draft conversation titled with the current timestamp for [source].
+  Future<void> _createConversation(ConversationSource source) async {
+    try {
+      final createdConversation = await createConversation(
+        title: 'Voice Recording ${DateTime.now().toString().split('.')[0]}',
+        source: source,
+      );
+
+      if (createdConversation != null) {
+        _inProgressConversation = createdConversation;
+        notifyListeners();
+      }
+    } catch (e) {
+      debugPrint('Error creating conversation: $e');
+    }
+  }
+
+  /// Promotes the in-progress conversation from draft to final on the server.
+  Future<void> _finalizeConversation() async {
+    try {
+      if (_inProgressConversation != null) {
+        await updateConversation(
+          _inProgressConversation!.id,
+          isDraft: false,
+        );
+      }
+    } catch (e) {
+      debugPrint('Error finalizing conversation: $e');
+    }
+  }
 }