Add voice recording feature (Issue #1953) #1963

Closed

3 changes: 2 additions & 1 deletion app/lib/backend/schema/conversation.dart
@@ -23,7 +23,7 @@ class CreateConversationResponse {
}
}

enum ConversationSource { friend, workflow, openglass, screenpipe, sdcard }
enum ConversationSource { friend, workflow, openglass, screenpipe, sdcard, phone }

class ConversationExternalData {
final String text;
@@ -193,6 +193,7 @@ class ServerConversation {
if (source == ConversationSource.screenpipe) return 'Screenpipe';
if (source == ConversationSource.openglass) return 'Openglass';
if (source == ConversationSource.sdcard) return 'SD Card';
if (source == ConversationSource.phone) return 'Phone Recording';
if (discarded) return 'Discarded';
return structured.category.substring(0, 1).toUpperCase() + structured.category.substring(1);
}
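Adding `phone` to `ConversationSource` only takes effect if the string the backend sends for this source is mapped onto the new value. The lookup helper the app uses is not part of this hunk, so the following is only an illustrative sketch of that kind of mapping, assuming the enum name doubles as the wire tag; `sourceFromString` is a hypothetical name, not something this PR adds.

// Hypothetical helper, for illustration only: map a backend tag onto the enum,
// falling back to `friend` when the tag is missing or unknown.
ConversationSource sourceFromString(String? value) {
  return ConversationSource.values.firstWhere(
    (s) => s.name == value,
    orElse: () => ConversationSource.friend,
  );
}

// sourceFromString('phone')    -> ConversationSource.phone (titled 'Phone Recording')
// sourceFromString('nonsense') -> ConversationSource.friend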
180 changes: 180 additions & 0 deletions app/lib/pages/capture/voice_recording_page.dart
@@ -0,0 +1,180 @@
import 'dart:async';
import 'dart:io';

import 'package:flutter/material.dart';
import 'package:flutter_sound/flutter_sound.dart';
import 'package:friend_private/backend/schema/conversation.dart';
import 'package:friend_private/pages/conversation_detail/page.dart';
import 'package:friend_private/providers/capture_provider.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:provider/provider.dart';
import 'package:uuid/uuid.dart';

class VoiceRecordingPage extends StatefulWidget {
  const VoiceRecordingPage({Key? key}) : super(key: key);

  @override
  State<VoiceRecordingPage> createState() => _VoiceRecordingPageState();
}

class _VoiceRecordingPageState extends State<VoiceRecordingPage> {
  final FlutterSoundRecorder _recorder = FlutterSoundRecorder();
  bool _isRecording = false;
  String _recordingFilePath = '';
  Timer? _recordingTimer;
  int _recordingDuration = 0;
  final scaffoldKey = GlobalKey<ScaffoldState>();

  @override
  void initState() {
    super.initState();
    _initRecorder();
  }

  Future<void> _initRecorder() async {
    final status = await Permission.microphone.request();
    if (status != PermissionStatus.granted) {
      ScaffoldMessenger.of(context).showSnackBar(
        const SnackBar(
          content: Text('Microphone permission is required to record audio'),
        ),
      );
      return;
    }

    await _recorder.openRecorder();
    _recorder.setSubscriptionDuration(const Duration(milliseconds: 500));
  }

  Future<void> _startRecording() async {
    try {
      // Get the temporary directory
      final tempDir = await getTemporaryDirectory();
      _recordingFilePath = '${tempDir.path}/voice_recording_${const Uuid().v4()}.wav';

      // Start the recording
      await _recorder.startRecorder(
        toFile: _recordingFilePath,
        codec: Codec.pcm16WAV,
      );

      // Start the recording timer
      _recordingTimer = Timer.periodic(const Duration(seconds: 1), (timer) {
        setState(() {
          _recordingDuration++;
        });
      });

      setState(() {
        _isRecording = true;
      });
    } catch (e) {
      debugPrint('Error starting recording: $e');
    }
  }

  Future<void> _stopRecording() async {
    try {
      await _recorder.stopRecorder();
      _recordingTimer?.cancel();

      setState(() {
        _isRecording = false;
      });

      // Process the recording
      final captureProvider = Provider.of<CaptureProvider>(context, listen: false);
      await captureProvider.processPhoneRecording(_recordingFilePath);

      if (captureProvider.inProgressConversation != null) {
        // Navigate to the conversation detail page
        Navigator.pushReplacement(
          context,
          MaterialPageRoute(
            builder: (context) => ConversationDetailPage(
              conversation: captureProvider.inProgressConversation!,
            ),
          ),
        );
      }
    } catch (e) {
      debugPrint('Error stopping recording: $e');
    }
  }

  String _formatDuration(int seconds) {
    final minutes = seconds ~/ 60;
    final remainingSeconds = seconds % 60;
    return '${minutes.toString().padLeft(2, '0')}:${remainingSeconds.toString().padLeft(2, '0')}';
  }

  @override
  void dispose() {
    _recordingTimer?.cancel();
    _recorder.closeRecorder();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      key: scaffoldKey,
      backgroundColor: Theme.of(context).colorScheme.primary,
      appBar: AppBar(
        backgroundColor: Theme.of(context).colorScheme.primary,
        title: Row(
          mainAxisSize: MainAxisSize.max,
          mainAxisAlignment: MainAxisAlignment.spaceBetween,
          crossAxisAlignment: CrossAxisAlignment.center,
          children: [
            IconButton(
              onPressed: () {
                Navigator.pop(context);
              },
              icon: const Icon(Icons.arrow_back_rounded, size: 24.0),
            ),
            const SizedBox(width: 4),
            const Text("🎙️"),
            const SizedBox(width: 4),
            const Expanded(child: Text("Voice Recording")),
          ],
        ),
      ),
      body: Center(
        child: Column(
          mainAxisAlignment: MainAxisAlignment.center,
          children: [
            if (_isRecording)
              Text(
                _formatDuration(_recordingDuration),
                style: const TextStyle(fontSize: 48, color: Colors.white),
              ),
            const SizedBox(height: 40),
            Container(
              height: 80,
              width: 80,
              decoration: BoxDecoration(
                color: _isRecording ? Colors.red : Colors.white,
                shape: BoxShape.circle,
              ),
              child: IconButton(
                icon: Icon(
                  _isRecording ? Icons.stop : Icons.mic,
                  size: 40,
                  color: _isRecording ? Colors.white : Colors.red,
                ),
                onPressed: _isRecording ? _stopRecording : _startRecording,
              ),
            ),
            const SizedBox(height: 40),
            Text(
              _isRecording ? "Tap to stop recording" : "Tap to start recording",
              style: const TextStyle(fontSize: 16, color: Colors.white),
            ),
          ],
        ),
      ),
    );
  }
}
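One caveat with the page above: `_initRecorder` and `_stopRecording` both use `context` after an `await`, so the widget can already be disposed by the time the snackbar or the navigation runs. A minimal hardening sketch, assuming everything else stays as in the diff; the `mounted` guards are an illustration, not something this PR includes.

Future<void> _initRecorder() async {
  final status = await Permission.microphone.request();
  if (!mounted) return; // The page may have been popped while the permission dialog was up.
  if (status != PermissionStatus.granted) {
    ScaffoldMessenger.of(context).showSnackBar(
      const SnackBar(content: Text('Microphone permission is required to record audio')),
    );
    return;
  }
  await _recorder.openRecorder();
  _recorder.setSubscriptionDuration(const Duration(milliseconds: 500));
}

// Likewise in _stopRecording, before touching context again after the awaits:
//   await captureProvider.processPhoneRecording(_recordingFilePath);
//   if (!mounted) return;
//   Navigator.pushReplacement(context, ...);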
21 changes: 21 additions & 0 deletions app/lib/pages/home/page.dart
@@ -40,6 +40,7 @@ import 'package:upgrader/upgrader.dart';

import '../conversations/sync_page.dart';
import 'widgets/battery_info_widget.dart';
import 'package:friend_private/pages/capture/voice_recording_page.dart';

class HomePageWrapper extends StatefulWidget {
final String? navigateToRoute;
@@ -453,6 +454,26 @@ class _HomePageState extends State<HomePage> with WidgetsBindingObserver, Ticker
}
},
),
Positioned(
  right: 20,
  bottom: 80,
  child: FloatingActionButton(
    heroTag: 'voice_recording_button',
    backgroundColor: Colors.red,
    onPressed: () {
      Navigator.of(context).push(
        MaterialPageRoute(
          builder: (context) => const VoiceRecordingPage(),
        ),
      );
    },
    child: const Icon(
      Icons.mic,
      color: Colors.white,
      size: 24,
    ),
  ),
),
],
),
),
85 changes: 85 additions & 0 deletions app/lib/providers/capture_provider.dart
@@ -1,5 +1,6 @@
import 'dart:async';
import 'dart:convert';
import 'dart:io';
import 'dart:typed_data';

import 'package:flutter/material.dart';
@@ -911,4 +912,88 @@ class CaptureProvider extends ChangeNotifier
    sdCardReady = value;
    notifyListeners();
  }

  // Processes an audio file recorded on the phone by streaming it through the transcription socket
  Future<void> processPhoneRecording(String filePath) async {
    try {
      debugPrint('Processing phone recording from: $filePath');
      // Reset any previous state
      await _resetStateVariables();

      // Ensure the transcript service is ready
      if (!_transcriptServiceReady) {
        await _initiateWebsocket(force: true);
      }

      // Read the audio file
      final audioFile = File(filePath);
      if (!await audioFile.exists()) {
        debugPrint('Audio file does not exist: $filePath');
        return;
      }

      final audioBytes = await audioFile.readAsBytes();

      // Create a conversation if not already created
      if (_inProgressConversation == null) {
        await _createConversation(ConversationSource.phone);
      }

      // Process the audio bytes in chunks to simulate streaming
      const chunkSize = 4096; // Adjust based on your needs
      for (var i = 0; i < audioBytes.length; i += chunkSize) {
        final end = (i + chunkSize < audioBytes.length) ? i + chunkSize : audioBytes.length;
        final chunk = audioBytes.sublist(i, end);

        // Send the chunk to the websocket
        _socket?.send(chunk);

        // Small delay to simulate streaming
        await Future.delayed(const Duration(milliseconds: 10));
      }

      // Wait for transcription to complete
      await Future.delayed(const Duration(seconds: 2));

      // Finalize the conversation
      if (_inProgressConversation != null) {
        await _finalizeConversation();
      }

      notifyListeners();
    } catch (e) {
      debugPrint('Error processing phone recording: $e');
    }
  }

  // Helper method to create a conversation specifically for phone recordings
  Future<void> _createConversation(ConversationSource source) async {
    try {
      final createdConversation = await createConversation(
        title: 'Voice Recording ${DateTime.now().toString().split('.')[0]}',
        source: source,
      );

      if (createdConversation != null) {
        _inProgressConversation = createdConversation;
        notifyListeners();
      }
    } catch (e) {
      debugPrint('Error creating conversation: $e');
    }
  }

  // Helper method to finalize the conversation
  Future<void> _finalizeConversation() async {
    try {
      if (_inProgressConversation != null) {
        await updateConversation(
          _inProgressConversation!.id,
          isDraft: false,
        );
      }
    } catch (e) {
      debugPrint('Error finalizing conversation: $e');
    }
  }
}
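One assumption worth flagging in `processPhoneRecording`: `Codec.pcm16WAV` writes a RIFF/WAV file, so `audioBytes` begins with a 44-byte container header rather than raw PCM16 samples. If the transcription socket is configured for the same raw PCM stream the wearable sends, that header should be stripped before chunking; if it accepts WAV directly, this is unnecessary. A sketch under the raw-PCM assumption (`stripWavHeader` is an illustrative helper, not part of this PR):

import 'dart:typed_data';

/// Drops the canonical 44-byte RIFF/WAV header written by Codec.pcm16WAV and
/// returns the raw PCM16 samples. Assumes a plain header with no extra chunks
/// before the data chunk; anything fancier would need real WAV parsing.
Uint8List stripWavHeader(Uint8List wavBytes) {
  const headerSize = 44;
  if (wavBytes.length <= headerSize) return wavBytes;
  return Uint8List.sublistView(wavBytes, headerSize);
}

// In processPhoneRecording, the chunking loop would then iterate over:
//   final pcmBytes = stripWavHeader(await audioFile.readAsBytes());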