From 85a8f6b7cf40113e9f3976377ea709813a04ce64 Mon Sep 17 00:00:00 2001 From: laurenspriem Date: Wed, 22 May 2024 10:47:08 +0530 Subject: [PATCH 01/13] [mob][photos] MLController lower interaction times for now --- .../machine_learning/machine_learning_controller.dart | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/mobile/lib/services/machine_learning/machine_learning_controller.dart b/mobile/lib/services/machine_learning/machine_learning_controller.dart index 65daf614ce..5b4097874e 100644 --- a/mobile/lib/services/machine_learning/machine_learning_controller.dart +++ b/mobile/lib/services/machine_learning/machine_learning_controller.dart @@ -3,6 +3,7 @@ import "dart:io"; import "package:battery_info/battery_info_plugin.dart"; import "package:battery_info/model/android_battery_info.dart"; +import "package:flutter/foundation.dart" show kDebugMode; import "package:logging/logging.dart"; import "package:photos/core/event_bus.dart"; import "package:photos/events/machine_learning_control_event.dart"; @@ -17,7 +18,8 @@ class MachineLearningController { static const kMaximumTemperature = 42; // 42 degree celsius static const kMinimumBatteryLevel = 20; // 20% - static const kDefaultInteractionTimeout = Duration(seconds: 15); + static const kDefaultInteractionTimeout = + kDebugMode ? Duration(seconds: 1) : Duration(seconds: 5); static const kUnhealthyStates = ["over_heat", "over_voltage", "dead"]; bool _isDeviceHealthy = true; From 7811c582140ef5e286616a833a56ccff2dd7cfce Mon Sep 17 00:00:00 2001 From: laurenspriem Date: Wed, 22 May 2024 11:08:51 +0530 Subject: [PATCH 02/13] [mob][photos] Inline --- .../services/machine_learning/machine_learning_controller.dart | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mobile/lib/services/machine_learning/machine_learning_controller.dart b/mobile/lib/services/machine_learning/machine_learning_controller.dart index 5b4097874e..e79fb4be69 100644 --- a/mobile/lib/services/machine_learning/machine_learning_controller.dart +++ b/mobile/lib/services/machine_learning/machine_learning_controller.dart @@ -19,7 +19,7 @@ class MachineLearningController { static const kMaximumTemperature = 42; // 42 degree celsius static const kMinimumBatteryLevel = 20; // 20% static const kDefaultInteractionTimeout = - kDebugMode ? Duration(seconds: 1) : Duration(seconds: 5); + kDebugMode ? 
Duration(seconds: 3) : Duration(seconds: 5); static const kUnhealthyStates = ["over_heat", "over_voltage", "dead"]; bool _isDeviceHealthy = true; From 78afae401326d83d1bcd642ccdcc755734fa6811 Mon Sep 17 00:00:00 2001 From: laurenspriem Date: Wed, 22 May 2024 11:40:22 +0530 Subject: [PATCH 03/13] [mob][photos] Lower file download limit --- .../lib/services/machine_learning/face_ml/face_ml_service.dart | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart index 9e72f4c55a..f6f3ef3210 100644 --- a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart +++ b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart @@ -96,7 +96,7 @@ class FaceMlService { bool isClusteringRunning = false; bool shouldSyncPeople = false; - final int _fileDownloadLimit = 15; + final int _fileDownloadLimit = 10; final int _embeddingFetchLimit = 200; Future init({bool initializeImageMlIsolate = false}) async { From 678efd1e8b3905d72f518d7aeea46a88de93270a Mon Sep 17 00:00:00 2001 From: laurenspriem Date: Wed, 22 May 2024 14:41:44 +0530 Subject: [PATCH 04/13] [mob][photos] Refactor of flags for faceMlService --- .../face_ml/face_ml_service.dart | 196 ++++++++++-------- .../debug/face_debug_section_widget.dart | 22 +- .../machine_learning_settings_page.dart | 2 +- 3 files changed, 123 insertions(+), 97 deletions(-) diff --git a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart index f6f3ef3210..639bccbb25 100644 --- a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart +++ b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart @@ -74,7 +74,7 @@ class FaceMlService { late ReceivePort _receivePort = ReceivePort(); late SendPort _mainSendPort; - bool isIsolateSpawned = false; + bool _isIsolateSpawned = false; // singleton pattern FaceMlService._privateConstructor(); @@ -89,12 +89,14 @@ class FaceMlService { final _computer = Computer.shared(); bool isInitialized = false; - late String client; + late final String client; - bool canRunMLController = false; - bool isImageIndexRunning = false; - bool isClusteringRunning = false; - bool shouldSyncPeople = false; + bool debugIndexingDisabled = false; + bool _mlControllerStatus = false; + bool _isIndexingOrClusteringRunning = false; + bool _shouldPauseIndexingAndClustering = false; + bool _shouldSyncPeople = false; + bool _isSyncing = false; final int _fileDownloadLimit = 10; final int _embeddingFetchLimit = 200; @@ -133,16 +135,16 @@ class FaceMlService { _logger.info("client: $client"); isInitialized = true; - canRunMLController = !Platform.isAndroid || kDebugMode; + _mlControllerStatus = !Platform.isAndroid; /// hooking FaceML into [MachineLearningController] - if (Platform.isAndroid && !kDebugMode) { + if (Platform.isAndroid) { Bus.instance.on().listen((event) { if (LocalSettings.instance.isFaceIndexingEnabled == false) { return; } - canRunMLController = event.shouldRun; - if (canRunMLController) { + _mlControllerStatus = event.shouldRun; + if (_mlControllerStatus) { _logger.info( "MLController allowed running ML, faces indexing starting", ); @@ -150,13 +152,11 @@ class FaceMlService { } else { _logger .info("MLController stopped running ML, faces indexing paused"); - pauseIndexing(); + pauseIndexingAndClustering(); } }); } else { - if (!kDebugMode) { - unawaited(indexAndClusterAll()); - } + 
unawaited(indexAndClusterAll()); } }); } @@ -167,22 +167,13 @@ class FaceMlService { void listenIndexOnDiffSync() { Bus.instance.on().listen((event) async { - if (LocalSettings.instance.isFaceIndexingEnabled == false || kDebugMode) { - return; - } - // [neeraj] intentional delay in starting indexing on diff sync, this gives time for the user - // to disable face-indexing in case it's causing crash. In the future, we - // should have a better way to handle this. - shouldSyncPeople = true; - Future.delayed(const Duration(seconds: 10), () { - unawaited(indexAndClusterAll()); - }); + unawaited(sync()); }); } void listenOnPeopleChangedSync() { Bus.instance.on().listen((event) { - shouldSyncPeople = true; + _shouldSyncPeople = true; }); } @@ -218,9 +209,9 @@ class FaceMlService { }); } - Future initIsolate() async { + Future _initIsolate() async { return _initLockIsolate.synchronized(() async { - if (isIsolateSpawned) return; + if (_isIsolateSpawned) return; _logger.info("initIsolate called"); _receivePort = ReceivePort(); @@ -231,19 +222,19 @@ class FaceMlService { _receivePort.sendPort, ); _mainSendPort = await _receivePort.first as SendPort; - isIsolateSpawned = true; + _isIsolateSpawned = true; _resetInactivityTimer(); } catch (e) { _logger.severe('Could not spawn isolate', e); - isIsolateSpawned = false; + _isIsolateSpawned = false; } }); } - Future ensureSpawnedIsolate() async { - if (!isIsolateSpawned) { - await initIsolate(); + Future _ensureSpawnedIsolate() async { + if (!_isIsolateSpawned) { + await _initIsolate(); } } @@ -286,11 +277,11 @@ class FaceMlService { Future _runInIsolate( (FaceMlOperation, Map) message, ) async { - await ensureSpawnedIsolate(); + await _ensureSpawnedIsolate(); return _functionLock.synchronized(() async { _resetInactivityTimer(); - if (isImageIndexRunning == false || canRunMLController == false) { + if (_shouldPauseIndexingAndClustering == false) { return null; } @@ -338,24 +329,31 @@ class FaceMlService { } void disposeIsolate() async { - if (!isIsolateSpawned) return; + if (!_isIsolateSpawned) return; await release(); - isIsolateSpawned = false; + _isIsolateSpawned = false; _isolate.kill(); _receivePort.close(); _inactivityTimer?.cancel(); } - Future indexAndClusterAll() async { - if (isClusteringRunning || isImageIndexRunning) { - _logger.info("indexing or clustering is already running, skipping"); + Future sync({bool forceSync = true}) async { + if (_isSyncing) { return; } - if (shouldSyncPeople) { + _isSyncing = true; + if (forceSync) { await PersonService.instance.reconcileClusters(); - shouldSyncPeople = false; + _shouldSyncPeople = false; } + _isSyncing = false; + } + + Future indexAndClusterAll() async { + if (_cannotRunMLFunction()) return; + + await sync(forceSync: _shouldSyncPeople); await indexAllImages(); final indexingCompleteRatio = await _getIndexedDoneRatio(); if (indexingCompleteRatio < 0.95) { @@ -368,35 +366,20 @@ class FaceMlService { } } + void pauseIndexingAndClustering() { + if (_isIndexingOrClusteringRunning) { + _shouldPauseIndexingAndClustering = true; + } + } + Future clusterAllImages({ double minFaceScore = kMinimumQualityFaceScore, bool clusterInBuckets = true, }) async { - if (!canRunMLController) { - _logger - .info("MLController does not allow running ML, skipping clustering"); - return; - } - if (isClusteringRunning) { - _logger.info("clusterAllImages is already running, skipping"); - return; - } - // verify faces is enabled - if (LocalSettings.instance.isFaceIndexingEnabled == false) { - _logger.warning("clustering is 
disabled by user"); - return; - } - - final indexingCompleteRatio = await _getIndexedDoneRatio(); - if (indexingCompleteRatio < 0.95) { - _logger.info( - "Indexing is not far enough, skipping clustering. Indexing is at $indexingCompleteRatio", - ); - return; - } + if (_cannotRunMLFunction()) return; _logger.info("`clusterAllImages()` called"); - isClusteringRunning = true; + _isIndexingOrClusteringRunning = true; final clusterAllImagesTime = DateTime.now(); try { @@ -441,7 +424,7 @@ class FaceMlService { int bucket = 1; while (true) { - if (!canRunMLController) { + if (_shouldPauseIndexingAndClustering) { _logger.info( "MLController does not allow running ML, stopping before clustering bucket $bucket", ); @@ -535,7 +518,8 @@ class FaceMlService { } catch (e, s) { _logger.severe("`clusterAllImages` failed", e, s); } finally { - isClusteringRunning = false; + _isIndexingOrClusteringRunning = false; + _shouldPauseIndexingAndClustering = false; } } @@ -543,17 +527,10 @@ class FaceMlService { /// /// This function first checks if the image has already been analyzed with the lastest faceMlVersion and stored in the database. If so, it skips the image. Future indexAllImages({int retryFetchCount = 10}) async { - if (isImageIndexRunning) { - _logger.warning("indexAllImages is already running, skipping"); - return; - } - // verify faces is enabled - if (LocalSettings.instance.isFaceIndexingEnabled == false) { - _logger.warning("indexing is disabled by user"); - return; - } + if (_cannotRunMLFunction()) return; + try { - isImageIndexRunning = true; + _isIndexingOrClusteringRunning = true; _logger.info('starting image indexing'); final w = (kDebugMode ? EnteWatch('prepare indexing files') : null) @@ -629,7 +606,7 @@ class FaceMlService { final List faces = []; final remoteFileIdToVersion = {}; for (FileMl fileMl in res.mlData.values) { - if (shouldDiscardRemoteEmbedding(fileMl)) continue; + if (_shouldDiscardRemoteEmbedding(fileMl)) continue; if (fileMl.faceEmbedding.faces.isEmpty) { faces.add( Face.empty( @@ -688,7 +665,7 @@ class FaceMlService { final smallerChunks = chunk.chunks(_fileDownloadLimit); for (final smallestChunk in smallerChunks) { for (final enteFile in smallestChunk) { - if (isImageIndexRunning == false) { + if (_shouldPauseIndexingAndClustering) { _logger.info("indexAllImages() was paused, stopping"); break outerLoop; } @@ -712,16 +689,17 @@ class FaceMlService { stopwatch.stop(); _logger.info( - "`indexAllImages()` finished. Analyzed $fileAnalyzedCount images, in ${stopwatch.elapsed.inSeconds} seconds (avg of ${stopwatch.elapsed.inSeconds / fileAnalyzedCount} seconds per image, skipped $fileSkippedCount images. MLController status: $canRunMLController)", + "`indexAllImages()` finished. Analyzed $fileAnalyzedCount images, in ${stopwatch.elapsed.inSeconds} seconds (avg of ${stopwatch.elapsed.inSeconds / fileAnalyzedCount} seconds per image, skipped $fileSkippedCount images. 
MLController status: $_mlControllerStatus)", ); } catch (e, s) { _logger.severe("indexAllImages failed", e, s); } finally { - isImageIndexRunning = false; + _isIndexingOrClusteringRunning = false; + _shouldPauseIndexingAndClustering = false; } } - bool shouldDiscardRemoteEmbedding(FileMl fileMl) { + bool _shouldDiscardRemoteEmbedding(FileMl fileMl) { if (fileMl.faceEmbedding.version < faceMlVersion) { debugPrint("Discarding remote embedding for fileID ${fileMl.fileID} " "because version is ${fileMl.faceEmbedding.version} and we need $faceMlVersion"); @@ -861,10 +839,6 @@ class FaceMlService { } } - void pauseIndexing() { - isImageIndexRunning = false; - } - /// Analyzes the given image data by running the full pipeline for faces, using [analyzeImageSync] in the isolate. Future analyzeImageInSingleIsolate(EnteFile enteFile) async { _checkEnteFileForID(enteFile); @@ -1334,8 +1308,8 @@ class FaceMlService { _logger.warning( '''Skipped analysis of image with enteFile, it might be the wrong format or has no uploadedFileID, or MLController doesn't allow it to run. enteFile: ${enteFile.toString()} - isImageIndexRunning: $isImageIndexRunning - canRunML: $canRunMLController + isImageIndexRunning: $_isIndexingOrClusteringRunning + canRunML: $_mlControllerStatus ''', ); throw CouldNotRetrieveAnyFileData(); @@ -1361,7 +1335,8 @@ class FaceMlService { } bool _skipAnalysisEnteFile(EnteFile enteFile, Map indexedFileIds) { - if (isImageIndexRunning == false || canRunMLController == false) { + if (_isIndexingOrClusteringRunning == false || + _mlControllerStatus == false) { return true; } // Skip if the file is not uploaded or not owned by the user @@ -1378,4 +1353,49 @@ class FaceMlService { return indexedFileIds.containsKey(id) && indexedFileIds[id]! >= faceMlVersion; } + + bool _cannotRunMLFunction({String function = ""}) { + if (_isIndexingOrClusteringRunning) { + _logger.info( + "Cannot run $function because indexing or clustering is already running", + ); + _logStatus(); + return true; + } + if (_mlControllerStatus == false) { + _logger.info( + "Cannot run $function because MLController does not allow it", + ); + _logStatus(); + return true; + } + if (debugIndexingDisabled) { + _logger.info( + "Cannot run $function because debugIndexingDisabled is true", + ); + _logStatus(); + return true; + } + if (_shouldPauseIndexingAndClustering) { + // This should ideally not be triggered, because one of the above should be triggered instead. 
+ _logger.warning( + "Cannot run $function because indexing and clustering is being paused", + ); + _logStatus(); + return true; + } + return false; + } + + void _logStatus() { + final String status = ''' + isInternalUser: ${flagService.internalUser} + isFaceIndexingEnabled: ${LocalSettings.instance.isFaceIndexingEnabled} + canRunMLController: $_mlControllerStatus + isIndexingOrClusteringRunning: $_isIndexingOrClusteringRunning + debugIndexingDisabled: $debugIndexingDisabled + shouldSyncPeople: $_shouldSyncPeople + '''; + _logger.info(status); + } } diff --git a/mobile/lib/ui/settings/debug/face_debug_section_widget.dart b/mobile/lib/ui/settings/debug/face_debug_section_widget.dart index 01b10ff80c..726a9f2ceb 100644 --- a/mobile/lib/ui/settings/debug/face_debug_section_widget.dart +++ b/mobile/lib/ui/settings/debug/face_debug_section_widget.dart @@ -79,7 +79,7 @@ class _FaceDebugSectionWidgetState extends State { final isEnabled = await LocalSettings.instance.toggleFaceIndexing(); if (!isEnabled) { - FaceMlService.instance.pauseIndexing(); + FaceMlService.instance.pauseIndexingAndClustering(); } if (mounted) { setState(() {}); @@ -107,7 +107,7 @@ class _FaceDebugSectionWidgetState extends State { setState(() {}); } } catch (e, s) { - _logger.warning('indexing failed ', e, s); + _logger.warning('Remote fetch toggle failed ', e, s); await showGenericErrorDialog(context: context, error: e); } }, @@ -115,22 +115,25 @@ class _FaceDebugSectionWidgetState extends State { sectionOptionSpacing, MenuItemWidget( captionedTextWidget: CaptionedTextWidget( - title: FaceMlService.instance.canRunMLController - ? "canRunML enabled" - : "canRunML disabled", + title: FaceMlService.instance.debugIndexingDisabled + ? "Debug enable indexing again" + : "Debug disable indexing", ), pressedColor: getEnteColorScheme(context).fillFaint, trailingIcon: Icons.chevron_right_outlined, trailingIconIsMuted: true, onTap: () async { try { - FaceMlService.instance.canRunMLController = - !FaceMlService.instance.canRunMLController; + FaceMlService.instance.debugIndexingDisabled = + !FaceMlService.instance.debugIndexingDisabled; + if (FaceMlService.instance.debugIndexingDisabled) { + FaceMlService.instance.pauseIndexingAndClustering(); + } if (mounted) { setState(() {}); } } catch (e, s) { - _logger.warning('canRunML toggle failed ', e, s); + _logger.warning('debugIndexingDisabled toggle failed ', e, s); await showGenericErrorDialog(context: context, error: e); } }, @@ -145,6 +148,7 @@ class _FaceDebugSectionWidgetState extends State { trailingIconIsMuted: true, onTap: () async { try { + FaceMlService.instance.debugIndexingDisabled = false; unawaited(FaceMlService.instance.indexAndClusterAll()); } catch (e, s) { _logger.warning('indexAndClusterAll failed ', e, s); @@ -162,6 +166,7 @@ class _FaceDebugSectionWidgetState extends State { trailingIconIsMuted: true, onTap: () async { try { + FaceMlService.instance.debugIndexingDisabled = false; unawaited(FaceMlService.instance.indexAllImages()); } catch (e, s) { _logger.warning('indexing failed ', e, s); @@ -189,6 +194,7 @@ class _FaceDebugSectionWidgetState extends State { onTap: () async { try { await PersonService.instance.storeRemoteFeedback(); + FaceMlService.instance.debugIndexingDisabled = false; await FaceMlService.instance .clusterAllImages(clusterInBuckets: true); Bus.instance.fire(PeopleChangedEvent()); diff --git a/mobile/lib/ui/settings/machine_learning_settings_page.dart b/mobile/lib/ui/settings/machine_learning_settings_page.dart index 1e63cf6458..47e2166282 100644 --- 
a/mobile/lib/ui/settings/machine_learning_settings_page.dart +++ b/mobile/lib/ui/settings/machine_learning_settings_page.dart @@ -208,7 +208,7 @@ class _MachineLearningSettingsPageState if (isEnabled) { unawaited(FaceMlService.instance.ensureInitialized()); } else { - FaceMlService.instance.pauseIndexing(); + FaceMlService.instance.pauseIndexingAndClustering(); } if (mounted) { setState(() {}); From ffc9eecbd148fac621c5bd099e812a7d56d309f1 Mon Sep 17 00:00:00 2001 From: laurenspriem Date: Wed, 22 May 2024 14:45:16 +0530 Subject: [PATCH 05/13] [mob][photos] Move listeners inside init --- mobile/lib/main.dart | 2 - .../face_ml/face_ml_service.dart | 309 +++++++++--------- 2 files changed, 156 insertions(+), 155 deletions(-) diff --git a/mobile/lib/main.dart b/mobile/lib/main.dart index 6a42a0a3be..50de0b9a11 100644 --- a/mobile/lib/main.dart +++ b/mobile/lib/main.dart @@ -242,8 +242,6 @@ Future _init(bool isBackground, {String via = ''}) async { // unawaited(ObjectDetectionService.instance.init()); if (flagService.faceSearchEnabled) { unawaited(FaceMlService.instance.init()); - FaceMlService.instance.listenIndexOnDiffSync(); - FaceMlService.instance.listenOnPeopleChangedSync(); } else { if (LocalSettings.instance.isFaceIndexingEnabled) { unawaited(LocalSettings.instance.toggleFaceIndexing()); diff --git a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart index 639bccbb25..7c742d033a 100644 --- a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart +++ b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart @@ -158,6 +158,9 @@ class FaceMlService { } else { unawaited(indexAndClusterAll()); } + + _listenIndexOnDiffSync(); + _listenOnPeopleChangedSync(); }); } @@ -165,13 +168,13 @@ class FaceMlService { OrtEnv.instance.init(); } - void listenIndexOnDiffSync() { + void _listenIndexOnDiffSync() { Bus.instance.on().listen((event) async { unawaited(sync()); }); } - void listenOnPeopleChangedSync() { + void _listenOnPeopleChangedSync() { Bus.instance.on().listen((event) { _shouldSyncPeople = true; }); @@ -372,157 +375,6 @@ class FaceMlService { } } - Future clusterAllImages({ - double minFaceScore = kMinimumQualityFaceScore, - bool clusterInBuckets = true, - }) async { - if (_cannotRunMLFunction()) return; - - _logger.info("`clusterAllImages()` called"); - _isIndexingOrClusteringRunning = true; - final clusterAllImagesTime = DateTime.now(); - - try { - // Get a sense of the total number of faces in the database - final int totalFaces = await FaceMLDataDB.instance - .getTotalFaceCount(minFaceScore: minFaceScore); - final fileIDToCreationTime = - await FilesDB.instance.getFileIDToCreationTime(); - final startEmbeddingFetch = DateTime.now(); - // read all embeddings - final result = await FaceMLDataDB.instance.getFaceInfoForClustering( - minScore: minFaceScore, - maxFaces: totalFaces, - ); - final Set missingFileIDs = {}; - final allFaceInfoForClustering = []; - for (final faceInfo in result) { - if (!fileIDToCreationTime.containsKey(faceInfo.fileID)) { - missingFileIDs.add(faceInfo.fileID); - } else { - allFaceInfoForClustering.add(faceInfo); - } - } - // sort the embeddings based on file creation time, oldest first - allFaceInfoForClustering.sort((a, b) { - return fileIDToCreationTime[a.fileID]! 
- .compareTo(fileIDToCreationTime[b.fileID]!); - }); - _logger.info( - 'Getting and sorting embeddings took ${DateTime.now().difference(startEmbeddingFetch).inMilliseconds} ms for ${allFaceInfoForClustering.length} embeddings' - 'and ${missingFileIDs.length} missing fileIDs', - ); - - // Get the current cluster statistics - final Map oldClusterSummaries = - await FaceMLDataDB.instance.getAllClusterSummary(); - - if (clusterInBuckets) { - const int bucketSize = 20000; - const int offsetIncrement = 7500; - int offset = 0; - int bucket = 1; - - while (true) { - if (_shouldPauseIndexingAndClustering) { - _logger.info( - "MLController does not allow running ML, stopping before clustering bucket $bucket", - ); - break; - } - if (offset > allFaceInfoForClustering.length - 1) { - _logger.warning( - 'faceIdToEmbeddingBucket is empty, this should ideally not happen as it should have stopped earlier. offset: $offset, totalFaces: $totalFaces', - ); - break; - } - if (offset > totalFaces) { - _logger.warning( - 'offset > totalFaces, this should ideally not happen. offset: $offset, totalFaces: $totalFaces', - ); - break; - } - - final bucketStartTime = DateTime.now(); - final faceInfoForClustering = allFaceInfoForClustering.sublist( - offset, - min(offset + bucketSize, allFaceInfoForClustering.length), - ); - - final clusteringResult = - await FaceClusteringService.instance.predictLinear( - faceInfoForClustering.toSet(), - fileIDToCreationTime: fileIDToCreationTime, - offset: offset, - oldClusterSummaries: oldClusterSummaries, - ); - if (clusteringResult == null) { - _logger.warning("faceIdToCluster is null"); - return; - } - - await FaceMLDataDB.instance - .updateFaceIdToClusterId(clusteringResult.newFaceIdToCluster); - await FaceMLDataDB.instance - .clusterSummaryUpdate(clusteringResult.newClusterSummaries!); - for (final faceInfo in faceInfoForClustering) { - faceInfo.clusterId ??= - clusteringResult.newFaceIdToCluster[faceInfo.faceID]; - } - for (final clusterUpdate - in clusteringResult.newClusterSummaries!.entries) { - oldClusterSummaries[clusterUpdate.key] = clusterUpdate.value; - } - _logger.info( - 'Done with clustering ${offset + faceInfoForClustering.length} embeddings (${(100 * (offset + faceInfoForClustering.length) / totalFaces).toStringAsFixed(0)}%) in bucket $bucket, offset: $offset, in ${DateTime.now().difference(bucketStartTime).inSeconds} seconds', - ); - if (offset + bucketSize >= totalFaces) { - _logger.info('All faces clustered'); - break; - } - offset += offsetIncrement; - bucket++; - } - } else { - final clusterStartTime = DateTime.now(); - // Cluster the embeddings using the linear clustering algorithm, returning a map from faceID to clusterID - final clusteringResult = - await FaceClusteringService.instance.predictLinear( - allFaceInfoForClustering.toSet(), - fileIDToCreationTime: fileIDToCreationTime, - oldClusterSummaries: oldClusterSummaries, - ); - if (clusteringResult == null) { - _logger.warning("faceIdToCluster is null"); - return; - } - final clusterDoneTime = DateTime.now(); - _logger.info( - 'done with clustering ${allFaceInfoForClustering.length} in ${clusterDoneTime.difference(clusterStartTime).inSeconds} seconds ', - ); - - // Store the updated clusterIDs in the database - _logger.info( - 'Updating ${clusteringResult.newFaceIdToCluster.length} FaceIDs with clusterIDs in the DB', - ); - await FaceMLDataDB.instance - .updateFaceIdToClusterId(clusteringResult.newFaceIdToCluster); - await FaceMLDataDB.instance - 
.clusterSummaryUpdate(clusteringResult.newClusterSummaries!); - _logger.info('Done updating FaceIDs with clusterIDs in the DB, in ' - '${DateTime.now().difference(clusterDoneTime).inSeconds} seconds'); - } - Bus.instance.fire(PeopleChangedEvent()); - _logger.info('clusterAllImages() finished, in ' - '${DateTime.now().difference(clusterAllImagesTime).inSeconds} seconds'); - } catch (e, s) { - _logger.severe("`clusterAllImages` failed", e, s); - } finally { - _isIndexingOrClusteringRunning = false; - _shouldPauseIndexingAndClustering = false; - } - } - /// Analyzes all the images in the database with the latest ml version and stores the results in the database. /// /// This function first checks if the image has already been analyzed with the lastest faceMlVersion and stored in the database. If so, it skips the image. @@ -699,6 +551,157 @@ class FaceMlService { } } + Future clusterAllImages({ + double minFaceScore = kMinimumQualityFaceScore, + bool clusterInBuckets = true, + }) async { + if (_cannotRunMLFunction()) return; + + _logger.info("`clusterAllImages()` called"); + _isIndexingOrClusteringRunning = true; + final clusterAllImagesTime = DateTime.now(); + + try { + // Get a sense of the total number of faces in the database + final int totalFaces = await FaceMLDataDB.instance + .getTotalFaceCount(minFaceScore: minFaceScore); + final fileIDToCreationTime = + await FilesDB.instance.getFileIDToCreationTime(); + final startEmbeddingFetch = DateTime.now(); + // read all embeddings + final result = await FaceMLDataDB.instance.getFaceInfoForClustering( + minScore: minFaceScore, + maxFaces: totalFaces, + ); + final Set missingFileIDs = {}; + final allFaceInfoForClustering = []; + for (final faceInfo in result) { + if (!fileIDToCreationTime.containsKey(faceInfo.fileID)) { + missingFileIDs.add(faceInfo.fileID); + } else { + allFaceInfoForClustering.add(faceInfo); + } + } + // sort the embeddings based on file creation time, oldest first + allFaceInfoForClustering.sort((a, b) { + return fileIDToCreationTime[a.fileID]! + .compareTo(fileIDToCreationTime[b.fileID]!); + }); + _logger.info( + 'Getting and sorting embeddings took ${DateTime.now().difference(startEmbeddingFetch).inMilliseconds} ms for ${allFaceInfoForClustering.length} embeddings' + 'and ${missingFileIDs.length} missing fileIDs', + ); + + // Get the current cluster statistics + final Map oldClusterSummaries = + await FaceMLDataDB.instance.getAllClusterSummary(); + + if (clusterInBuckets) { + const int bucketSize = 20000; + const int offsetIncrement = 7500; + int offset = 0; + int bucket = 1; + + while (true) { + if (_shouldPauseIndexingAndClustering) { + _logger.info( + "MLController does not allow running ML, stopping before clustering bucket $bucket", + ); + break; + } + if (offset > allFaceInfoForClustering.length - 1) { + _logger.warning( + 'faceIdToEmbeddingBucket is empty, this should ideally not happen as it should have stopped earlier. offset: $offset, totalFaces: $totalFaces', + ); + break; + } + if (offset > totalFaces) { + _logger.warning( + 'offset > totalFaces, this should ideally not happen. 
offset: $offset, totalFaces: $totalFaces', + ); + break; + } + + final bucketStartTime = DateTime.now(); + final faceInfoForClustering = allFaceInfoForClustering.sublist( + offset, + min(offset + bucketSize, allFaceInfoForClustering.length), + ); + + final clusteringResult = + await FaceClusteringService.instance.predictLinear( + faceInfoForClustering.toSet(), + fileIDToCreationTime: fileIDToCreationTime, + offset: offset, + oldClusterSummaries: oldClusterSummaries, + ); + if (clusteringResult == null) { + _logger.warning("faceIdToCluster is null"); + return; + } + + await FaceMLDataDB.instance + .updateFaceIdToClusterId(clusteringResult.newFaceIdToCluster); + await FaceMLDataDB.instance + .clusterSummaryUpdate(clusteringResult.newClusterSummaries!); + for (final faceInfo in faceInfoForClustering) { + faceInfo.clusterId ??= + clusteringResult.newFaceIdToCluster[faceInfo.faceID]; + } + for (final clusterUpdate + in clusteringResult.newClusterSummaries!.entries) { + oldClusterSummaries[clusterUpdate.key] = clusterUpdate.value; + } + _logger.info( + 'Done with clustering ${offset + faceInfoForClustering.length} embeddings (${(100 * (offset + faceInfoForClustering.length) / totalFaces).toStringAsFixed(0)}%) in bucket $bucket, offset: $offset, in ${DateTime.now().difference(bucketStartTime).inSeconds} seconds', + ); + if (offset + bucketSize >= totalFaces) { + _logger.info('All faces clustered'); + break; + } + offset += offsetIncrement; + bucket++; + } + } else { + final clusterStartTime = DateTime.now(); + // Cluster the embeddings using the linear clustering algorithm, returning a map from faceID to clusterID + final clusteringResult = + await FaceClusteringService.instance.predictLinear( + allFaceInfoForClustering.toSet(), + fileIDToCreationTime: fileIDToCreationTime, + oldClusterSummaries: oldClusterSummaries, + ); + if (clusteringResult == null) { + _logger.warning("faceIdToCluster is null"); + return; + } + final clusterDoneTime = DateTime.now(); + _logger.info( + 'done with clustering ${allFaceInfoForClustering.length} in ${clusterDoneTime.difference(clusterStartTime).inSeconds} seconds ', + ); + + // Store the updated clusterIDs in the database + _logger.info( + 'Updating ${clusteringResult.newFaceIdToCluster.length} FaceIDs with clusterIDs in the DB', + ); + await FaceMLDataDB.instance + .updateFaceIdToClusterId(clusteringResult.newFaceIdToCluster); + await FaceMLDataDB.instance + .clusterSummaryUpdate(clusteringResult.newClusterSummaries!); + _logger.info('Done updating FaceIDs with clusterIDs in the DB, in ' + '${DateTime.now().difference(clusterDoneTime).inSeconds} seconds'); + } + Bus.instance.fire(PeopleChangedEvent()); + _logger.info('clusterAllImages() finished, in ' + '${DateTime.now().difference(clusterAllImagesTime).inSeconds} seconds'); + } catch (e, s) { + _logger.severe("`clusterAllImages` failed", e, s); + } finally { + _isIndexingOrClusteringRunning = false; + _shouldPauseIndexingAndClustering = false; + } + } + bool _shouldDiscardRemoteEmbedding(FileMl fileMl) { if (fileMl.faceEmbedding.version < faceMlVersion) { debugPrint("Discarding remote embedding for fileID ${fileMl.fileID} " From df756076e811c42bd3197f4e6675440c9f2928b3 Mon Sep 17 00:00:00 2001 From: laurenspriem Date: Wed, 22 May 2024 14:52:13 +0530 Subject: [PATCH 06/13] [mob][photos] Small cleanup of FaceMlService --- .../face_ml/face_ml_service.dart | 171 +----------------- 1 file changed, 5 insertions(+), 166 deletions(-) diff --git a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart 
b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart index 7c742d033a..a65e137d26 100644 --- a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart +++ b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart @@ -9,7 +9,6 @@ import "dart:ui" show Image; import "package:computer/computer.dart"; import "package:dart_ui_isolate/dart_ui_isolate.dart"; import "package:flutter/foundation.dart" show debugPrint, kDebugMode; -import "package:flutter_image_compress/flutter_image_compress.dart"; import "package:logging/logging.dart"; import "package:onnxruntime/onnxruntime.dart"; import "package:package_info_plus/package_info_plus.dart"; @@ -326,12 +325,12 @@ class FaceMlService { _logger.info( 'Clustering Isolate has been inactive for ${_inactivityDuration.inSeconds} seconds with no tasks running. Killing isolate.', ); - disposeIsolate(); + _disposeIsolate(); } }); } - void disposeIsolate() async { + void _disposeIsolate() async { if (!_isIsolateSpawned) return; await release(); @@ -750,7 +749,7 @@ class FaceMlService { ); try { - final FaceMlResult? result = await analyzeImageInSingleIsolate( + final FaceMlResult? result = await _analyzeImageInSingleIsolate( enteFile, // preferUsingThumbnailForEverything: false, // disposeImageIsolateAfterUse: false, @@ -843,7 +842,7 @@ class FaceMlService { } /// Analyzes the given image data by running the full pipeline for faces, using [analyzeImageSync] in the isolate. - Future analyzeImageInSingleIsolate(EnteFile enteFile) async { + Future _analyzeImageInSingleIsolate(EnteFile enteFile) async { _checkEnteFileForID(enteFile); await ensureInitialized(); @@ -1034,94 +1033,6 @@ class FaceMlService { return imagePath; } - @Deprecated('Deprecated in favor of `_getImagePathForML`') - Future _getDataForML( - EnteFile enteFile, { - FileDataForML typeOfData = FileDataForML.fileData, - }) async { - Uint8List? data; - - switch (typeOfData) { - case FileDataForML.fileData: - final stopwatch = Stopwatch()..start(); - final File? actualIoFile = await getFile(enteFile, isOrigin: true); - if (actualIoFile != null) { - data = await actualIoFile.readAsBytes(); - } - stopwatch.stop(); - _logger.info( - "Getting file data for uploadedFileID ${enteFile.uploadedFileID} took ${stopwatch.elapsedMilliseconds} ms", - ); - - break; - - case FileDataForML.thumbnailData: - final stopwatch = Stopwatch()..start(); - data = await getThumbnail(enteFile); - stopwatch.stop(); - _logger.info( - "Getting thumbnail data for uploadedFileID ${enteFile.uploadedFileID} took ${stopwatch.elapsedMilliseconds} ms", - ); - break; - - case FileDataForML.compressedFileData: - final stopwatch = Stopwatch()..start(); - final String tempPath = Configuration.instance.getTempDirectory() + - "${enteFile.uploadedFileID!}"; - final File? actualIoFile = await getFile(enteFile); - if (actualIoFile != null) { - final compressResult = await FlutterImageCompress.compressAndGetFile( - actualIoFile.path, - tempPath + ".jpg", - ); - if (compressResult != null) { - data = await compressResult.readAsBytes(); - } - } - stopwatch.stop(); - _logger.info( - "Getting compressed file data for uploadedFileID ${enteFile.uploadedFileID} took ${stopwatch.elapsedMilliseconds} ms", - ); - break; - } - - return data; - } - - /// Detects faces in the given image data. - /// - /// `imageData`: The image data to analyze. - /// - /// Returns a list of face detection results. - /// - /// Throws [CouldNotInitializeFaceDetector], [CouldNotRunFaceDetector] or [GeneralFaceMlException] if something goes wrong. 
- Future> _detectFacesIsolate( - String imagePath, - // Uint8List fileData, - { - FaceMlResultBuilder? resultBuilder, - }) async { - try { - // Get the bounding boxes of the faces - final (List faces, dataSize) = - await FaceDetectionService.instance.predictInComputer(imagePath); - - // Add detected faces to the resultBuilder - if (resultBuilder != null) { - resultBuilder.addNewlyDetectedFaces(faces, dataSize); - } - - return faces; - } on YOLOFaceInterpreterInitializationException { - throw CouldNotInitializeFaceDetector(); - } on YOLOFaceInterpreterRunException { - throw CouldNotRunFaceDetector(); - } catch (e) { - _logger.severe('Face detection failed: $e'); - throw GeneralFaceMlException('Face detection failed: $e'); - } - } - /// Detects faces in the given image data. /// /// `imageData`: The image data to analyze. @@ -1160,38 +1071,6 @@ class FaceMlService { } } - /// Aligns multiple faces from the given image data. - /// - /// `imageData`: The image data in [Uint8List] that contains the faces. - /// `faces`: The face detection results in a list of [FaceDetectionAbsolute] for the faces to align. - /// - /// Returns a list of the aligned faces as image data. - /// - /// Throws [CouldNotWarpAffine] or [GeneralFaceMlException] if the face alignment fails. - Future _alignFaces( - String imagePath, - List faces, { - FaceMlResultBuilder? resultBuilder, - }) async { - try { - final (alignedFaces, alignmentResults, _, blurValues, _) = - await ImageMlIsolate.instance - .preprocessMobileFaceNetOnnx(imagePath, faces); - - if (resultBuilder != null) { - resultBuilder.addAlignmentResults( - alignmentResults, - blurValues, - ); - } - - return alignedFaces; - } catch (e, s) { - _logger.severe('Face alignment failed: $e', e, s); - throw CouldNotWarpAffine(); - } - } - /// Aligns multiple faces from the given image data. /// /// `imageData`: The image data in [Uint8List] that contains the faces. @@ -1233,45 +1112,6 @@ class FaceMlService { } } - /// Embeds multiple faces from the given input matrices. - /// - /// `facesMatrices`: The input matrices of the faces to embed. - /// - /// Returns a list of the face embeddings as lists of doubles. - /// - /// Throws [CouldNotInitializeFaceEmbeddor], [CouldNotRunFaceEmbeddor], [InputProblemFaceEmbeddor] or [GeneralFaceMlException] if the face embedding fails. - Future>> _embedFaces( - Float32List facesList, { - FaceMlResultBuilder? 
resultBuilder, - }) async { - try { - // Get the embedding of the faces - final List> embeddings = - await FaceEmbeddingService.instance.predictInComputer(facesList); - - // Add the embeddings to the resultBuilder - if (resultBuilder != null) { - resultBuilder.addEmbeddingsToExistingFaces(embeddings); - } - - return embeddings; - } on MobileFaceNetInterpreterInitializationException { - throw CouldNotInitializeFaceEmbeddor(); - } on MobileFaceNetInterpreterRunException { - throw CouldNotRunFaceEmbeddor(); - } on MobileFaceNetEmptyInput { - throw InputProblemFaceEmbeddor("Input is empty"); - } on MobileFaceNetWrongInputSize { - throw InputProblemFaceEmbeddor("Input size is wrong"); - } on MobileFaceNetWrongInputRange { - throw InputProblemFaceEmbeddor("Input range is wrong"); - // ignore: avoid_catches_without_on_clauses - } catch (e) { - _logger.severe('Face embedding (batch) failed: $e'); - throw GeneralFaceMlException('Face embedding (batch) failed: $e'); - } - } - static Future>> embedFacesSync( Float32List facesList, int interpreterAddress, { @@ -1311,10 +1151,9 @@ class FaceMlService { _logger.warning( '''Skipped analysis of image with enteFile, it might be the wrong format or has no uploadedFileID, or MLController doesn't allow it to run. enteFile: ${enteFile.toString()} - isImageIndexRunning: $_isIndexingOrClusteringRunning - canRunML: $_mlControllerStatus ''', ); + _logStatus(); throw CouldNotRetrieveAnyFileData(); } } From 92bafa7c384b434988b688bd260e5cf71d0c1006 Mon Sep 17 00:00:00 2001 From: laurenspriem Date: Wed, 22 May 2024 15:19:07 +0530 Subject: [PATCH 07/13] [mob][photos] Temp fix for double assigned persons --- mobile/lib/services/search_service.dart | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/mobile/lib/services/search_service.dart b/mobile/lib/services/search_service.dart index 5e21b03345..1ff73dbc89 100644 --- a/mobile/lib/services/search_service.dart +++ b/mobile/lib/services/search_service.dart @@ -848,8 +848,9 @@ class SearchService { final String clusterName = "$clusterId"; if (clusterIDToPersonID[clusterId] != null) { - throw Exception( - "Cluster $clusterId should not have person id ${clusterIDToPersonID[clusterId]}", + // This should not happen, means a faceID is assigned to multiple persons. 
+ _logger.severe( + "`getAllFace`: Cluster $clusterId should not have person id ${clusterIDToPersonID[clusterId]}", ); } if (files.length < kMinimumClusterSizeSearchResult && From b3229785a05ef5a3ed3efeedef0b31b2651013b7 Mon Sep 17 00:00:00 2001 From: laurenspriem Date: Wed, 22 May 2024 15:26:03 +0530 Subject: [PATCH 08/13] [mob][photos] Small fix --- .../lib/services/machine_learning/face_ml/face_ml_service.dart | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart index a65e137d26..a0f4f4dabe 100644 --- a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart +++ b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart @@ -88,7 +88,7 @@ class FaceMlService { final _computer = Computer.shared(); bool isInitialized = false; - late final String client; + late String client; bool debugIndexingDisabled = false; bool _mlControllerStatus = false; From 6d5436c8851c34e0e88ed9e824cf4fba50c5c397 Mon Sep 17 00:00:00 2001 From: laurenspriem Date: Wed, 22 May 2024 15:50:14 +0530 Subject: [PATCH 09/13] [mob][photos] Hook iOS into MLController for temperature check only --- .../face_ml/face_ml_service.dart | 36 +++++++++---------- .../machine_learning_controller.dart | 34 +++++++++++++----- .../semantic_search_service.dart | 19 ++++------ 3 files changed, 48 insertions(+), 41 deletions(-) diff --git a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart index a0f4f4dabe..528d16b2bd 100644 --- a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart +++ b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart @@ -137,26 +137,22 @@ class FaceMlService { _mlControllerStatus = !Platform.isAndroid; /// hooking FaceML into [MachineLearningController] - if (Platform.isAndroid) { - Bus.instance.on().listen((event) { - if (LocalSettings.instance.isFaceIndexingEnabled == false) { - return; - } - _mlControllerStatus = event.shouldRun; - if (_mlControllerStatus) { - _logger.info( - "MLController allowed running ML, faces indexing starting", - ); - unawaited(indexAndClusterAll()); - } else { - _logger - .info("MLController stopped running ML, faces indexing paused"); - pauseIndexingAndClustering(); - } - }); - } else { - unawaited(indexAndClusterAll()); - } + Bus.instance.on().listen((event) { + if (LocalSettings.instance.isFaceIndexingEnabled == false) { + return; + } + _mlControllerStatus = event.shouldRun; + if (_mlControllerStatus) { + _logger.info( + "MLController allowed running ML, faces indexing starting", + ); + unawaited(indexAndClusterAll()); + } else { + _logger + .info("MLController stopped running ML, faces indexing paused"); + pauseIndexingAndClustering(); + } + }); _listenIndexOnDiffSync(); _listenOnPeopleChangedSync(); diff --git a/mobile/lib/services/machine_learning/machine_learning_controller.dart b/mobile/lib/services/machine_learning/machine_learning_controller.dart index e79fb4be69..852ebcd5b5 100644 --- a/mobile/lib/services/machine_learning/machine_learning_controller.dart +++ b/mobile/lib/services/machine_learning/machine_learning_controller.dart @@ -3,6 +3,7 @@ import "dart:io"; import "package:battery_info/battery_info_plugin.dart"; import "package:battery_info/model/android_battery_info.dart"; +import "package:battery_info/model/iso_battery_info.dart"; import "package:flutter/foundation.dart" show kDebugMode; import 
"package:logging/logging.dart"; import "package:photos/core/event_bus.dart"; @@ -33,13 +34,17 @@ class MachineLearningController { BatteryInfoPlugin() .androidBatteryInfoStream .listen((AndroidBatteryInfo? batteryInfo) { - _onBatteryStateUpdate(batteryInfo); + _onAndroidBatteryStateUpdate(batteryInfo); }); - } else { - // Always run Machine Learning on iOS - _canRunML = true; - Bus.instance.fire(MachineLearningControlEvent(true)); } + if (Platform.isIOS) { + BatteryInfoPlugin() + .iosBatteryInfoStream + .listen((IosBatteryInfo? batteryInfo) { + _oniOSBatteryStateUpdate(batteryInfo); + }); + } + _fireControlEvent(); } void onUserInteraction() { @@ -55,7 +60,8 @@ class MachineLearningController { } void _fireControlEvent() { - final shouldRunML = _isDeviceHealthy && !_isUserInteracting; + final shouldRunML = + _isDeviceHealthy && (Platform.isAndroid ? !_isUserInteracting : true); if (shouldRunML != _canRunML) { _canRunML = shouldRunML; _logger.info( @@ -78,18 +84,28 @@ class MachineLearningController { _startInteractionTimer(); } - void _onBatteryStateUpdate(AndroidBatteryInfo? batteryInfo) { + void _onAndroidBatteryStateUpdate(AndroidBatteryInfo? batteryInfo) { _logger.info("Battery info: ${batteryInfo!.toJson()}"); - _isDeviceHealthy = _computeIsDeviceHealthy(batteryInfo); + _isDeviceHealthy = _computeIsAndroidDeviceHealthy(batteryInfo); _fireControlEvent(); } - bool _computeIsDeviceHealthy(AndroidBatteryInfo info) { + void _oniOSBatteryStateUpdate(IosBatteryInfo? batteryInfo) { + _logger.info("Battery info: ${batteryInfo!.toJson()}"); + _isDeviceHealthy = _computeIsiOSDeviceHealthy(batteryInfo); + _fireControlEvent(); + } + + bool _computeIsAndroidDeviceHealthy(AndroidBatteryInfo info) { return _hasSufficientBattery(info.batteryLevel ?? kMinimumBatteryLevel) && _isAcceptableTemperature(info.temperature ?? kMaximumTemperature) && _isBatteryHealthy(info.health ?? ""); } + bool _computeIsiOSDeviceHealthy(IosBatteryInfo info) { + return _hasSufficientBattery(info.batteryLevel ?? 
kMinimumBatteryLevel); + } + bool _hasSufficientBattery(int batteryLevel) { return batteryLevel >= kMinimumBatteryLevel; } diff --git a/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart b/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart index d85b4ceb5d..db1713c2c3 100644 --- a/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart +++ b/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart @@ -1,6 +1,5 @@ import "dart:async"; import "dart:collection"; -import "dart:io"; import "dart:math" show min; import "package:computer/computer.dart"; @@ -103,17 +102,13 @@ class SemanticSearchService { if (shouldSyncImmediately) { unawaited(sync()); } - if (Platform.isAndroid) { - Bus.instance.on().listen((event) { - if (event.shouldRun) { - _startIndexing(); - } else { - _pauseIndexing(); - } - }); - } else { - _startIndexing(); - } + Bus.instance.on().listen((event) { + if (event.shouldRun) { + _startIndexing(); + } else { + _pauseIndexing(); + } + }); } Future release() async { From e44be6358653762f9301a0d5a71a3c0977ee46cd Mon Sep 17 00:00:00 2001 From: laurenspriem Date: Wed, 22 May 2024 15:54:56 +0530 Subject: [PATCH 10/13] [mob][photos] Logs --- .../lib/services/machine_learning/face_ml/face_ml_service.dart | 1 + 1 file changed, 1 insertion(+) diff --git a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart index 528d16b2bd..34633a2019 100644 --- a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart +++ b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart @@ -1231,6 +1231,7 @@ class FaceMlService { isFaceIndexingEnabled: ${LocalSettings.instance.isFaceIndexingEnabled} canRunMLController: $_mlControllerStatus isIndexingOrClusteringRunning: $_isIndexingOrClusteringRunning + shouldPauseIndexingAndClustering: $_shouldPauseIndexingAndClustering debugIndexingDisabled: $debugIndexingDisabled shouldSyncPeople: $_shouldSyncPeople '''; From e4c379963f8f20d468c7e70a0a9fc7bd2b4e120e Mon Sep 17 00:00:00 2001 From: laurenspriem Date: Wed, 22 May 2024 16:04:26 +0530 Subject: [PATCH 11/13] [mob][photos] Logging --- .../machine_learning/face_ml/face_ml_service.dart | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart index 34633a2019..af03577c94 100644 --- a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart +++ b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart @@ -356,7 +356,7 @@ class FaceMlService { final indexingCompleteRatio = await _getIndexedDoneRatio(); if (indexingCompleteRatio < 0.95) { _logger.info( - "Indexing is not far enough, skipping clustering. Indexing is at $indexingCompleteRatio", + "Indexing is not far enough to start clustering, skipping clustering. Indexing is at $indexingCompleteRatio", ); return; } else { @@ -443,13 +443,10 @@ class FaceMlService { for (final f in chunk) { fileIds.add(f.uploadedFileID!); } - final EnteWatch? w = - flagService.internalUser ? 
EnteWatch("face_em_fetch") : null; - w?.start(); - w?.log('starting remote fetch for ${fileIds.length} files'); + _logger.info('starting remote fetch for ${fileIds.length} files'); final res = await RemoteFileMLService.instance.getFilessEmbedding(fileIds); - w?.logAndReset('fetched ${res.mlData.length} embeddings'); + _logger.info('fetched ${res.mlData.length} embeddings'); final List faces = []; final remoteFileIdToVersion = {}; for (FileMl fileMl in res.mlData.values) { @@ -483,7 +480,7 @@ class FaceMlService { } await FaceMLDataDB.instance.bulkInsertFaces(faces); - w?.logAndReset('stored embeddings'); + _logger.info('stored embeddings'); for (final entry in remoteFileIdToVersion.entries) { alreadyIndexedFiles[entry.key] = entry.value; } From d12f570178a3f6c494856f552a6e78774163d958 Mon Sep 17 00:00:00 2001 From: laurenspriem Date: Wed, 22 May 2024 16:15:30 +0530 Subject: [PATCH 12/13] [mob][photos] Logging --- .../machine_learning/face_ml/face_ml_service.dart | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart index af03577c94..2aabf7f2b9 100644 --- a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart +++ b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart @@ -144,12 +144,12 @@ class FaceMlService { _mlControllerStatus = event.shouldRun; if (_mlControllerStatus) { _logger.info( - "MLController allowed running ML, faces indexing starting", + "MLController allowed running ML, faces indexing starting (unless it's already fetching embeddings)", ); unawaited(indexAndClusterAll()); } else { _logger - .info("MLController stopped running ML, faces indexing paused"); + .info("MLController stopped running ML, faces indexing will be paused (unless it's fetching embeddings)"); pauseIndexingAndClustering(); } }); @@ -432,6 +432,7 @@ class FaceMlService { w?.log('preparing all files to index'); final List> chunks = sortedBylocalID.chunks(_embeddingFetchLimit); + int fetchedCount = 0; outerLoop: for (final chunk in chunks) { final futures = >[]; @@ -447,6 +448,7 @@ class FaceMlService { final res = await RemoteFileMLService.instance.getFilessEmbedding(fileIds); _logger.info('fetched ${res.mlData.length} embeddings'); + fetchedCount += res.mlData.length; final List faces = []; final remoteFileIdToVersion = {}; for (FileMl fileMl in res.mlData.values) { @@ -533,7 +535,7 @@ class FaceMlService { stopwatch.stop(); _logger.info( - "`indexAllImages()` finished. Analyzed $fileAnalyzedCount images, in ${stopwatch.elapsed.inSeconds} seconds (avg of ${stopwatch.elapsed.inSeconds / fileAnalyzedCount} seconds per image, skipped $fileSkippedCount images. MLController status: $_mlControllerStatus)", + "`indexAllImages()` finished. Fetched $fetchedCount and analyzed $fileAnalyzedCount images, in ${stopwatch.elapsed.inSeconds} seconds (avg of ${stopwatch.elapsed.inSeconds / fileAnalyzedCount} seconds per image, skipped $fileSkippedCount images. 
MLController status: $_mlControllerStatus)", ); } catch (e, s) { _logger.severe("indexAllImages failed", e, s); From d477b5507137e3c9a8696e3164a07c1cb3b14ef7 Mon Sep 17 00:00:00 2001 From: laurenspriem Date: Wed, 22 May 2024 16:23:15 +0530 Subject: [PATCH 13/13] [mob][photos] Bump --- mobile/pubspec.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mobile/pubspec.yaml b/mobile/pubspec.yaml index b9d5345c39..0c9eba213f 100644 --- a/mobile/pubspec.yaml +++ b/mobile/pubspec.yaml @@ -12,7 +12,7 @@ description: ente photos application # Read more about iOS versioning at # https://developer.apple.com/library/archive/documentation/General/Reference/InfoPlistKeyReference/Articles/CoreFoundationKeys.html -version: 0.8.98+618 +version: 0.8.101+624 publish_to: none environment:
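
Taken together, the patches above converge on one gating pattern: a controller decides whether ML may run (debug mode, device health, user interaction) and broadcasts only state *changes*, while the indexing service never stops mid-item but sets a pause flag that it checks between work items. Below is a minimal, self-contained sketch of that pattern in plain Dart. It is an illustration under stated assumptions, not the ente implementation: the names (MlGate, Indexer, ControlEvent, shouldRun) are hypothetical stand-ins, and the real code uses an event bus, battery_info, and the guards shown in the diffs (_mlControllerStatus, _isIndexingOrClusteringRunning, _shouldPauseIndexingAndClustering).

import 'dart:async';

/// Simplified stand-in for MachineLearningControlEvent.
class ControlEvent {
  final bool shouldRun;
  const ControlEvent(this.shouldRun);
}

/// Decides whether ML may run and broadcasts changes
/// (loosely mirrors MachineLearningController).
class MlGate {
  final _bus = StreamController<ControlEvent>.broadcast();
  bool _canRun = false;

  Stream<ControlEvent> get events => _bus.stream;

  void update({required bool deviceHealthy, required bool userInteracting}) {
    final shouldRun = deviceHealthy && !userInteracting;
    if (shouldRun != _canRun) {
      _canRun = shouldRun;
      _bus.add(ControlEvent(shouldRun)); // fire only on state changes
    }
  }
}

/// Cooperatively pausable worker (loosely mirrors the FaceMlService loop).
class Indexer {
  bool _running = false;
  bool _shouldPause = false;

  void bind(MlGate gate) {
    gate.events.listen((e) {
      if (e.shouldRun) {
        unawaited(indexAll());
      } else {
        _shouldPause = true; // checked between items, never mid-item
      }
    });
  }

  Future<void> indexAll() async {
    if (_running) return; // skip if already running (cf. the _isIndexingOrClusteringRunning guard)
    _running = true;
    try {
      for (var i = 0; i < 100; i++) {
        if (_shouldPause) {
          print('paused after $i items');
          break;
        }
        await Future<void>.delayed(const Duration(milliseconds: 10)); // "work"
      }
    } finally {
      _running = false;
      _shouldPause = false; // reset so a later resume can run again
    }
  }
}

Future<void> main() async {
  final gate = MlGate();
  final indexer = Indexer();
  indexer.bind(gate);
  gate.update(deviceHealthy: true, userInteracting: false); // allows ML, indexing starts
  await Future<void>.delayed(const Duration(milliseconds: 200));
  gate.update(deviceHealthy: true, userInteracting: true); // revokes permission, worker pauses at next check
  await Future<void>.delayed(const Duration(milliseconds: 100));
}

Running this with `dart run` starts the loop on the first update and pauses it after roughly twenty items on the second, which is the same resume/pause handshake the patches wire between MachineLearningController and FaceMlService.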