From e2487bbdf6da4f4bb9c6c0d4d0d28cb10bee6693 Mon Sep 17 00:00:00 2001
From: Jason McGhee
Date: Sun, 31 Dec 2023 02:09:50 -0800
Subject: [PATCH] careful profiling, fixing timeline loading, auto-pause
 recording on open timeline

---
 rem.xcodeproj/project.pbxproj |   4 +
 rem/DB.swift                  |  10 ++-
 rem/ImageHelper.swift         |  49 +++++++++++++
 rem/TextMerger.swift          |   3 -
 rem/TimelineView.swift        | 133 ++++++++++++++++------------------
 rem/remApp.swift              |  37 ++++++----
 6 files changed, 145 insertions(+), 91 deletions(-)
 create mode 100644 rem/ImageHelper.swift

diff --git a/rem.xcodeproj/project.pbxproj b/rem.xcodeproj/project.pbxproj
index bb8d231..3822a7d 100644
--- a/rem.xcodeproj/project.pbxproj
+++ b/rem.xcodeproj/project.pbxproj
@@ -16,6 +16,7 @@
 		961C95F82B2E19B40093F228 /* remUITestsLaunchTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 961C95F72B2E19B40093F228 /* remUITestsLaunchTests.swift */; };
 		961C96132B2EB7DB0093F228 /* TimelineView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 961C96122B2EB7DB0093F228 /* TimelineView.swift */; };
 		961C96152B2EBEE50093F228 /* DB.swift in Sources */ = {isa = PBXBuildFile; fileRef = 961C96142B2EBEE50093F228 /* DB.swift */; };
+		9670E1352B41683B005728F5 /* ImageHelper.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9670E1342B41683B005728F5 /* ImageHelper.swift */; };
 		969BA2EC2B3D1D46009EE9C6 /* SettingsManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 969BA2EB2B3D1D46009EE9C6 /* SettingsManager.swift */; };
 		969F3EFF2B3A8C4D0085787B /* HotKey in Frameworks */ = {isa = PBXBuildFile; productRef = 969F3EFE2B3A8C4D0085787B /* HotKey */; };
 		969F3F082B3B7C7C0085787B /* RemFileManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 969F3F072B3B7C7C0085787B /* RemFileManager.swift */; };
@@ -204,6 +205,7 @@
 		961C960D2B2E73840093F228 /* ffmpeg */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.executable"; path = ffmpeg; sourceTree = "<group>"; };
 		961C96122B2EB7DB0093F228 /* TimelineView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TimelineView.swift; sourceTree = "<group>"; };
 		961C96142B2EBEE50093F228 /* DB.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DB.swift; sourceTree = "<group>"; };
+		9670E1342B41683B005728F5 /* ImageHelper.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImageHelper.swift; sourceTree = "<group>"; };
 		969BA2EB2B3D1D46009EE9C6 /* SettingsManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SettingsManager.swift; sourceTree = "<group>"; };
 		969F3F072B3B7C7C0085787B /* RemFileManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RemFileManager.swift; sourceTree = "<group>"; };
 		969F3F092B3B7F760085787B /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist; path = Info.plist; sourceTree = "<group>"; };
@@ -298,6 +300,7 @@
 				969F3F0C2B3CCEC30085787B /* Ask.swift */,
 				969BA2EB2B3D1D46009EE9C6 /* SettingsManager.swift */,
 				96DBA3E62B403ED90000CFBE /* Timings.swift */,
+				9670E1342B41683B005728F5 /* ImageHelper.swift */,
 			);
 			path = rem;
 			sourceTree = "<group>";
@@ -664,6 +667,7 @@
 				969BA2EC2B3D1D46009EE9C6 /* SettingsManager.swift in Sources */,
 				969F3F0D2B3CCEC30085787B /* Ask.swift in Sources */,
 				96DBA3E72B403ED90000CFBE /* Timings.swift in Sources */,
+				9670E1352B41683B005728F5 /* ImageHelper.swift in Sources */,
 			);
 			runOnlyForDeploymentPostprocessing = 0;
 		};
diff --git a/rem/DB.swift b/rem/DB.swift
index adbef0a..f815da3 100644
--- a/rem/DB.swift
+++ b/rem/DB.swift
@@ -10,12 +10,17 @@
 import AVFoundation
 import Foundation
 import SQLite
 import Vision
+import os
 
 class DatabaseManager {
     static let shared = DatabaseManager()
     private var db: Connection
     static var FPS: CMTimeScale = 25
+    private let logger = Logger(
+        subsystem: Bundle.main.bundleIdentifier!,
+        category: String(describing: DatabaseManager.self)
+    )
 
     // Last 15 frames
     let recentFramesThreshold = 15
@@ -132,6 +137,7 @@ class DatabaseManager {
     }
 
     func insertFrame(activeApplicationName: String?) -> Int64 {
+        // logger.debug("inserting frame: \(self.lastFrameId + 1) at offset: \(self.currentFrameOffset)")
         let insert = frames.insert(chunkId <- currentChunkId, timestamp <- Date(), offsetIndex <- currentFrameOffset, self.activeApplicationName <- activeApplicationName)
         let id = try! db.run(insert)
         currentFrameOffset += 1
@@ -151,8 +157,8 @@
                 return (frame[offsetIndex], frame[filePath])
             }
 
-            // let justFrameQuery = frames.filter(frames[id] === index).limit(1)
-            // try! db.run(justFrameQuery.delete())
+        // let justFrameQuery = frames.filter(frames[id] === index).limit(1)
+        // try! db.run(justFrameQuery.delete())
         } catch {
             return nil
         }
diff --git a/rem/ImageHelper.swift b/rem/ImageHelper.swift
new file mode 100644
index 0000000..c6de7ec
--- /dev/null
+++ b/rem/ImageHelper.swift
@@ -0,0 +1,49 @@
+//
+//  ImageHelper.swift
+//  rem
+//
+//  Created by Jason McGhee on 12/31/23.
+//
+
+import Foundation
+import os
+import SwiftUI
+
+class ImageHelper {
+    private static let logger = Logger(
+        subsystem: Bundle.main.bundleIdentifier!,
+        category: String(describing: ImageHelper.self)
+    )
+
+    // Useful for debugging...
+    static func pngData(from nsImage: NSImage) -> Data? {
+        guard let tiffRepresentation = nsImage.tiffRepresentation,
+              let bitmapImage = NSBitmapImageRep(data: tiffRepresentation) else {
+            logger.error("Failed to get TIFF representation of NSImage")
+            return nil
+        }
+
+        guard let pngData = bitmapImage.representation(using: .png, properties: [:]) else {
+            logger.error("Failed to convert NSImage to PNG")
+            return nil
+        }
+
+        return pngData
+    }
+
+    static func saveNSImage(image: NSImage, path: String) {
+        let pngData = pngData(from: image)
+        do {
+            if let savedir = RemFileManager.shared.getSaveDir() {
+                let outputPath = savedir.appendingPathComponent("\(path).png").path
+                let fileURL = URL(fileURLWithPath: outputPath)
+                try pngData?.write(to: fileURL)
+                logger.info("PNG file written successfully")
+            } else {
+                logger.error("Could not get save directory")
+            }
+        } catch {
+            logger.error("Error writing PNG file: \(error)")
+        }
+    }
+}
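The new ImageHelper centralizes the PNG debugging helpers that were previously private to TimelineView (removed further down in this patch). A minimal usage sketch follows; the frame-id choice and output name are illustrative, while getMaxFrame/getImage are the same DatabaseManager APIs this patch already calls:

    // Hypothetical debug dump of one captured frame to "<save dir>/debug-frame.png".
    let frameIndex: Int64 = DatabaseManager.shared.getMaxFrame()  // any valid frame id works
    if let cgImage = DatabaseManager.shared.getImage(index: frameIndex) {
        let nsImage = NSImage(cgImage: cgImage, size: NSSize(width: cgImage.width, height: cgImage.height))
        ImageHelper.saveNSImage(image: nsImage, path: "debug-frame")
    }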
diff --git a/rem/TextMerger.swift b/rem/TextMerger.swift
index 71b6a47..6f3f17c 100644
--- a/rem/TextMerger.swift
+++ b/rem/TextMerger.swift
@@ -11,9 +11,6 @@ class TextMerger {
     static let shared = TextMerger()
 
     func mergeTexts(texts: [String]) -> String {
-        if texts.count == 1 {
-            return texts[0]
-        }
         var mergedText: String = ""
         var linesSeen = Set<String>()
         for text in texts {
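Dropping the single-text early return means a lone OCR result now also gets line-level de-duplication instead of being passed through verbatim. The rest of mergeTexts is not shown in this hunk; a sketch consistent with the `linesSeen` set above (an approximation, not the file's actual body):

    // Approximate shape of mergeTexts: keep only the first occurrence
    // of each line across all input texts.
    func mergeTexts(texts: [String]) -> String {
        var mergedText: String = ""
        var linesSeen = Set<String>()
        for text in texts {
            for rawLine in text.split(separator: "\n") {
                let line = String(rawLine)
                if !linesSeen.contains(line) {
                    linesSeen.insert(line)
                    mergedText += line + "\n"
                }
            }
        }
        return mergedText
    }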
diff --git a/rem/TimelineView.swift b/rem/TimelineView.swift
index 10336ba..2de62e1 100644
--- a/rem/TimelineView.swift
+++ b/rem/TimelineView.swift
@@ -11,8 +11,6 @@ struct TimelineView: View {
     )
     @ObservedObject var viewModel: TimelineViewModel
     @State private var imageAnalysis: ImageAnalysis?
-    @State private var frame: NSRect
-    @State private var lastAnalyzedIndex: Int64 = -1 // To track the last analyzed index
     @State var customHostingView: CustomHostingView?
 
     private var ocrDebouncer = Debouncer(delay: 1.0)
@@ -29,12 +27,12 @@ struct TimelineView: View {
         self.viewModel = viewModel
         self.settingsManager = settingsManager
         self.onClose = onClose
-        _frame = State(initialValue: NSScreen.main?.visibleFrame ?? NSRect.zero)
         _customHostingView = State(initialValue: nil)
     }
 
     var body: some View {
         ZStack {
+            let frame = NSScreen.main?.frame ?? NSRect.zero
             let image = DatabaseManager.shared.getImage(index: viewModel.currentFrameIndex)
             let nsImage = image.flatMap { NSImage(cgImage: $0, size: NSSize(width: $0.width, height: $0.height)) }
@@ -42,8 +40,9 @@
                 settingsManager: settingsManager,
                 onClose: onClose,
                 image: nsImage,
-                analysis: $imageAnalysis,
-                frame: frame
+                analysis: imageAnalysis,
+                frame: frame,
+                timelineOpen: viewModel.timelineOpen
             )
             .frame(width: frame.width, height: frame.height)
             .ignoresSafeArea(.all)
@@ -54,6 +53,15 @@
                 analyzeCurrentImage()
             }
 
+            if image == nil {
+                VStack(alignment: .center) {
+                    Text("Nothing to remember, or missing frame (if missing, sorry, still alpha!)")
+                        .padding()
+                        .background(RoundedRectangle(cornerRadius: 10)
+                            .fill(Color.white.opacity(0.1)))
+                }
+            }
+
         }
         .ignoresSafeArea(.all)
     }
@@ -80,39 +88,6 @@ struct TimelineView: View {
             }
         }
     }
-
-
-    // Useful for debugging...
-    func pngData(from nsImage: NSImage) -> Data? {
-        guard let tiffRepresentation = nsImage.tiffRepresentation,
-              let bitmapImage = NSBitmapImageRep(data: tiffRepresentation) else {
-            logger.error("Failed to get TIFF representation of NSImage")
-            return nil
-        }
-
-        guard let pngData = bitmapImage.representation(using: .png, properties: [:]) else {
-            logger.error("Failed to convert NSImage to PNG")
-            return nil
-        }
-
-        return pngData
-    }
-
-    func saveNSImage(image: NSImage, path: String) {
-        let pngData = pngData(from: image)
-        do {
-            if let savedir = RemFileManager.shared.getSaveDir() {
-                let outputPath = savedir.appendingPathComponent("\(path).png").path
-                let fileURL = URL(fileURLWithPath: outputPath)
-                try pngData?.write(to: fileURL)
-                logger.info("PNG file written successfully")
-            } else {
-                logger.error("Error writing PNG file")
-            }
-        } catch {
-            logger.error("Error writing PNG file: \(error)")
-        }
-    }
 }
 
 class CustomHostingView: NSHostingView<AnyView> {
@@ -142,11 +117,11 @@
 
     private func configureImageView(with image: NSImage, in frame: NSRect) {
         imageView.image = image
+        imageView.imageScaling = .scaleAxesIndependently
 
         // Configuring frame to account for the offset and scaling
-        let adjustedFrame = CGRect(x: 0, y: -24, width: frame.width, height: frame.height + 24)
-        imageView.frame = adjustedFrame
+        imageView.frame = CGRect(x: 0, y: 0, width: frame.width, height: frame.height)
     }
 
     private func setupOverlayView() {
@@ -169,8 +144,9 @@ struct CustomHostingControllerRepresentable: NSViewControllerRepresentable {
     var settingsManager: SettingsManager
     var onClose: () -> Void
     var image: NSImage?
-    @Binding var analysis: ImageAnalysis?
+    var analysis: ImageAnalysis?
     var frame: NSRect
+    var timelineOpen: Bool
 
     func makeNSViewController(context: Context) -> CustomHostingViewController {
         let viewController = CustomHostingViewController()
@@ -181,7 +157,9 @@
     }
 
     func updateNSViewController(_ nsViewController: CustomHostingViewController, context: Context) {
-        nsViewController.updateContent(image: image, frame: frame, analysis: analysis)
+        if timelineOpen {
+            nsViewController.updateContent(image: image, frame: frame, analysis: analysis)
+        }
         nsViewController.onClose = onClose
         nsViewController.settingsManager = settingsManager
     }
@@ -189,12 +167,15 @@
 
 class CustomHostingViewController: NSViewController {
     var settingsManager: SettingsManager?
-    var onClose: (() -> Void)? // Closure to handle thumbnail click
+    var onClose: (() -> Void)?
     var customHostingView: CustomHostingView?
     var interceptingView: CustomInterceptingView?
+    var hadImage: Bool = false
 
     override func viewWillAppear() {
-        view.enterFullScreenMode(NSScreen.main!)
+        DispatchQueue.main.async {
+            self.view.window?.makeKey()
+        }
     }
 
     override func loadView() {
@@ -211,44 +192,48 @@
         interceptingView = _interceptingView
     }
 
-    func updateImage(_ image: NSImage?, frame: NSRect) {
-        if let image = image {
-            // Image available: update or create CustomHostingView with the image
-            if customHostingView == nil {
-                customHostingView = CustomHostingView(image: image, frame: frame)
-                customHostingView?.frame = CGRect(origin: .zero, size: frame.size)
-                view.addSubview(customHostingView!)
-            } else {
-                customHostingView?.updateImage(image)
-            }
+    func updateImage(_ image: NSImage, frame: NSRect) {
+        // Image available: update or create CustomHostingView with the image
+        if customHostingView == nil {
+            customHostingView = CustomHostingView(image: image, frame: frame)
             customHostingView?.frame = CGRect(origin: .zero, size: frame.size)
+            view.addSubview(customHostingView!)
         } else {
-            // Image not available: Display VisualEffectView
-            displayVisualEffectView()
+            customHostingView?.updateImage(image)
         }
+        customHostingView?.frame = CGRect(origin: .zero, size: frame.size)
     }
 
     func updateContent(image: NSImage?, frame: NSRect, analysis: ImageAnalysis?) {
-        if let image = image {
-            // Image is available
-            updateImage(image, frame: frame)
+        if let im = image {
+            if !view.isInFullScreenMode {
+                DispatchQueue.main.async {
+                    self.view.enterFullScreenMode(NSScreen.main!)
+                }
+            }
+            updateImage(im, frame: frame)
             updateAnalysis(analysis)
+            hadImage = true
         } else {
-            // Image is not available, display VisualEffectView
+            if view.isInFullScreenMode {
+                DispatchQueue.main.async {
+                    self.view.exitFullScreenMode()
+                }
+            }
             displayVisualEffectView()
+            hadImage = false
         }
     }
 
     private func displayVisualEffectView() {
-        // Ensure previous content is removed
-        view.subviews.forEach { $0.removeFromSuperview() }
+        interceptingView?.subviews.forEach { $0.removeFromSuperview() }
 
-        let visualEffectView = VisualEffectView(material: .hudWindow, blendingMode: .behindWindow)
-            .frame(maxWidth: .infinity, maxHeight: .infinity)
-        NSHostingController(rootView: visualEffectView)
-            .view
-            .frame = view.bounds
-        view.addSubview(NSHostingController(rootView: visualEffectView).view)
+        let visualEffectView = NSVisualEffectView()
+        visualEffectView.material = .hudWindow
+        visualEffectView.blendingMode = .behindWindow
+        visualEffectView.frame = interceptingView?.bounds ?? NSRect.zero
+
+        interceptingView?.addSubview(visualEffectView)
     }
 
     func updateAnalysis(_ analysis: ImageAnalysis?) {
@@ -300,6 +285,7 @@ class TimelineViewModel: ObservableObject {
     private var speedFactor: Double = 0.05 // Adjust this factor based on UX requirements
     @Published var currentFrameContinuous: Double = 0.0
     @Published var currentFrameIndex: Int64 = 0
+    @Published var timelineOpen: Bool = false
     private var indexUpdateThrottle = Throttler(delay: 0.05)
 
     init() {
@@ -326,8 +312,11 @@
     }
 
     func setIndexToLatest() {
-        self.currentFrameContinuous = Double(DatabaseManager.shared.getMaxFrame())
-        self.currentFrameIndex = Int64(currentFrameContinuous)
+        let maxFrame = DatabaseManager.shared.getMaxFrame()
+        DispatchQueue.main.async {
+            self.currentFrameContinuous = Double(maxFrame)
+            self.currentFrameIndex = maxFrame
+        }
     }
 
     func updateIndexSafely() {
@@ -336,4 +325,8 @@
             self.currentFrameIndex = rounded
         }
     }
+
+    func setIsOpen(isOpen: Bool) {
+        timelineOpen = isOpen
+    }
 }
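TimelineView leans on Debouncer and Throttler (ocrDebouncer, indexUpdateThrottle), both defined elsewhere in the repo and untouched by this patch. For readers following along, here is a minimal stand-in matching the Debouncer(delay:) constructor visible above; the debounce(action:) method name is an assumption, not the project's actual API:

    import Foundation

    // Minimal debouncer sketch: each call cancels the previously queued
    // action and schedules a new one to run after `delay` seconds.
    final class Debouncer {
        private let delay: TimeInterval
        private var pending: DispatchWorkItem?

        init(delay: TimeInterval) {
            self.delay = delay
        }

        func debounce(action: @escaping () -> Void) {
            pending?.cancel()
            let item = DispatchWorkItem(block: action)
            pending = item
            DispatchQueue.main.asyncAfter(deadline: .now() + delay, execute: item)
        }
    }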
diff --git a/rem/remApp.swift b/rem/remApp.swift
index 3ae96f8..465fd31 100644
--- a/rem/remApp.swift
+++ b/rem/remApp.swift
@@ -40,6 +40,8 @@ class AppDelegate: NSObject, NSApplicationDelegate {
         subsystem: Bundle.main.bundleIdentifier!,
         category: String(describing: AppDelegate.self)
     )
+
+    var imageAnalyzer = ImageAnalyzer()
 
     var timelineViewWindow: NSWindow?
     var timelineView: TimelineView?
@@ -204,7 +206,7 @@
                 if self.isTimelineOpen() {
                     self.closeTimelineView()
                 } else {
-                    let frame = DatabaseManager.shared.getLastAccessibleFrame()
+                    let frame = DatabaseManager.shared.getMaxFrame()
                     self.showTimelineView(with: frame)
                 }
             }
@@ -298,13 +300,6 @@
             guard let image = CGDisplayCreateImage(display.displayID, rect: display.frame) else { return }
             let frameId = DatabaseManager.shared.insertFrame(activeApplicationName: activeApplicationName)
 
-            DispatchQueue.main.async {
-                if (!self.isTimelineOpen()) {
-                    // Make sure to set timeline to be the latest frame
-                    self.timelineView?.viewModel.setIndexToLatest()
-                }
-            }
-
             await processScreenshot(frameId: frameId, image: image, frame: display.frame)
 
             screenshotQueue.asyncAfter(deadline: .now() + 2) { [weak self] in
@@ -335,13 +330,11 @@
 
     func stopScreenCapture() {
         isCapturing = .stopped
-        self.timelineView?.viewModel.setIndexToLatest()
         logger.info("Screen capture stopped")
     }
 
     func pauseScreenCapture() {
         isCapturing = .paused
-        self.timelineView?.viewModel.setIndexToLatest()
         logger.info("Screen capture paused")
     }
 
@@ -476,14 +469,18 @@
 
         imageBufferQueue.sync { [weak self] in
             guard let strongSelf = self else { return }
 
-            if !strongSelf.imageDataBuffer.isEmpty {
-                let buffer = strongSelf.imageDataBuffer
-                if !strongSelf.imageDataBuffer.isEmpty {
-                    strongSelf.processChunk(strongSelf.imageDataBuffer)
-                }
+            // Move the images to a temporary buffer if the threshold is reached
+            let tempBuffer: [Data] = Array(strongSelf.imageDataBuffer.prefix(strongSelf.frameThreshold))
+            strongSelf.imageDataBuffer.removeAll()
+
+            // Process the images outside of the critical section
+            if !tempBuffer.isEmpty {
+                strongSelf.processChunk(tempBuffer)
             }
         }
 
+        self.timelineView?.viewModel.setIndexToLatest()
+
         setupMenu()
     }
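The rewritten buffer flush above also drops the redundant double isEmpty check and shortens the critical section: up to frameThreshold images are copied out of imageDataBuffer and the buffer is cleared while holding imageBufferQueue. Note that, as written, processChunk still runs inside the sync block despite the "outside of the critical section" comment; the fully drained-then-processed shape, sketched against the same AppDelegate members, would look like this:

    // Sketch: copy under the lock, do the expensive work after releasing it,
    // so writers appending new frames are not blocked by processChunk.
    func flushImageBuffer() {
        var batch: [Data] = []
        imageBufferQueue.sync {
            batch = Array(imageDataBuffer.prefix(frameThreshold))
            imageDataBuffer.removeAll()
        }
        if !batch.isEmpty {
            processChunk(batch)
        }
    }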
@@ -507,7 +504,7 @@
         do {
             let configuration = ImageAnalyzer.Configuration([.text])
             let nsImage = NSImage(cgImage: image, size: NSSize(width: image.width, height: image.height))
-            let analysis = try await ImageAnalyzer().analyze(nsImage, orientation: CGImagePropertyOrientation.up, configuration: configuration)
+            let analysis = try await self.imageAnalyzer.analyze(nsImage, orientation: CGImagePropertyOrientation.up, configuration: configuration)
             let textToAssociate = analysis.transcript
             var texts = [textToAssociate]
             if self.settingsManager.settings.saveEverythingCopiedToClipboard {
@@ -562,6 +559,7 @@
     }
 
     @objc func showTimelineView(with index: Int64) {
+        pauseRecording()
         closeSearchView()
         if timelineViewWindow == nil {
             let screenRect = NSScreen.main?.frame ?? NSRect.zero
@@ -584,12 +582,14 @@
             timelineView?.viewModel.updateIndex(withIndex: index)
             timelineViewWindow?.contentView = NSHostingView(rootView: timelineView)
+            timelineView?.viewModel.setIsOpen(isOpen: true)
             timelineViewWindow?.makeKeyAndOrderFront(nil)
             DispatchQueue.main.async {
                 self.timelineViewWindow?.orderFrontRegardless() // Ensure it comes to the front
             }
         } else if (!self.isTimelineOpen()) {
             timelineView?.viewModel.updateIndex(withIndex: index)
+            timelineView?.viewModel.setIsOpen(isOpen: true)
             timelineViewWindow?.makeKeyAndOrderFront(nil)
             DispatchQueue.main.async {
                 self.timelineViewWindow?.orderFrontRegardless() // Ensure it comes to the front
             }
@@ -613,9 +613,14 @@
     func closeTimelineView() {
         timelineViewWindow?.isReleasedWhenClosed = false
         timelineViewWindow?.close()
+        timelineView?.viewModel.setIsOpen(isOpen: false)
+        if isCapturing == .paused {
+            enableRecording()
+        }
     }
 
     @objc func showSearchView() {
+        pauseRecording()
         closeTimelineView()
         // Ensure that the search view window is created and shown
         if searchViewWindow == nil {
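The first remApp.swift hunk above reflects the "careful profiling" in the subject line: instead of constructing a fresh VisionKit ImageAnalyzer for every captured frame, the delegate now reuses the single imageAnalyzer property added at the top of the file. In isolation, the reuse pattern looks like this; the wrapper type and method names are illustrative, while the analyze call and configuration are the ones used in the diff (VisionKit's ImageAnalyzer requires macOS 13+):

    import AppKit
    import VisionKit

    final class FrameTextExtractor {
        // One analyzer reused across frames instead of one allocation per call.
        private let analyzer = ImageAnalyzer()
        private let configuration = ImageAnalyzer.Configuration([.text])

        func transcript(for image: CGImage) async throws -> String {
            let nsImage = NSImage(cgImage: image, size: NSSize(width: image.width, height: image.height))
            let analysis = try await analyzer.analyze(nsImage, orientation: .up, configuration: configuration)
            return analysis.transcript
        }
    }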