Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix still image capture on Big Sur #23

Merged
Merged 1 commit on Oct 15, 2021
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 13 additions & 18 deletions PhotoStudioPlayer/CaptureSession.swift
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ extension UserDefaults {
}
}

final class CaptureSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
final class CaptureSession: NSObject, AVCapturePhotoCaptureDelegate {
let session = AVCaptureSession()
private let input: AVCaptureDeviceInput
let previewLayer: AVCaptureVideoPreviewLayer
Expand Down Expand Up @@ -50,51 +50,46 @@ final class CaptureSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelega

// MARK: - Capture frame screenshots

private lazy var videoDataOutput: AVCaptureVideoDataOutput = {
let o = AVCaptureVideoDataOutput()
o.setSampleBufferDelegate(self, queue: videoDataQueue)
private lazy var photoOutput: AVCapturePhotoOutput = {
let o = AVCapturePhotoOutput()
return o
}()
private let videoDataQueue = DispatchQueue.global(qos: .userInitiated)
private let captureFolder: URL?
private var coreImageFilterForCapture: CIFilter?
var captureEnabled = false {
didSet {
if captureEnabled && session.canAddOutput(videoDataOutput) {
session.addOutput(videoDataOutput)
if captureEnabled && session.canAddOutput(photoOutput) {
session.addOutput(photoOutput)
}
if !captureEnabled {
session.removeOutput(videoDataOutput)
session.removeOutput(photoOutput)
}
}
}
private var numberOfCapturesNeeded = 0

func captureCurrentFrame() {
videoDataQueue.sync {numberOfCapturesNeeded += 1}
let settings = AVCapturePhotoSettings(format: [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]) // populates AVCapturePhoto.pixelBuffer
photoOutput.capturePhoto(with: settings, delegate: self)
}

func openCaptureFolder() {
guard let captureFolder = captureFolder else { return }
NSWorkspace.shared.open(captureFolder)
}

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
guard let captureFolder = captureFolder else { return }
guard numberOfCapturesNeeded > 0 else { return }
numberOfCapturesNeeded -= 1

guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

let image = CIImage(cvImageBuffer: imageBuffer)
guard let pixelBuffer = photo.pixelBuffer else { return }
let image = CIImage(cvPixelBuffer: pixelBuffer)
coreImageFilterForCapture?.setValue(image, forKey: kCIInputImageKey)
guard let outputImage = coreImageFilterForCapture?.outputImage else { return }
let bitmap = NSBitmapImageRep(ciImage: outputImage)
let png = bitmap.representation(using: .png, properties: [:])
do {
try png?.write(to: captureFolder
.appendingPathComponent(UUID().uuidString)
.appendingPathExtension("png"))
.appendingPathComponent(UUID().uuidString)
.appendingPathExtension("png"))
} catch {
NSLog("%@", "\(error)")
}
Expand Down