macdriver
macdriver copied to clipboard
Framework Request: ScreenCaptureKit
As of macOS 12.3, ScreenCaptureKit has been released as the recommended replacement for the legacy screen-recording APIs. It would be a great upgrade for screen capture and audio recording in desktop applications.
import SwiftUI
import ScreenCaptureKit
import AVFoundation
import CoreMedia
import QuartzCore
// MARK: - Screen Recorder Class

/// Captures the first available display (video + system/microphone audio) via
/// ScreenCaptureKit and writes the result to a QuickTime movie on the Desktop.
///
/// Acts as both the stream delegate (errors) and the stream output (sample
/// buffers); all sample handling runs on `serialQueue`.
class ScreenRecorder: NSObject, ObservableObject, SCStreamDelegate, SCStreamOutput {
    /// True while a capture session is active; drives the UI.
    @Published var isRecording = false
    /// Most recent failure, surfaced to the UI; nil after a successful start.
    @Published var errorMessage: String?
    private var stream: SCStream?
    private var assetWriter: AVAssetWriter?
    private var assetWriterVideoInput: AVAssetWriterInput?
    private var assetWriterAudioInput: AVAssetWriterInput?
    // Set once the writer session is anchored to the first video frame's
    // presentation timestamp (see stream(_:didOutputSampleBuffer:of:)).
    private var sessionStarted = false
    private var outputURL: URL
    // Sample-buffer callbacks and writer appends are serialized here.
    private let serialQueue = DispatchQueue(label: "com.example.screenrecorder")

    override init() {
        // Unique output path per run (e.g. ~/Desktop/screen_recording_<epoch>.mov)
        // so successive recordings never overwrite each other.
        let desktopPath = NSSearchPathForDirectoriesInDomains(.desktopDirectory, .userDomainMask, true).first!
        self.outputURL = URL(fileURLWithPath: "\(desktopPath)/screen_recording_\(Date().timeIntervalSince1970).mov")
        super.init()
    }

    /// Verifies screen-recording and microphone permissions.
    /// - Throws: `NSError` (domain "ScreenRecorder", codes -1/-2) with a
    ///   user-readable message when either permission is missing.
    private func requestPermissions() async throws {
        // Preflight first; if access has not been granted yet, ask the system
        // to show the Screen Recording prompt instead of failing silently.
        // NOTE(review): CGRequestScreenCaptureAccess returns the *current*
        // grant state — a freshly prompted user may need to relaunch the app
        // after enabling access in System Settings.
        if !CGPreflightScreenCaptureAccess(), !CGRequestScreenCaptureAccess() {
            throw NSError(domain: "ScreenRecorder", code: -1, userInfo: [NSLocalizedDescriptionKey: "Screen recording permission denied. Please enable in System Settings > Privacy & Security > Screen Recording."])
        }
        // Microphone permission: prompt when undetermined, fail on denial.
        switch AVCaptureDevice.authorizationStatus(for: .audio) {
        case .notDetermined:
            guard await AVCaptureDevice.requestAccess(for: .audio) else {
                throw NSError(domain: "ScreenRecorder", code: -2, userInfo: [NSLocalizedDescriptionKey: "Microphone permission denied."])
            }
        case .denied, .restricted:
            throw NSError(domain: "ScreenRecorder", code: -2, userInfo: [NSLocalizedDescriptionKey: "Microphone permission denied. Please enable in System Settings > Privacy & Security > Microphone."])
        default:
            break
        }
    }

    /// Requests permissions, configures the stream for the first display, sets
    /// up the asset writer, and starts capture. Publishes errors on failure.
    func startRecording() async {
        do {
            try await requestPermissions()
            // Enumerate capturable content and pick the first display.
            let shareableContent = try await SCShareableContent.excludingDesktopWindows(false, onScreenWindowsOnly: true)
            guard let display = shareableContent.displays.first else {
                throw NSError(domain: "ScreenRecorder", code: -3, userInfo: [NSLocalizedDescriptionKey: "No display available."])
            }
            // Capture the entire display, excluding nothing.
            let filter = SCContentFilter(display: display, excludingApplications: [], exceptingWindows: [])
            let streamConfig = SCStreamConfiguration()
            streamConfig.width = display.width
            streamConfig.height = display.height
            streamConfig.minimumFrameInterval = CMTime(value: 1, timescale: 60) // 60 FPS cap
            streamConfig.pixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
            streamConfig.showsCursor = true
            streamConfig.capturesAudio = true // System audio requires macOS 13.0+
            streamConfig.queueDepth = 5
            // Encode at the display's actual size so output matches capture.
            try setupAssetWriter(width: display.width, height: display.height)
            stream = SCStream(filter: filter, configuration: streamConfig, delegate: self)
            try stream?.addStreamOutput(self, type: .screen, sampleHandlerQueue: serialQueue)
            try stream?.addStreamOutput(self, type: .audio, sampleHandlerQueue: serialQueue)
            try await stream?.startCapture()
            await MainActor.run {
                self.isRecording = true
                self.errorMessage = nil
            }
        } catch {
            await MainActor.run {
                self.errorMessage = error.localizedDescription
                self.isRecording = false
            }
        }
    }

    /// Stops capture, finalizes the movie file, and resets state.
    func stopRecording() async {
        guard let stream = stream else { return }
        do {
            try await stream.stopCapture()
            // Close out both inputs before finishing; otherwise trailing
            // samples may never be flushed to the file.
            assetWriterVideoInput?.markAsFinished()
            assetWriterAudioInput?.markAsFinished()
            assetWriter?.finishWriting {
                DispatchQueue.main.async {
                    self.isRecording = false
                    self.stream = nil
                    self.assetWriter = nil
                    self.assetWriterVideoInput = nil
                    self.assetWriterAudioInput = nil
                    self.sessionStarted = false
                    print("Recording saved to: \(self.outputURL.path)")
                }
            }
        } catch {
            await MainActor.run {
                self.errorMessage = error.localizedDescription
                self.isRecording = false
            }
        }
    }

    /// Creates the asset writer with video/audio inputs sized to the display.
    ///
    /// The writer session is intentionally NOT started here: it is anchored to
    /// the first video frame's presentation timestamp in the sample callback.
    /// Starting at `.zero` would mismatch ScreenCaptureKit's host-time
    /// timestamps and produce a broken timeline with a long blank lead-in.
    /// - Parameters:
    ///   - width: Output video width in pixels (the display's capture width).
    ///   - height: Output video height in pixels (the display's capture height).
    /// - Throws: `AVAssetWriter` init errors, or `NSError` code -4 if an input
    ///   cannot be attached.
    private func setupAssetWriter(width: Int, height: Int) throws {
        sessionStarted = false
        assetWriter = try AVAssetWriter(outputURL: outputURL, fileType: .mov)
        // Video: H.264 at the display's native size (previously hard-coded
        // 1920x1080, which distorted non-1080p displays).
        let videoSettings: [String: Any] = [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoWidthKey: width,
            AVVideoHeightKey: height
        ]
        let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
        videoInput.expectsMediaDataInRealTime = true
        // Audio: stereo AAC at 44.1 kHz.
        let audioSettings: [String: Any] = [
            AVFormatIDKey: kAudioFormatMPEG4AAC,
            AVSampleRateKey: 44100,
            AVNumberOfChannelsKey: 2
        ]
        let audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings)
        audioInput.expectsMediaDataInRealTime = true
        // Fail loudly instead of silently recording a file missing a track.
        guard assetWriter!.canAdd(videoInput), assetWriter!.canAdd(audioInput) else {
            throw NSError(domain: "ScreenRecorder", code: -4, userInfo: [NSLocalizedDescriptionKey: "Unable to configure movie file writer inputs."])
        }
        assetWriter!.add(videoInput)
        assetWriter!.add(audioInput)
        assetWriterVideoInput = videoInput
        assetWriterAudioInput = audioInput
        assetWriter!.startWriting()
    }

    /// SCStreamOutput: appends incoming video/audio samples to the writer.
    /// Runs on `serialQueue`.
    func stream(_ stream: SCStream, didOutputSampleBuffer sampleBuffer: CMSampleBuffer, of type: SCStreamOutputType) {
        // Ignore anything the writer cannot accept: invalid/empty buffers,
        // or a writer that is not (yet / any longer) in the .writing state.
        guard CMSampleBufferIsValid(sampleBuffer),
              CMSampleBufferDataIsReady(sampleBuffer),
              CMSampleBufferGetNumSamples(sampleBuffer) > 0,
              let writer = assetWriter, writer.status == .writing else { return }
        switch type {
        case .screen:
            // ScreenCaptureKit also delivers placeholder buffers (idle/blank
            // frames); only frames whose status is .complete carry pixel data.
            guard let attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, createIfNecessary: false) as? [[SCStreamFrameInfo: Any]],
                  let statusRaw = attachments.first?[.status] as? Int,
                  SCFrameStatus(rawValue: statusRaw) == .complete else { return }
            // Anchor the writer session to the first real frame's timestamp so
            // the movie's timeline starts at capture time, not at zero.
            if !sessionStarted {
                writer.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
                sessionStarted = true
            }
            if let videoInput = assetWriterVideoInput, videoInput.isReadyForMoreMediaData {
                videoInput.append(sampleBuffer)
            }
        case .audio:
            // Drop audio that arrives before the session is anchored;
            // appending with no session started is an error.
            guard sessionStarted else { return }
            if let audioInput = assetWriterAudioInput, audioInput.isReadyForMoreMediaData {
                audioInput.append(sampleBuffer)
            }
        @unknown default:
            break
        }
    }

    /// SCStreamDelegate: the stream stopped on its own (e.g. display went
    /// away). Publish the error and finalize whatever was captured so far.
    func stream(_ stream: SCStream, didStopWithError error: Error) {
        DispatchQueue.main.async {
            self.isRecording = false
            self.errorMessage = error.localizedDescription
            self.stream = nil
            // Best-effort flush of the partial recording.
            self.assetWriterVideoInput?.markAsFinished()
            self.assetWriterAudioInput?.markAsFinished()
            self.assetWriter?.finishWriting {}
        }
    }
}
// MARK: - Preview View with Core Animation

/// A layer-backed placeholder view that fades in when recording starts and
/// fades out when it stops.
struct PreviewView: NSViewRepresentable {
    @ObservedObject var recorder: ScreenRecorder

    func makeNSView(context: Context) -> NSView {
        let view = NSView()
        view.wantsLayer = true
        view.layer?.backgroundColor = NSColor.black.cgColor
        return view
    }

    func updateNSView(_ nsView: NSView, context: Context) {
        guard let layer = nsView.layer else { return }
        if recorder.isRecording {
            // updateNSView fires on *every* published change while recording;
            // only add the fade-in once or the animation restarts repeatedly.
            if layer.animation(forKey: "fadeIn") == nil {
                layer.removeAnimation(forKey: "fadeOut")
                let fadeIn = CABasicAnimation(keyPath: "opacity")
                fadeIn.fromValue = 0.0
                fadeIn.toValue = 1.0
                fadeIn.duration = 1.5
                fadeIn.timingFunction = CAMediaTimingFunction(name: .easeInEaseOut)
                layer.add(fadeIn, forKey: "fadeIn")
            }
        } else if layer.animation(forKey: "fadeIn") != nil {
            // Recording just stopped: replace the fade-in with a fade-out.
            // NOTE(review): the layer's model opacity is never changed, so the
            // view snaps back to fully opaque after the animation completes —
            // matches the original behavior; confirm that is intended.
            layer.removeAnimation(forKey: "fadeIn")
            let fadeOut = CABasicAnimation(keyPath: "opacity")
            fadeOut.fromValue = 1.0
            fadeOut.toValue = 0.0
            fadeOut.duration = 1.5
            fadeOut.timingFunction = CAMediaTimingFunction(name: .easeInEaseOut)
            layer.add(fadeOut, forKey: "fadeOut")
        }
    }
}
// MARK: - Main Content View

/// Main window UI: title, animated preview, error readout, and a single
/// start/stop toggle button.
struct ContentView: View {
    @StateObject private var recorder = ScreenRecorder()

    var body: some View {
        VStack(spacing: 20) {
            Text("Screen Recorder")
                .font(.title)
            PreviewView(recorder: recorder)
                .frame(width: 300, height: 200)
                .border(Color.gray)
            if let error = recorder.errorMessage {
                Text("Error: \(error)")
                    .foregroundColor(.red)
            }
            // The button stays enabled even after an error so the user can
            // retry (e.g. after granting permissions). The previous
            // .disabled(errorMessage != nil && !isRecording) modifier locked
            // the button permanently once any error was published, since
            // errorMessage is only cleared by a successful start.
            Button(recorder.isRecording ? "Stop Recording" : "Start Recording") {
                Task {
                    if recorder.isRecording {
                        await recorder.stopRecording()
                    } else {
                        await recorder.startRecording()
                    }
                }
            }
        }
        .padding()
        .frame(minWidth: 400, minHeight: 300)
    }
}
// MARK: - App Entry Point

/// Application entry point: a single window hosting the recorder UI.
@main
struct ScreenRecorderApp: App {
    var body: some Scene {
        WindowGroup { ContentView() }
    }
}