SelfieRingLight/SelfieRingLight/Features/Camera/CameraViewModel.swift
Matt Bruce 74e65829de Initial commit: SelfieRingLight app
Features:
- Camera preview with ring light effect
- Adjustable ring size with slider
- Light color presets (white, warm cream, ice blue, soft pink, warm amber, cool lavender)
- Light intensity control (opacity)
- Front flash (hides preview during capture)
- True mirror mode
- Skin smoothing toggle
- Grid overlay (rule of thirds)
- Self-timer options
- Photo and video capture modes
- iCloud sync for settings across devices

Architecture:
- SwiftUI with @Observable view models
- Protocol-oriented design (RingLightConfigurable, CaptureControlling)
- Bedrock design system integration
- CloudSyncManager for iCloud settings sync
- RevenueCat for premium features
2026-01-02 13:01:24 -06:00

202 lines
7.6 KiB
Swift

import AVFoundation
import SwiftUI
import Photos
import CoreImage
import UIKit
import Bedrock
@MainActor
@Observable
class CameraViewModel: NSObject {
    // MARK: - State

    var isCameraAuthorized = false
    var isPhotoLibraryAuthorized = false
    var captureSession: AVCaptureSession?
    var photoOutput: AVCapturePhotoOutput?
    var videoOutput: AVCaptureMovieFileOutput?
    var videoDataOutput: AVCaptureVideoDataOutput?
    var previewLayer: AVCaptureVideoPreviewLayer?
    var isUsingFrontCamera = true
    var isRecording = false
    // Brightness captured before forcing the screen to full; restored on exit.
    var originalBrightness: CGFloat = 0.5
    var ciContext = CIContext()
    /// Whether the preview should be hidden (for front flash effect)
    var isPreviewHidden = false
    let settings = SettingsViewModel() // Shared config

    // MARK: - Screen Brightness Handling

    /// Gets the current screen from any available window scene
    private var currentScreen: UIScreen? {
        UIApplication.shared.connectedScenes
            .compactMap { $0 as? UIWindowScene }
            .first?.screen
    }

    /// Remembers the user's brightness so `restoreBrightness()` can put it back.
    private func saveCurrentBrightness() {
        if let screen = currentScreen {
            originalBrightness = screen.brightness
        }
    }

    private func setBrightness(_ value: CGFloat) {
        currentScreen?.brightness = value
    }

    // MARK: - Session Lifecycle

    /// Requests camera/photo-library permission, builds the capture session,
    /// and starts it. Also disables the idle timer and pushes the screen to
    /// full brightness for the ring-light effect.
    func setupCamera() async {
        isCameraAuthorized = await AVCaptureDevice.requestAccess(for: .video)
        isPhotoLibraryAuthorized = await PHPhotoLibrary.requestAuthorization(for: .addOnly) == .authorized
        guard isCameraAuthorized else { return }

        let session = AVCaptureSession()
        captureSession = session
        guard configureSession(session) else { return }

        // FIX: startRunning() blocks until the session is up; Apple's docs say
        // to call it off the main thread so the UI doesn't stall.
        DispatchQueue.global(qos: .userInitiated).async {
            session.startRunning()
        }

        UIApplication.shared.isIdleTimerDisabled = true
        saveCurrentBrightness()
        // Set screen to full brightness for best ring light effect
        setBrightness(1.0)
    }

    /// Adds the camera input and the photo/movie/frame outputs to `session`.
    /// Returns `false` when no usable camera input could be created.
    private func configureSession(_ session: AVCaptureSession) -> Bool {
        session.beginConfiguration()
        // FIX: the original returned early on a missing camera/input without
        // ever calling commitConfiguration(), leaving the session stuck
        // mid-transaction. `defer` guarantees the pairing on every exit path.
        defer { session.commitConfiguration() }

        session.sessionPreset = .high

        guard let input = makeCameraInput(position: isUsingFrontCamera ? .front : .back) else {
            return false
        }
        if session.canAddInput(input) {
            session.addInput(input)
        }

        let photo = AVCapturePhotoOutput()
        if session.canAddOutput(photo) {
            session.addOutput(photo)
        }
        photoOutput = photo

        let movie = AVCaptureMovieFileOutput()
        if session.canAddOutput(movie) {
            session.addOutput(movie)
        }
        videoOutput = movie

        let frames = AVCaptureVideoDataOutput()
        frames.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
        if session.canAddOutput(frames) {
            session.addOutput(frames)
        }
        videoDataOutput = frames

        return true
    }

    /// Builds a device input for the wide-angle camera at `position`, or nil
    /// if the device is unavailable or the input can't be created.
    private func makeCameraInput(position: AVCaptureDevice.Position) -> AVCaptureDeviceInput? {
        guard let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: position) else {
            return nil
        }
        return try? AVCaptureDeviceInput(device: device)
    }

    /// Swaps between the front and back cameras.
    func switchCamera() {
        guard let session = captureSession else { return }

        // FIX: resolve the new input BEFORE tearing down the old one, so a
        // missing camera can't leave the session with no input at all, and
        // isUsingFrontCamera only flips once the swap actually succeeds.
        let newPosition: AVCaptureDevice.Position = isUsingFrontCamera ? .back : .front
        guard let input = makeCameraInput(position: newPosition) else { return }

        session.beginConfiguration()
        defer { session.commitConfiguration() } // always paired, even on early exit

        session.inputs.forEach { session.removeInput($0) }
        if session.canAddInput(input) {
            session.addInput(input)
            isUsingFrontCamera.toggle()
        }
    }

    // MARK: - Capture

    func capturePhoto() {
        // If front flash is enabled, hide the preview to show the ring light
        if settings.isFrontFlashEnabled {
            performFrontFlashCapture()
        } else {
            photoOutput?.capturePhoto(with: AVCapturePhotoSettings(), delegate: self)
        }
    }

    /// Performs photo capture with front flash effect: hides the preview so
    /// the full-screen ring light illuminates the subject, captures, then
    /// restores the preview after a short fixed delay.
    private func performFrontFlashCapture() {
        isPreviewHidden = true
        Task {
            // Brief delay to show the full ring light before capturing
            try? await Task.sleep(for: .milliseconds(150))
            photoOutput?.capturePhoto(with: AVCapturePhotoSettings(), delegate: self)
            // Restore preview after a fixed delay; actual completion is
            // reported separately via the photo capture delegate.
            try? await Task.sleep(for: .milliseconds(200))
            isPreviewHidden = false
        }
    }

    func startRecording() {
        guard let videoOutput, !isRecording else { return }
        // FIX: AVCaptureMovieFileOutput fails if the destination file already
        // exists, so the fixed "video.mov" name broke every recording after
        // the first. Use a unique name per recording instead.
        let url = FileManager.default.temporaryDirectory
            .appendingPathComponent(UUID().uuidString)
            .appendingPathExtension("mov")
        videoOutput.startRecording(to: url, recordingDelegate: self)
        isRecording = true
    }

    func stopRecording() {
        guard let videoOutput, isRecording else { return }
        videoOutput.stopRecording()
        isRecording = false
    }

    /// Restores the user's original brightness and re-enables the idle timer.
    func restoreBrightness() {
        setBrightness(originalBrightness)
        UIApplication.shared.isIdleTimerDisabled = false
    }

    // Business logic: Check if ready to capture
    var canCapture: Bool {
        captureSession?.isRunning == true && isPhotoLibraryAuthorized
    }
}
// MARK: - AVCapturePhotoCaptureDelegate
extension CameraViewModel: AVCapturePhotoCaptureDelegate {
    /// Saves the captured photo to the user's photo library and posts a
    /// VoiceOver announcement. Runs off the main actor on AVFoundation's
    /// delegate queue, hence `nonisolated` and the `@MainActor` hop for the
    /// accessibility announcement.
    nonisolated func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        // FIX: the original silently ignored the `error` parameter; bail out
        // on capture failure instead of proceeding as if it succeeded.
        guard error == nil, let data = photo.fileDataRepresentation() else { return }
        PHPhotoLibrary.shared().performChanges {
            PHAssetCreationRequest.forAsset().addResource(with: .photo, data: data, options: nil)
        }
        Task { @MainActor in
            UIAccessibility.post(notification: .announcement, argument: String(localized: "Photo captured"))
        }
    }
}
// MARK: - AVCaptureFileOutputRecordingDelegate
extension CameraViewModel: AVCaptureFileOutputRecordingDelegate {
    /// Saves the recorded movie to the user's photo library, cleans up the
    /// temporary file, and posts a VoiceOver announcement. Runs off the main
    /// actor on AVFoundation's delegate queue.
    nonisolated func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        // A non-nil error can still leave a usable partial file (e.g. the
        // recording was interrupted), so check for the file rather than
        // bailing on every error.
        guard FileManager.default.fileExists(atPath: outputFileURL.path) else { return }
        PHPhotoLibrary.shared().performChanges({
            PHAssetCreationRequest.forAsset().addResource(with: .video, fileURL: outputFileURL, options: nil)
        }) { success, _ in
            // FIX: the original never deleted the temporary movie, leaking
            // one file per recording into the temp directory. Remove it once
            // the photo library has copied it.
            if success {
                try? FileManager.default.removeItem(at: outputFileURL)
            }
        }
        Task { @MainActor in
            UIAccessibility.post(notification: .announcement, argument: String(localized: "Video saved"))
        }
    }
}
// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
extension CameraViewModel: AVCaptureVideoDataOutputSampleBufferDelegate {
    /// Per-frame callback, delivered on the "videoQueue" background queue.
    ///
    /// Skin smoothing is not yet implemented. A real implementation would need:
    /// 1. A CIContext created on (and owned by) this queue
    /// 2. Rendering into a Metal texture
    /// 3. Display via CAMetalLayer or similar
    /// Note this runs off the main actor and therefore cannot touch
    /// @MainActor-isolated properties directly.
    nonisolated func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        // FIX: the previous placeholder built a CIImage and a CIGaussianBlur
        // filter on every frame (30-60x per second) and then discarded the
        // result (`_ = filter.outputImage`). Until the rendered output is
        // actually displayed, do no per-frame work here.
    }
}