Files
shotscreen/ShotScreen/Sources/BackgroundRemover.swift
Nick Roodenrijs 0dabed11d2 🎉 ShotScreen v1.0 - Initial Release
🚀 First official release of ShotScreen with complete feature set:

 Core Features:
- Advanced screenshot capture system
- Multi-monitor support
- Professional UI/UX design
- Automated update system with Sparkle
- Apple notarized & code signed

🛠 Technical Excellence:
- Native Swift macOS application
- Professional build & deployment pipeline
- Comprehensive error handling
- Memory optimized performance

📦 Distribution Ready:
- Professional DMG packaging
- Apple notarization complete
- No security warnings for users
- Ready for public distribution

This is the foundation release that establishes ShotScreen as a premium screenshot tool for macOS.
2025-06-28 16:15:15 +02:00

1616 lines
71 KiB
Swift
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
import Foundation
import AppKit
import Vision
import CoreImage
import CoreImage.CIFilterBuiltins
import CoreML
import SwiftUI
import Darwin // For utsname system info
// MARK: - Background Removal Manager
/// Removes image backgrounds using either Apple's Vision Framework or the
/// RMBG-1.4 Core ML model, with workarounds for E5RT cache corruption.
class BackgroundRemover {
    /// Process-wide singleton; the initializer is private.
    static let shared = BackgroundRemover()
    /// Backends selectable by callers of `removeBackground(from:method:completion:)`.
    enum ProcessingMethod {
        case visionFramework
        case coreMLRMBG14
    }
    private init() {}
func removeBackground(from image: NSImage, method: ProcessingMethod, completion: @escaping (NSImage?) -> Void) {
guard let cgImage = image.cgImage(forProposedRect: nil, context: nil, hints: nil) else {
print("❌ BackgroundRemover: Could not convert NSImage to CGImage")
completion(nil)
return
}
DispatchQueue.global(qos: .userInitiated).async {
switch method {
case .visionFramework:
self.processWithVisionFramework(cgImage: cgImage, completion: completion)
case .coreMLRMBG14:
self.processWithCoreMLModelWithFallback(cgImage: cgImage, completion: completion)
}
}
}
    // 🎯 NEW: Process with user's preferred method from settings (with smart fallback)
    /// Routes processing by the user's SettingsManager preference:
    /// `.auto` tries the RMBG Core ML model with a Vision fallback,
    /// `.rmbg` runs RMBG only (nil on failure), `.vision` runs Vision only.
    func processWithPreferredMethod(from image: NSImage, completion: @escaping (NSImage?) -> Void) {
        let preferredMethod = SettingsManager.shared.preferredBackgroundRemovalMethod
        switch preferredMethod {
        case .auto:
            // Auto mode: try RMBG first, fallback to Vision
            processWithCoreMLModelWithFallback(from: image, completion: completion)
        case .rmbg:
            // RMBG only (no fallback)
            processWithCoreMLRMBGOnly(from: image, completion: completion)
        case .vision:
            // Vision Framework only
            processWithVisionFramework(from: image, completion: completion)
        }
    }
// 🎯 NEW: Process with CoreML model and fallback to Vision
private func processWithCoreMLModelWithFallback(from image: NSImage, completion: @escaping (NSImage?) -> Void) {
guard let cgImage = image.cgImage(forProposedRect: nil, context: nil, hints: nil) else {
print("❌ BackgroundRemover: Could not convert NSImage to CGImage")
completion(nil)
return
}
DispatchQueue.global(qos: .userInitiated).async {
self.processWithCoreMLModelWithFallback(cgImage: cgImage, completion: completion)
}
}
// 🎯 NEW: Process with Vision Framework only
private func processWithVisionFramework(from image: NSImage, completion: @escaping (NSImage?) -> Void) {
guard let cgImage = image.cgImage(forProposedRect: nil, context: nil, hints: nil) else {
print("❌ BackgroundRemover: Could not convert NSImage to CGImage")
completion(nil)
return
}
DispatchQueue.global(qos: .userInitiated).async {
self.processWithVisionFramework(cgImage: cgImage, completion: completion)
}
}
// 🎯 NEW: Process with RMBG only (no fallback)
private func processWithCoreMLRMBGOnly(from image: NSImage, completion: @escaping (NSImage?) -> Void) {
guard let cgImage = image.cgImage(forProposedRect: nil, context: nil, hints: nil) else {
print("❌ BackgroundRemover: Could not convert NSImage to CGImage")
completion(nil)
return
}
DispatchQueue.global(qos: .userInitiated).async {
self.processWithCoreMLRMBGOnly(cgImage: cgImage, completion: completion)
}
}
// MARK: - Smart E5RT Issue Detection
private func shouldSkipRMBGDueToIssues() -> Bool {
// Check if we have persistent E5RT cache issues that suggest we should avoid RMBG
let issueThreshold = 3 // Skip after 3 attempts with issues
let recentFailures = UserDefaults.standard.integer(forKey: "ShotScreen_E5RT_FailureCount")
if recentFailures >= issueThreshold {
let lastSkip = UserDefaults.standard.double(forKey: "ShotScreen_E5RT_LastSkip")
let timeSinceSkip = Date().timeIntervalSince1970 - lastSkip
// Reset after 1 hour, in case the issue was temporary
if timeSinceSkip > 3600 {
UserDefaults.standard.set(0, forKey: "ShotScreen_E5RT_FailureCount")
UserDefaults.standard.removeObject(forKey: "ShotScreen_E5RT_LastSkip")
return false
}
return true
}
return false
}
private func recordE5RTIssue() {
let currentCount = UserDefaults.standard.integer(forKey: "ShotScreen_E5RT_FailureCount")
UserDefaults.standard.set(currentCount + 1, forKey: "ShotScreen_E5RT_FailureCount")
UserDefaults.standard.set(Date().timeIntervalSince1970, forKey: "ShotScreen_E5RT_LastSkip")
print("🔧 E5RT Issue recorded: Total count = \(currentCount + 1)")
// If we're getting too many E5RT issues, disable Neural Engine more aggressively
if currentCount >= 3 {
print("🔧 TOO MANY E5RT ISSUES: Disabling Neural Engine for 5 minutes")
UserDefaults.standard.set(true, forKey: "ShotScreen_ForceCPU_TempMode")
UserDefaults.standard.set(Date().timeIntervalSince1970, forKey: "ShotScreen_ForceCPU_StartTime_Extended")
}
}
// 🎯 NEW: RMBG-only processing (no fallback)
private func processWithCoreMLRMBGOnly(cgImage: CGImage, completion: @escaping (NSImage?) -> Void) {
if tryLoadAndProcessModel(cgImage: cgImage, modelName: "bria-rmbg-coreml", displayName: "RMBG-1.4", completion: completion) {
// RMBG-1.4 processing initiated successfully
return
}
// Failed to load RMBG model
print("❌ RMBG-1.4 failed and no fallback allowed by user setting")
DispatchQueue.main.async {
completion(nil)
}
}
// MARK: - Model Availability
private func getModelPath() -> String {
let appSupportURL = FileManager.default.urls(for: .applicationSupportDirectory, in: .userDomainMask).first!
let appDirectory = appSupportURL.appendingPathComponent("ShotScreen")
return appDirectory.appendingPathComponent("bria-rmbg-coreml.mlpackage").path
}
func isRMBGModelAvailable() -> Bool {
return FileManager.default.fileExists(atPath: getModelPath())
}
// MARK: - Public Cache Management
func clearCoreMLCachePublic() {
print("🧹 PUBLIC: User requested Core ML cache clearing...")
// Do this on background thread to avoid blocking UI
DispatchQueue.global(qos: .utility).async {
self.clearCoreMLCache()
// Reset failure count since user manually cleared cache
UserDefaults.standard.set(0, forKey: "ShotScreen_E5RT_FailureCount")
UserDefaults.standard.removeObject(forKey: "ShotScreen_E5RT_LastSkip")
DispatchQueue.main.async {
print("✅ Cache clearing completed successfully!")
}
}
}
    // MARK: - Cache Management (ULTRA AGGRESSIVE E5RT fix)
    /// Three-pronged cache reset: delete known cache directories, arm a
    /// temporary CPU-only window, and drop the compiled model so Core ML
    /// starts from a clean compile on the next load.
    private func clearCoreMLCache() {
        print("🧹 ULTRA AGGRESSIVE: Nuking ALL Core ML E5RT caches...")
        // STRATEGY 1: Direct file removal (fastest)
        ultraFastCacheClear()
        // STRATEGY 2: Force CPU-only mode temporarily
        temporarilyForceCPUMode()
        // STRATEGY 3: Model precompilation with fresh cache
        forceModelRecompilation()
        print("🧹 ULTRA AGGRESSIVE: E5RT cache nuking completed")
    }
private func ultraFastCacheClear() {
// Use rm -rf for maximum speed (faster than FileManager)
let cachePaths = [
"~/Library/Caches/com.apple.e5rt.e5bundlecache",
"~/Library/Caches/com.apple.CoreML",
"~/Library/Caches/com.apple.mlcompute",
"~/Library/Caches/ShotScreen"
]
for path in cachePaths {
let expandedPath = NSString(string: path).expandingTildeInPath
let command = "rm -rf '\(expandedPath)' 2>/dev/null"
let process = Process()
process.launchPath = "/bin/sh"
process.arguments = ["-c", command]
do {
try process.run()
process.waitUntilExit()
print("✅ NUKED: \(path)")
} catch {
print("⚠️ Could not nuke: \(path)")
}
}
}
private func temporarilyForceCPUMode() {
// Set flag to force CPU mode for next few loads
UserDefaults.standard.set(true, forKey: "ShotScreen_ForceCPU_TempMode")
UserDefaults.standard.set(Date().timeIntervalSince1970, forKey: "ShotScreen_ForceCPU_StartTime")
print("🔧 TEMPORARY: Forcing CPU-only mode for next 60 seconds")
}
    /// Attempts to delete a compiled copy of the RMBG model so the next
    /// load compiles fresh; all errors are deliberately ignored.
    /// NOTE(review): MLModel.compileModel(at:) compiles into a new
    /// temporary location, so this removes that fresh output rather than a
    /// persistent cache entry — confirm it forces recompilation as intended.
    private func forceModelRecompilation() {
        // Delete any existing compiled model to force fresh compilation
        let modelPath = getModelPath()
        let modelURL = URL(fileURLWithPath: modelPath)
        // Remove compiled version if it exists
        do {
            let compiledURL = try MLModel.compileModel(at: modelURL)
            if FileManager.default.fileExists(atPath: compiledURL.path) {
                try FileManager.default.removeItem(at: compiledURL)
                print("✅ FORCED: Model recompilation")
            }
        } catch {
            // Ignore errors - we just want to force recompilation
        }
    }
private func clearWildcardCachePath(_ pattern: String) {
// Handle /var/folders/*/com.apple.e5rt.e5bundlecache patterns
let components = pattern.components(separatedBy: "*")
guard components.count == 2 else { return }
let prefix = components[0]
let suffix = components[1]
do {
let prefixURL = URL(fileURLWithPath: prefix)
let contents = try FileManager.default.contentsOfDirectory(
at: prefixURL,
includingPropertiesForKeys: nil
)
for item in contents {
let targetPath = item.appendingPathComponent(String(suffix.dropFirst()))
if FileManager.default.fileExists(atPath: targetPath.path) {
try FileManager.default.removeItem(at: targetPath)
print("✅ Cleared system cache: \(targetPath.path)")
}
}
} catch {
print("⚠️ Could not clear wildcard cache \(pattern): \(error.localizedDescription)")
}
}
    /// Tries to remove a compiled copy of the RMBG model so the next load
    /// starts from a clean compile; failures are logged, not thrown.
    /// NOTE(review): as with forceModelRecompilation(), compileModel(at:)
    /// writes to a fresh location, so what gets removed is that new output
    /// — confirm this clears what it intends to.
    private func clearCompiledModelCache() {
        // Try to find and clear any compiled versions of our model
        let modelPath = getModelPath()
        let modelURL = URL(fileURLWithPath: modelPath)
        do {
            // Force recompilation by removing any existing compiled versions
            let compiledURL = try MLModel.compileModel(at: modelURL)
            if FileManager.default.fileExists(atPath: compiledURL.path) {
                try FileManager.default.removeItem(at: compiledURL)
                print("✅ Cleared compiled model cache")
            }
        } catch {
            print("⚠️ Could not clear compiled model cache: \(error.localizedDescription)")
        }
    }
    // MARK: - Optimized Model Loading with Proactive Cache Management
    /// Loads an MLModel with the optimal compute units, retrying up to two
    /// more times when the error message mentions "resources.bin" or
    /// "E5RT" (cache corruption); the cache is cleared between attempts.
    /// Errors that don't match the pattern, or occur after the retries
    /// are exhausted, propagate to the caller.
    private func loadModelWithRetry(at url: URL, retryCount: Int = 0) throws -> MLModel {
        // Proactively clear cache on first attempt if we've seen issues before
        if retryCount == 0 && hasRecentCacheIssues() {
            print("🧹 Proactively clearing E5RT cache due to recent issues...")
            clearCoreMLCache()
            // Blocking sleep; callers invoke this from background queues.
            Thread.sleep(forTimeInterval: 0.5)
        }
        do {
            let config = MLModelConfiguration()
            config.computeUnits = getOptimalComputeUnits()
            // Add cache preferences to avoid E5RT issues
            // NOTE(review): this property exists well before macOS 14; the
            // availability gate looks stricter than necessary — confirm.
            if #available(macOS 14.0, *) {
                config.allowLowPrecisionAccumulationOnGPU = true
            }
            return try MLModel(contentsOf: url, configuration: config)
        } catch let error where retryCount < 2 && (error.localizedDescription.contains("resources.bin") || error.localizedDescription.contains("E5RT")) {
            print("🔄 E5RT cache error detected, clearing cache and retrying... (attempt \(retryCount + 1)/3)")
            clearCoreMLCache()
            // Wait longer for cache to properly clear
            Thread.sleep(forTimeInterval: 1.5)
            return try loadModelWithRetry(at: url, retryCount: retryCount + 1)
        }
    }
// MARK: - Cache Issue Detection
private func hasRecentCacheIssues() -> Bool {
// Check if we've seen cache issues recently by looking for E5RT cache directories
let problemIndicators = [
"~/Library/Caches/ShotScreen/com.apple.e5rt.e5bundlecache",
"~/Library/Caches/com.apple.e5rt.e5bundlecache"
]
for location in problemIndicators {
let expandedPath = NSString(string: location).expandingTildeInPath
if FileManager.default.fileExists(atPath: expandedPath) {
// Check if directory is non-empty (has cached data that might be corrupt)
do {
let contents = try FileManager.default.contentsOfDirectory(atPath: expandedPath)
if !contents.isEmpty {
print("🔍 Found existing E5RT cache, potential for issues: \(location)")
return true
}
} catch {
// If we can't read it, it might be corrupt
return true
}
}
}
return false
}
    // MARK: - CPU-only Model Loading
    /// Loads the model with `.cpuOnly` compute units. Retries up to two
    /// more times when the error message mentions "resources.bin",
    /// clearing the cache between attempts; other errors propagate.
    private func loadModelWithCPUOnly(at url: URL, retryCount: Int = 0) throws -> MLModel {
        do {
            let config = MLModelConfiguration()
            config.computeUnits = .cpuOnly
            return try MLModel(contentsOf: url, configuration: config)
        } catch let error where retryCount < 2 && error.localizedDescription.contains("resources.bin") {
            print("🔄 E5RT cache error in CPU mode, clearing cache and retrying... (attempt \(retryCount + 1)/3)")
            clearCoreMLCache()
            // Wait a moment for cache to clear
            Thread.sleep(forTimeInterval: 1.0)
            return try loadModelWithCPUOnly(at: url, retryCount: retryCount + 1)
        }
    }
    // MARK: - Architecture Detection & Compute Unit Optimization
    /// Chooses MLComputeUnits for model loading: `.cpuOnly` while the
    /// temporary E5RT-avoidance window is active, `.all` on Apple Silicon,
    /// `.cpuAndGPU` otherwise (Intel has no Neural Engine).
    private func getOptimalComputeUnits() -> MLComputeUnits {
        // Check if we're in temporary CPU-only mode to avoid E5RT issues
        if isInTemporaryCPUMode() {
            print("🔧 TEMPORARY CPU MODE: Using CPU-only to avoid E5RT issues")
            return .cpuOnly
        }
        // Detect CPU architecture for optimal Core ML performance
        var systemInfo = utsname()
        uname(&systemInfo)
        // Rebind the fixed-size C char tuple to read the machine string.
        // NOTE(review): `capacity: 1` with a tuple rebind is a common idiom
        // but technically under-declares the capacity — confirm acceptable.
        let machine = withUnsafePointer(to: &systemInfo.machine) {
            $0.withMemoryRebound(to: CChar.self, capacity: 1) {
                String(validatingUTF8: $0)
            }
        }
        let machineString = machine ?? "unknown"
        print("🖥️ Detected machine architecture: \(machineString)")
        // Check for Apple Silicon (M1, M2, M3, etc.)
        if machineString.contains("arm64") || machineString.hasPrefix("arm") {
            print("🚀 Apple Silicon detected - using all compute units (Neural Engine + GPU + CPU)")
            return .all
        } else {
            // Intel Mac - use CPU and GPU only (no Neural Engine available)
            print("⚡ Intel Mac detected - using CPU and GPU only (no Neural Engine)")
            return .cpuAndGPU
        }
    }
private func isInTemporaryCPUMode() -> Bool {
guard UserDefaults.standard.bool(forKey: "ShotScreen_ForceCPU_TempMode") else {
return false
}
let startTime = UserDefaults.standard.double(forKey: "ShotScreen_ForceCPU_StartTime")
let currentTime = Date().timeIntervalSince1970
let elapsed = currentTime - startTime
// Force CPU mode for 60 seconds after cache clear
if elapsed > 60 {
// Temp mode expired, clear flags
UserDefaults.standard.removeObject(forKey: "ShotScreen_ForceCPU_TempMode")
UserDefaults.standard.removeObject(forKey: "ShotScreen_ForceCPU_StartTime")
print("🔧 TEMPORARY CPU MODE: Expired, returning to normal mode")
return false
}
print("🔧 TEMPORARY CPU MODE: Still active (\(Int(60-elapsed))s remaining)")
return true
}
    // MARK: - Vision Framework Processing (OPTIMIZED)
    /// Foreground segmentation via VNGenerateForegroundInstanceMaskRequest
    /// (requires macOS 14+). On success the mask is handed to
    /// applyMask(mask:to:completion:); every failure path delivers nil on
    /// the main queue.
    private func processWithVisionFramework(cgImage: CGImage, completion: @escaping (NSImage?) -> Void) {
        guard #available(macOS 14.0, *) else {
            print("❌ Vision framework background removal requires macOS 14.0 or higher")
            DispatchQueue.main.async { completion(nil) }
            return
        }
        print("🚀 VISION FAST TRACK: Using optimized Vision Framework")
        let request = VNGenerateForegroundInstanceMaskRequest { request, error in
            if let error = error {
                print("❌ Vision error: \(error)")
                DispatchQueue.main.async { completion(nil) }
                return
            }
            guard let results = request.results, !results.isEmpty else {
                print("❌ No foreground found with Vision Framework")
                DispatchQueue.main.async { completion(nil) }
                return
            }
            guard let result = results.first as? VNInstanceMaskObservation else {
                print("❌ Invalid result from Vision Framework")
                DispatchQueue.main.async { completion(nil) }
                return
            }
            print("✅ VISION FAST: Completed in ~1-2 seconds! (\(results.count) results)")
            // Only the first observation is used; extra instances ignored.
            self.applyMask(mask: result, to: cgImage, completion: completion)
        }
        // OPTIMIZATION: Configure Vision Framework for maximum speed
        request.revision = VNGenerateForegroundInstanceMaskRequest.defaultRevision
        // Create handler with optimized options
        let handlerOptions: [VNImageOption: Any] = [
            .ciContext: CIContext(options: [
                .useSoftwareRenderer: false, // Use hardware acceleration
                .priorityRequestLow: false // High priority processing
            ])
        ]
        let handler = VNImageRequestHandler(cgImage: cgImage, options: handlerOptions)
        // Perform on high-priority queue for faster processing
        DispatchQueue.global(qos: .userInitiated).async {
            do {
                try handler.perform([request])
            } catch {
                print("❌ Vision handler error: \(error)")
                DispatchQueue.main.async { completion(nil) }
            }
        }
    }
    /// Generates a scaled foreground mask from `mask` and composites
    /// `image` over a transparent background with CIBlendWithMask.
    /// Runs entirely on a background queue; the completion (image or nil)
    /// is always delivered on the main queue.
    @available(macOS 14.0, *)
    private func applyMask(mask: VNInstanceMaskObservation, to image: CGImage, completion: @escaping (NSImage?) -> Void) {
        // OPTIMIZATION: Run mask processing on background queue for better performance
        DispatchQueue.global(qos: .userInitiated).async {
            do {
                print("🚀 FAST MASK: Starting optimized mask generation...")
                // BOTTLENECK 1: Optimize mask generation with performance options
                let imageHandler = VNImageRequestHandler(cgImage: image, options: [
                    .ciContext: self.getOptimizedCIContext()
                ])
                let maskImage = try mask.generateScaledMaskForImage(
                    forInstances: mask.allInstances,
                    from: imageHandler
                )
                print("🚀 FAST MASK: Mask generated, applying to image...")
                // OPTIMIZATION: Use same optimized CIContext for all operations
                let optimizedContext = self.getOptimizedCIContext()
                let ciImage = CIImage(cgImage: image)
                let ciMask = CIImage(cvPixelBuffer: maskImage)
                // Transparent background: masked-out pixels become clear.
                let filter = CIFilter.blendWithMask()
                filter.inputImage = ciImage
                filter.backgroundImage = CIImage.empty()
                filter.maskImage = ciMask
                guard let outputImage = filter.outputImage else {
                    print("❌ Error applying mask")
                    DispatchQueue.main.async { completion(nil) }
                    return
                }
                // BOTTLENECK 2: Use optimized context instead of creating new one
                guard let cgResult = optimizedContext.createCGImage(outputImage, from: outputImage.extent) else {
                    print("❌ Error creating result image")
                    DispatchQueue.main.async { completion(nil) }
                    return
                }
                let resultImage = NSImage(cgImage: cgResult, size: NSSize(width: cgResult.width, height: cgResult.height))
                DispatchQueue.main.async {
                    print("✅ VISION OPTIMIZED: Background removed in ~1-2 seconds!")
                    completion(resultImage)
                }
            } catch {
                print("❌ Error processing mask: \(error.localizedDescription)")
                DispatchQueue.main.async { completion(nil) }
            }
        }
    }
    // MARK: - Optimized CIContext (PERFORMANCE CRITICAL)
    // Lazily-created CIContext shared by the Vision and RMBG paths.
    // NOTE(review): read/written from multiple background queues with no
    // synchronization — worst case appears to be creating a redundant
    // context; confirm this is acceptable.
    private var _optimizedCIContext: CIContext?
    /// Returns the cached hardware-accelerated CIContext, creating it on
    /// first use.
    private func getOptimizedCIContext() -> CIContext {
        if let existingContext = _optimizedCIContext {
            return existingContext
        }
        // Create high-performance CIContext with optimized settings
        let context = CIContext(options: [
            .useSoftwareRenderer: false, // Force hardware acceleration
            .priorityRequestLow: false, // High priority processing
            .cacheIntermediates: true // Cache for better performance
        ])
        _optimizedCIContext = context
        print("🚀 FAST CONTEXT: Created optimized CIContext for Vision Framework")
        return context
    }
// MARK: - Core ML Processing with Fallback
private func processWithCoreMLModelWithFallback(cgImage: CGImage, completion: @escaping (NSImage?) -> Void) {
// Try RMBG-1.4 first, fallback to Vision Framework
print("🤖 Attempting RMBG-1.4 Core ML model...")
if tryLoadAndProcessModel(cgImage: cgImage, modelName: "bria-rmbg-coreml", displayName: "RMBG-1.4", completion: completion) {
// RMBG-1.4 succeeded
return
}
// Fallback to Vision Framework
print("🔄 RMBG-1.4 failed, falling back to Vision Framework...")
processWithVisionFramework(cgImage: cgImage, completion: completion)
}
    /// Loads the RMBG model from Application Support, compiles it if it is
    /// still an .mlpackage, and kicks off a VNCoreMLRequest for `cgImage`.
    /// Returns true when processing was started — including the case where
    /// an E5RT failure is being handled by an internal Vision fallback —
    /// and false when the caller should fall back itself.
    /// NOTE(review): the `modelName` parameter is never read here (the
    /// path comes from getModelPath()); it is only forwarded to the
    /// CPU-only fallback — confirm whether it is still needed.
    private func tryLoadAndProcessModel(cgImage: CGImage, modelName: String, displayName: String, completion: @escaping (NSImage?) -> Void) -> Bool {
        // Check if model is available in Application Support directory
        let modelPath = getModelPath()
        guard FileManager.default.fileExists(atPath: modelPath) else {
            print("\(displayName) model not found at: \(modelPath)")
            return false
        }
        let finalModelURL = URL(fileURLWithPath: modelPath)
        do {
            print("📦 Trying to load \(displayName) model from: \(finalModelURL.lastPathComponent)")
            // First try to compile the model if it's not compiled
            let compiledModelURL: URL
            if finalModelURL.pathExtension == "mlpackage" {
                print("🔧 Compiling \(displayName) model...")
                compiledModelURL = try MLModel.compileModel(at: finalModelURL)
                print("✅ Model compiled successfully")
            } else {
                compiledModelURL = finalModelURL
            }
            // Try to load the Core ML model with retry logic for E5RT issues
            let model = try loadModelWithRetry(at: compiledModelURL)
            let visionModel = try VNCoreMLModel(for: model)
            print("\(displayName) model successfully loaded")
            // Create the request with E5RT monitoring
            let request = VNCoreMLRequest(model: visionModel) { request, error in
                if let error = error {
                    print("❌ Core ML request error for \(displayName): \(error)")
                    // Check if this is an E5RT related error
                    if error.localizedDescription.contains("resources.bin") ||
                        error.localizedDescription.contains("E5RT") {
                        self.recordE5RTIssue()
                    }
                    DispatchQueue.main.async { completion(nil) }
                    return
                }
                guard let results = request.results,
                      let pixelBufferObservation = results.first as? VNPixelBufferObservation else {
                    print("❌ No valid result from \(displayName) model")
                    DispatchQueue.main.async { completion(nil) }
                    return
                }
                // Convert the result to NSImage
                if let resultImage = self.convertPixelBufferToNSImage(pixelBufferObservation.pixelBuffer, originalImage: cgImage) {
                    print("\(displayName) processing successful")
                    DispatchQueue.main.async { completion(resultImage) }
                } else {
                    print("❌ Could not convert \(displayName) result")
                    DispatchQueue.main.async { completion(nil) }
                }
            }
            // Configure request
            request.imageCropAndScaleOption = .scaleFill
            // Perform the request
            let handler = VNImageRequestHandler(cgImage: cgImage, options: [:])
            try handler.perform([request])
            return true // Success
        } catch {
            print("\(displayName) model error: \(error)")
            // Classify the failure by substring to pick a recovery path.
            let errorMessage = error.localizedDescription
            if errorMessage.contains("resources.bin") {
                print("🔧 E5RT cache corruption detected - resources.bin missing")
                print("🚀 FAST FALLBACK: Immediately switching to Vision Framework")
                // Record this issue and clear cache for next time
                recordE5RTIssue()
                clearCoreMLCache()
                // Instead of retrying RMBG, immediately fall back to Vision
                DispatchQueue.global(qos: .userInitiated).async {
                    self.processWithVisionFramework(cgImage: cgImage, completion: completion)
                }
                return true // We're handling it with Vision fallback
            } else if errorMessage.contains("MPSGraphExecutable") || errorMessage.contains("E5RT") {
                print("🔧 Neural Engine compilation problem detected")
                print("🚀 FAST FALLBACK: Immediately switching to Vision Framework")
                // Same fast fallback strategy
                recordE5RTIssue()
                clearCoreMLCache()
                DispatchQueue.global(qos: .userInitiated).async {
                    self.processWithVisionFramework(cgImage: cgImage, completion: completion)
                }
                return true // We're handling it with Vision fallback
            } else if errorMessage.contains("compute") || errorMessage.contains("Neural") {
                print("🔧 Intel Mac detected - trying CPU-only fallback")
                // Try CPU-only as final fallback for Intel Macs
                return tryLoadModelWithCPUOnly(cgImage: cgImage, modelName: modelName, displayName: displayName, completion: completion)
            }
            return false // Failed
        }
    }
    /// Turns the model's mask pixel buffer into a background-removed
    /// NSImage: scales the mask to the original image's extent, then
    /// blends the original over an empty (transparent) background using
    /// the mask. Returns nil when the filter or CGImage creation fails.
    private func convertPixelBufferToNSImage(_ pixelBuffer: CVPixelBuffer, originalImage: CGImage) -> NSImage? {
        print("🚀 FAST RMBG: Starting optimized pixel buffer conversion...")
        // Convert the grayscale mask to a proper background-removed image
        let maskCIImage = CIImage(cvPixelBuffer: pixelBuffer)
        let originalCIImage = CIImage(cgImage: originalImage)
        // Resize mask to match original image size
        let scaleX = originalCIImage.extent.width / maskCIImage.extent.width
        let scaleY = originalCIImage.extent.height / maskCIImage.extent.height
        let scaledMask = maskCIImage.transformed(by: CGAffineTransform(scaleX: scaleX, y: scaleY))
        // Apply the mask to remove background
        let maskFilter = CIFilter.blendWithMask()
        maskFilter.inputImage = originalCIImage
        maskFilter.backgroundImage = CIImage.empty()
        maskFilter.maskImage = scaledMask
        guard let outputImage = maskFilter.outputImage else {
            print("❌ Mask filter failed")
            return nil
        }
        // OPTIMIZATION: Use same optimized context instead of creating new one
        let optimizedContext = getOptimizedCIContext()
        guard let cgResult = optimizedContext.createCGImage(outputImage, from: outputImage.extent) else {
            print("❌ CGImage creation failed")
            return nil
        }
        print("✅ RMBG OPTIMIZED: Pixel buffer converted efficiently!")
        return NSImage(cgImage: cgResult, size: NSSize(width: cgResult.width, height: cgResult.height))
    }
    // MARK: - Intel Mac CPU-Only Fallback
    /// Last-resort load path: same flow as tryLoadAndProcessModel but with
    /// a CPU-only model configuration. Returns true when the request was
    /// started, false when loading failed (no further fallback here).
    private func tryLoadModelWithCPUOnly(cgImage: CGImage, modelName: String, displayName: String, completion: @escaping (NSImage?) -> Void) -> Bool {
        let modelPath = getModelPath()
        guard FileManager.default.fileExists(atPath: modelPath) else {
            print("\(displayName) model not found for CPU fallback")
            return false
        }
        let finalModelURL = URL(fileURLWithPath: modelPath)
        do {
            print("🔧 Trying CPU-only mode for \(displayName) on Intel Mac...")
            // Compile model if needed
            let compiledModelURL: URL
            if finalModelURL.pathExtension == "mlpackage" {
                compiledModelURL = try MLModel.compileModel(at: finalModelURL)
            } else {
                compiledModelURL = finalModelURL
            }
            // CPU-only configuration for Intel Macs with retry logic
            print("⚡ Using CPU-only compute units for Intel Mac compatibility")
            let model = try loadModelWithCPUOnly(at: compiledModelURL)
            let visionModel = try VNCoreMLModel(for: model)
            print("\(displayName) model loaded successfully in CPU-only mode")
            // Create the request
            let request = VNCoreMLRequest(model: visionModel) { request, error in
                if let error = error {
                    print("❌ CPU-only Core ML request error for \(displayName): \(error)")
                    DispatchQueue.main.async { completion(nil) }
                    return
                }
                guard let results = request.results,
                      let pixelBufferObservation = results.first as? VNPixelBufferObservation else {
                    print("❌ No valid result from \(displayName) model (CPU-only)")
                    DispatchQueue.main.async { completion(nil) }
                    return
                }
                // Convert the result to NSImage
                if let resultImage = self.convertPixelBufferToNSImage(pixelBufferObservation.pixelBuffer, originalImage: cgImage) {
                    print("\(displayName) processing successful (CPU-only mode)")
                    DispatchQueue.main.async { completion(resultImage) }
                } else {
                    print("❌ Could not convert \(displayName) result (CPU-only)")
                    DispatchQueue.main.async { completion(nil) }
                }
            }
            // Configure request
            request.imageCropAndScaleOption = .scaleFill
            // Perform the request
            let handler = VNImageRequestHandler(cgImage: cgImage, options: [:])
            try handler.perform([request])
            return true // Success
        } catch {
            print("❌ CPU-only fallback failed for \(displayName): \(error)")
            return false // Failed
        }
    }
}
// MARK: - Drag & Drop Image View
/// NSImageView subclass that accepts dropped image files (forwarded to
/// `dragDropDelegate`) and, when `enableDragOut` is set, lets the user
/// drag the displayed image out as a promised PNG file.
class DragDropImageView: NSImageView, NSFilePromiseProviderDelegate, NSDraggingSource {
    // Weak to avoid a retain cycle with the owning controller.
    weak var dragDropDelegate: ImageDragDropDelegate?
    var enableDragOut: Bool = false // Enable dragging images out of this view
    override func awakeFromNib() {
        super.awakeFromNib()
        setupDragDrop()
    }
    // Accept file URLs dropped onto the view.
    private func setupDragDrop() {
        registerForDraggedTypes([.fileURL])
    }
// MARK: - Drag Out Functionality
override func mouseDown(with event: NSEvent) {
guard enableDragOut, let image = self.image else {
super.mouseDown(with: event)
return
}
// Start drag operation with smaller thumbnail
let dragItem = NSDraggingItem(pasteboardWriter: image)
// Create smaller drag frame (e.g., 120x120 max)
let maxDragSize: CGFloat = 120
let imageSize = image.size
let aspectRatio = imageSize.width / imageSize.height
var dragWidth: CGFloat
var dragHeight: CGFloat
if aspectRatio > 1 {
// Landscape
dragWidth = min(maxDragSize, imageSize.width)
dragHeight = dragWidth / aspectRatio
} else {
// Portrait or square
dragHeight = min(maxDragSize, imageSize.height)
dragWidth = dragHeight * aspectRatio
}
// Center the drag frame within the view
let dragFrame = NSRect(
x: (self.bounds.width - dragWidth) / 2,
y: (self.bounds.height - dragHeight) / 2,
width: dragWidth,
height: dragHeight
)
dragItem.setDraggingFrame(dragFrame, contents: image)
// Create temporary file for dragging
if let tiffData = image.tiffRepresentation,
let bitmapRep = NSBitmapImageRep(data: tiffData),
let pngData = bitmapRep.representation(using: .png, properties: [:]) {
let tempURL = FileManager.default.temporaryDirectory.appendingPathComponent("background_removed_\(UUID().uuidString).png")
do {
try pngData.write(to: tempURL)
let filePromise = NSFilePromiseProvider(fileType: "public.png", delegate: self)
filePromise.userInfo = ["tempURL": tempURL]
let fileDragItem = NSDraggingItem(pasteboardWriter: filePromise)
fileDragItem.setDraggingFrame(dragFrame, contents: image)
beginDraggingSession(with: [fileDragItem], event: event, source: self)
print("🎯 Started dragging background-removed image")
} catch {
print("❌ Failed to create temp file for dragging: \(error)")
super.mouseDown(with: event)
}
} else {
super.mouseDown(with: event)
}
}
    /// Offers `.copy` and highlights the view when the pasteboard carries
    /// at least one file URL with a recognized image extension.
    override func draggingEntered(_ sender: NSDraggingInfo) -> NSDragOperation {
        let pasteboard = sender.draggingPasteboard
        guard let types = pasteboard.types else { return [] }
        if types.contains(.fileURL) {
            if let urls = pasteboard.readObjects(forClasses: [NSURL.self], options: nil) as? [URL] {
                // Extensions accepted as droppable images (compared lowercased).
                let imageTypes = ["png", "jpg", "jpeg", "gif", "tiff", "bmp", "heic", "heif"]
                for url in urls {
                    if imageTypes.contains(url.pathExtension.lowercased()) {
                        // Visual feedback - use adaptive color
                        // NOTE(review): assumes the view is layer-backed;
                        // with wantsLayer false this silently does nothing.
                        self.layer?.borderWidth = 2
                        self.layer?.borderColor = ThemeManager.shared.buttonTintColor.cgColor
                        return .copy
                    }
                }
            }
        }
        return []
    }
    /// Clears the drop-highlight border when the drag leaves the view.
    override func draggingExited(_ sender: NSDraggingInfo?) {
        // Remove visual feedback
        self.layer?.borderWidth = 0
    }
    /// Loads the first droppable image URL from the pasteboard, forwards
    /// it to `dragDropDelegate`, clears the highlight, and returns true.
    /// Returns false when nothing on the pasteboard is a loadable image.
    override func performDragOperation(_ sender: NSDraggingInfo) -> Bool {
        let pasteboard = sender.draggingPasteboard
        guard let urls = pasteboard.readObjects(forClasses: [NSURL.self], options: nil) as? [URL] else {
            return false
        }
        // Keep in sync with the extension list used in draggingEntered(_:).
        let imageTypes = ["png", "jpg", "jpeg", "gif", "tiff", "bmp", "heic", "heif"]
        for url in urls {
            if imageTypes.contains(url.pathExtension.lowercased()) {
                if let image = NSImage(contentsOf: url) {
                    dragDropDelegate?.didDropImage(image, from: url.path)
                    // Remove visual feedback
                    self.layer?.borderWidth = 0
                    return true
                }
            }
        }
        return false
    }
    // MARK: - NSFilePromiseProviderDelegate
    /// File name the drop destination should use for the promised PNG.
    /// Note: this UUID differs from the temp file's name; only the
    /// destination name is affected.
    func filePromiseProvider(_ filePromiseProvider: NSFilePromiseProvider, fileNameForType fileType: String) -> String {
        return "background_removed_\(UUID().uuidString).png"
    }
    /// Fulfils the promise by copying the pre-written temp PNG (stored in
    /// the provider's userInfo by mouseDown) to `url`, replacing any
    /// existing file.
    /// NOTE(review): the temp file is never deleted here — it remains in
    /// the temporary directory until the system purges it.
    func filePromiseProvider(_ filePromiseProvider: NSFilePromiseProvider, writePromiseTo url: URL, completionHandler: @escaping (Error?) -> Void) {
        guard let userInfo = filePromiseProvider.userInfo as? [String: Any],
              let tempURL = userInfo["tempURL"] as? URL else {
            completionHandler(NSError(domain: "DragDropImageView", code: 1, userInfo: [NSLocalizedDescriptionKey: "No temp URL found"]))
            return
        }
        do {
            if FileManager.default.fileExists(atPath: url.path) {
                try FileManager.default.removeItem(at: url)
            }
            try FileManager.default.copyItem(at: tempURL, to: url)
            completionHandler(nil)
            print("✅ Successfully wrote dragged image to: \(url.lastPathComponent)")
        } catch {
            completionHandler(error)
            print("❌ Failed to write dragged image: \(error)")
        }
    }
    // MARK: - NSDraggingSource
    /// Drags originating here always copy; the source image is untouched.
    func draggingSession(_ session: NSDraggingSession, sourceOperationMaskFor context: NSDraggingContext) -> NSDragOperation {
        return .copy
    }
    // Logging-only session lifecycle hooks.
    func draggingSession(_ session: NSDraggingSession, willBeginAt screenPoint: NSPoint) {
        print("🎯 Drag session beginning at \(screenPoint)")
    }
    func draggingSession(_ session: NSDraggingSession, endedAt screenPoint: NSPoint, operation: NSDragOperation) {
        print("🎯 Drag session ended with operation: \(operation.rawValue)")
    }
}
/// Adopted by the owner of a DragDropImageView to receive dropped images
/// along with the source file's path.
protocol ImageDragDropDelegate: AnyObject {
    func didDropImage(_ image: NSImage, from path: String)
}
// MARK: - Card Style Button (for method selection)
/// A selectable "card" control used for method selection: a circular SF Symbol
/// icon plus bold title and smaller subtitle on a theme-aware rounded card.
/// Selection, hover and click feedback are animated with Core Animation.
/// This is a plain NSView, not an NSControl — clicks are dispatched manually
/// through the `target`/`action` pair in `mouseDown(with:)`.
class CardStyleButton: NSView {
    private let titleLabel: NSTextField = NSTextField(labelWithString: "")
    private let subtitleLabel: NSTextField = NSTextField(labelWithString: "")
    private let iconImageView: NSImageView = NSImageView()
    private var iconBackground: NSView!
    private var cardBackground: NSView!
    // SF Symbol name captured at init (kept for later icon reconfiguration).
    private var symbolName: String = ""
    /// Toggles the bright/glowing "selected" look; changes animate.
    var isSelected: Bool = false {
        didSet { updateSelectedState(animated: true) }
    }
    /// Dims the card and mutes all colors when false.
    var isEnabled: Bool = true {
        didSet { updateEnabledState() }
    }
    // Button action properties (manual target/action dispatch — see mouseDown).
    var target: AnyObject?
    var action: Selector?
    /// Creates the card with a title, subtitle and SF Symbol icon, and
    /// registers for theme changes so colors stay in sync with the app theme.
    init(frame frameRect: NSRect, title: String, subtitle: String, symbolName: String) {
        self.symbolName = symbolName
        super.init(frame: frameRect)
        setupCard(title: title, subtitle: subtitle, symbolName: symbolName)
        // Setup theme change observer.
        // NOTE(review): self is captured weakly, so the view cannot leak even
        // if ThemeManager retains the closure indefinitely — confirm how long
        // ThemeManager keeps registered observers.
        ThemeManager.shared.observeThemeChanges { [weak self] in
            DispatchQueue.main.async {
                self?.updateThemeColors()
            }
        }
    }
    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
    /// Builds the view hierarchy (card background → icon circle + two labels),
    /// applies the current theme colors, and activates Auto Layout constraints.
    private func setupCard(title: String, subtitle: String, symbolName: String) {
        wantsLayer = true
        layer?.cornerRadius = 12
        layer?.masksToBounds = false
        // Mouse tracking
        updateTrackingAreas()
        // Card background with blur effect - adaptive colors
        cardBackground = NSView()
        cardBackground.wantsLayer = true
        cardBackground.translatesAutoresizingMaskIntoConstraints = false
        cardBackground.layer?.cornerRadius = 12
        cardBackground.layer?.backgroundColor = ThemeManager.shared.secondaryContainerBackground.withAlphaComponent(0.3).cgColor
        cardBackground.layer?.borderWidth = 1
        cardBackground.layer?.borderColor = ThemeManager.shared.primaryTextColor.withAlphaComponent(0.2).cgColor
        addSubview(cardBackground)
        // Icon background - adaptive colors
        iconBackground = NSView()
        iconBackground.wantsLayer = true
        iconBackground.translatesAutoresizingMaskIntoConstraints = false
        iconBackground.layer?.cornerRadius = 20
        iconBackground.layer?.backgroundColor = ThemeManager.shared.gridCellIconBackground.cgColor
        cardBackground.addSubview(iconBackground)
        // SF Symbol icon
        let baseImage = NSImage(systemSymbolName: symbolName, accessibilityDescription: nil)
        let configuredImage = baseImage?.withSymbolConfiguration(NSImage.SymbolConfiguration(pointSize: 20, weight: .semibold))
        iconImageView.image = configuredImage
        iconImageView.contentTintColor = ThemeManager.shared.primaryTextColor
        iconImageView.translatesAutoresizingMaskIntoConstraints = false
        iconImageView.imageScaling = .scaleProportionallyDown
        iconImageView.imageAlignment = .alignCenter
        iconBackground.addSubview(iconImageView)
        // Title label - adaptive colors
        titleLabel.stringValue = title
        titleLabel.font = NSFont.systemFont(ofSize: 12, weight: .bold)
        titleLabel.textColor = ThemeManager.shared.primaryTextColor
        titleLabel.alignment = .left
        titleLabel.translatesAutoresizingMaskIntoConstraints = false
        titleLabel.usesSingleLineMode = true
        cardBackground.addSubview(titleLabel)
        // Subtitle label - adaptive colors
        subtitleLabel.stringValue = subtitle
        subtitleLabel.font = NSFont.systemFont(ofSize: 9, weight: .medium)
        subtitleLabel.textColor = ThemeManager.shared.secondaryTextColor
        subtitleLabel.alignment = .left
        subtitleLabel.translatesAutoresizingMaskIntoConstraints = false
        subtitleLabel.usesSingleLineMode = true
        cardBackground.addSubview(subtitleLabel)
        // Layout constraints
        NSLayoutConstraint.activate([
            // Card background fills the entire view
            cardBackground.topAnchor.constraint(equalTo: topAnchor),
            cardBackground.leadingAnchor.constraint(equalTo: leadingAnchor),
            cardBackground.trailingAnchor.constraint(equalTo: trailingAnchor),
            cardBackground.bottomAnchor.constraint(equalTo: bottomAnchor),
            // Icon background (40×40 circle, left-aligned, vertically centered)
            iconBackground.leadingAnchor.constraint(equalTo: cardBackground.leadingAnchor, constant: 12),
            iconBackground.centerYAnchor.constraint(equalTo: cardBackground.centerYAnchor),
            iconBackground.widthAnchor.constraint(equalToConstant: 40),
            iconBackground.heightAnchor.constraint(equalToConstant: 40),
            // Icon image centered inside the circle
            iconImageView.centerXAnchor.constraint(equalTo: iconBackground.centerXAnchor),
            iconImageView.centerYAnchor.constraint(equalTo: iconBackground.centerYAnchor),
            iconImageView.widthAnchor.constraint(equalToConstant: 24),
            iconImageView.heightAnchor.constraint(equalToConstant: 24),
            // Title label sits just above the card's vertical center
            titleLabel.leadingAnchor.constraint(equalTo: iconBackground.trailingAnchor, constant: 12),
            titleLabel.trailingAnchor.constraint(equalTo: cardBackground.trailingAnchor, constant: -12),
            titleLabel.topAnchor.constraint(equalTo: cardBackground.centerYAnchor, constant: -12),
            // Subtitle label directly under the title
            subtitleLabel.leadingAnchor.constraint(equalTo: titleLabel.leadingAnchor),
            subtitleLabel.trailingAnchor.constraint(equalTo: titleLabel.trailingAnchor),
            subtitleLabel.topAnchor.constraint(equalTo: titleLabel.bottomAnchor, constant: 2)
        ])
        // Initial shadow - adaptive colors; opacity 0 until the card is selected/hovered.
        cardBackground.layer?.shadowColor = ThemeManager.shared.shadowColor.cgColor
        cardBackground.layer?.shadowOffset = CGSize(width: 0, height: 2)
        cardBackground.layer?.shadowRadius = 8
        cardBackground.layer?.shadowOpacity = 0.0
        // Set anchor points for smooth scaling around the layer center.
        cardBackground.layer?.anchorPoint = CGPoint(x: 0.5, y: 0.5)
        iconBackground.layer?.anchorPoint = CGPoint(x: 0.5, y: 0.5)
    }
    /// Replaces all tracking areas with a single one covering `bounds`,
    /// so hover enter/exit events track the current size.
    override func updateTrackingAreas() {
        super.updateTrackingAreas()
        for trackingArea in trackingAreas {
            removeTrackingArea(trackingArea)
        }
        let trackingArea = NSTrackingArea(rect: bounds,
                                          options: [.mouseEnteredAndExited, .activeAlways],
                                          owner: self,
                                          userInfo: nil)
        addTrackingArea(trackingArea)
    }
    /// Applies the selected/unselected appearance. When `animated`, colors fade
    /// over 0.6 s and a bounce keyframe animation plays on the icon on select.
    private func updateSelectedState(animated: Bool) {
        if animated {
            NSAnimationContext.runAnimationGroup { context in
                context.duration = 0.6
                context.timingFunction = CAMediaTimingFunction(name: .easeInEaseOut)
                if isSelected {
                    // Selected state: brighter, glowing - adaptive colors
                    cardBackground.animator().layer?.backgroundColor = ThemeManager.shared.secondaryContainerBackground.withAlphaComponent(0.6).cgColor
                    cardBackground.animator().layer?.borderColor = ThemeManager.shared.buttonTintColor.withAlphaComponent(0.8).cgColor
                    cardBackground.animator().layer?.shadowOpacity = ThemeManager.shared.shadowOpacity
                    iconBackground.animator().layer?.backgroundColor = ThemeManager.shared.buttonTintColor.withAlphaComponent(0.6).cgColor
                    titleLabel.animator().textColor = ThemeManager.shared.primaryTextColor
                    subtitleLabel.animator().textColor = ThemeManager.shared.primaryTextColor.withAlphaComponent(0.9)
                } else {
                    // Unselected state: dimmer - adaptive colors
                    cardBackground.animator().layer?.backgroundColor = ThemeManager.shared.secondaryContainerBackground.withAlphaComponent(0.3).cgColor
                    cardBackground.animator().layer?.borderColor = ThemeManager.shared.primaryTextColor.withAlphaComponent(0.2).cgColor
                    cardBackground.animator().layer?.shadowOpacity = 0.0
                    iconBackground.animator().layer?.backgroundColor = ThemeManager.shared.gridCellIconBackground.cgColor
                    titleLabel.animator().textColor = ThemeManager.shared.primaryTextColor.withAlphaComponent(0.8)
                    subtitleLabel.animator().textColor = ThemeManager.shared.secondaryTextColor
                }
            }
            // Separate animation for icon bounce (scale 1.0 → 1.2 → 1.0)
            if isSelected {
                let bounceAnimation = CAKeyframeAnimation(keyPath: "transform.scale")
                bounceAnimation.values = [1.0, 1.2, 1.0]
                bounceAnimation.keyTimes = [0.0, 0.3, 1.0]
                bounceAnimation.duration = 0.5
                bounceAnimation.timingFunction = CAMediaTimingFunction(name: .easeInEaseOut)
                iconBackground.layer?.add(bounceAnimation, forKey: "bounce")
            }
        } else {
            // Immediate update without animation - adaptive colors
            if isSelected {
                cardBackground.layer?.backgroundColor = ThemeManager.shared.secondaryContainerBackground.withAlphaComponent(0.6).cgColor
                cardBackground.layer?.borderColor = ThemeManager.shared.buttonTintColor.withAlphaComponent(0.8).cgColor
                cardBackground.layer?.shadowOpacity = ThemeManager.shared.shadowOpacity
                iconBackground.layer?.backgroundColor = ThemeManager.shared.buttonTintColor.withAlphaComponent(0.6).cgColor
                titleLabel.textColor = ThemeManager.shared.primaryTextColor
                subtitleLabel.textColor = ThemeManager.shared.primaryTextColor.withAlphaComponent(0.9)
            } else {
                cardBackground.layer?.backgroundColor = ThemeManager.shared.secondaryContainerBackground.withAlphaComponent(0.3).cgColor
                cardBackground.layer?.borderColor = ThemeManager.shared.primaryTextColor.withAlphaComponent(0.2).cgColor
                cardBackground.layer?.shadowOpacity = 0.0
                iconBackground.layer?.backgroundColor = ThemeManager.shared.gridCellIconBackground.cgColor
                titleLabel.textColor = ThemeManager.shared.primaryTextColor.withAlphaComponent(0.8)
                subtitleLabel.textColor = ThemeManager.shared.secondaryTextColor
            }
        }
    }
    /// Applies the enabled/disabled look (disabled = half alpha, muted colors)
    /// and rebuilds tracking areas.
    private func updateEnabledState() {
        if isEnabled {
            alphaValue = 1.0
            cardBackground.layer?.backgroundColor = ThemeManager.shared.secondaryContainerBackground.withAlphaComponent(0.3).cgColor
            cardBackground.layer?.borderColor = ThemeManager.shared.primaryTextColor.withAlphaComponent(0.2).cgColor
            titleLabel.textColor = ThemeManager.shared.primaryTextColor.withAlphaComponent(0.8)
            subtitleLabel.textColor = ThemeManager.shared.secondaryTextColor
            iconBackground.layer?.backgroundColor = ThemeManager.shared.gridCellIconBackground.cgColor
            iconImageView.contentTintColor = ThemeManager.shared.primaryTextColor
        } else {
            alphaValue = 0.5
            cardBackground.layer?.backgroundColor = ThemeManager.shared.secondaryContainerBackground.withAlphaComponent(0.1).cgColor
            cardBackground.layer?.borderColor = ThemeManager.shared.primaryTextColor.withAlphaComponent(0.1).cgColor
            titleLabel.textColor = ThemeManager.shared.primaryTextColor.withAlphaComponent(0.4)
            subtitleLabel.textColor = ThemeManager.shared.secondaryTextColor.withAlphaComponent(0.5)
            iconBackground.layer?.backgroundColor = ThemeManager.shared.gridCellIconBackground.withAlphaComponent(0.5).cgColor
            iconImageView.contentTintColor = ThemeManager.shared.primaryTextColor.withAlphaComponent(0.5)
        }
        updateTrackingAreas()
    }
    /// Hover-in: slight card scale-up, stronger glow, and an icon pulse.
    override func mouseEntered(with event: NSEvent) {
        guard isEnabled else { return }
        NSAnimationContext.runAnimationGroup { context in
            context.duration = 0.2
            context.timingFunction = CAMediaTimingFunction(name: .easeOut)
            // Hover effect: slight scale and enhanced glow
            cardBackground.animator().layer?.transform = CATransform3DMakeScale(1.02, 1.02, 1.0)
            if isSelected {
                cardBackground.animator().layer?.shadowOpacity = ThemeManager.shared.shadowOpacity * 1.5
            } else {
                cardBackground.animator().layer?.shadowOpacity = ThemeManager.shared.shadowOpacity * 0.8
                cardBackground.animator().layer?.backgroundColor = ThemeManager.shared.secondaryContainerBackground.withAlphaComponent(0.4).cgColor
            }
        }
        // Icon hover animation (auto-reversing pulse to 1.1×)
        let pulseAnimation = CABasicAnimation(keyPath: "transform.scale")
        pulseAnimation.fromValue = 1.0
        pulseAnimation.toValue = 1.1
        pulseAnimation.duration = 0.15
        pulseAnimation.autoreverses = true
        iconBackground.layer?.add(pulseAnimation, forKey: "pulse")
    }
    /// Hover-out: resets the card scale, shadow, and background to the
    /// current selection state's resting appearance.
    override func mouseExited(with event: NSEvent) {
        guard isEnabled else { return }
        NSAnimationContext.runAnimationGroup { context in
            context.duration = 0.2
            context.timingFunction = CAMediaTimingFunction(name: .easeOut)
            // Reset hover effect
            cardBackground.animator().layer?.transform = CATransform3DIdentity
            if isSelected {
                cardBackground.animator().layer?.shadowOpacity = ThemeManager.shared.shadowOpacity
            } else {
                cardBackground.animator().layer?.shadowOpacity = 0.0
                cardBackground.animator().layer?.backgroundColor = ThemeManager.shared.secondaryContainerBackground.withAlphaComponent(0.3).cgColor
            }
        }
        iconBackground.layer?.removeAnimation(forKey: "pulse")
    }
    /// Click handling: press/release animation, then manual target/action
    /// dispatch. When disabled, flashes the card alpha as "rejected" feedback
    /// instead of firing the action.
    override func mouseDown(with event: NSEvent) {
        guard isEnabled else {
            // Show a subtle "disabled" feedback
            NSAnimationContext.runAnimationGroup({ context in
                context.duration = 0.1
                self.animator().alphaValue = 0.3
            }) {
                NSAnimationContext.runAnimationGroup({ context in
                    context.duration = 0.1
                    self.animator().alphaValue = 0.5
                })
            }
            return
        }
        // Click animation: press down to 0.98×, then release to the 1.02× hover scale
        NSAnimationContext.runAnimationGroup({ context in
            context.duration = 0.1
            cardBackground.animator().layer?.transform = CATransform3DMakeScale(0.98, 0.98, 1.0)
        }) {
            NSAnimationContext.runAnimationGroup({ context in
                context.duration = 0.1
                self.cardBackground.animator().layer?.transform = CATransform3DMakeScale(1.02, 1.02, 1.0)
            })
        }
        // Handle click action
        if let target = target, let action = action {
            _ = target.perform(action, with: self)
        }
    }
    // MARK: - Theme Management
    /// Re-applies all theme-derived colors for the current selection state;
    /// invoked from the ThemeManager observer registered in init.
    private func updateThemeColors() {
        // Update card background
        if isSelected {
            cardBackground.layer?.backgroundColor = ThemeManager.shared.secondaryContainerBackground.withAlphaComponent(0.6).cgColor
            cardBackground.layer?.borderColor = ThemeManager.shared.buttonTintColor.withAlphaComponent(0.8).cgColor
            iconBackground.layer?.backgroundColor = ThemeManager.shared.buttonTintColor.withAlphaComponent(0.6).cgColor
            titleLabel.textColor = ThemeManager.shared.primaryTextColor
            subtitleLabel.textColor = ThemeManager.shared.primaryTextColor.withAlphaComponent(0.9)
        } else {
            cardBackground.layer?.backgroundColor = ThemeManager.shared.secondaryContainerBackground.withAlphaComponent(0.3).cgColor
            cardBackground.layer?.borderColor = ThemeManager.shared.primaryTextColor.withAlphaComponent(0.2).cgColor
            iconBackground.layer?.backgroundColor = ThemeManager.shared.gridCellIconBackground.cgColor
            titleLabel.textColor = ThemeManager.shared.primaryTextColor.withAlphaComponent(0.8)
            subtitleLabel.textColor = ThemeManager.shared.secondaryTextColor
        }
        // Update icon color
        iconImageView.contentTintColor = ThemeManager.shared.primaryTextColor
        // Update shadow color
        cardBackground.layer?.shadowColor = ThemeManager.shared.shadowColor.cgColor
        // Update enabled state colors if disabled
        if !isEnabled {
            updateEnabledState()
        }
    }
}
// MARK: - Action Style Button (matches GridCellView style)
/// An animated action button matching the GridCellView style: a circular SF
/// Symbol icon that slides toward the trailing edge on hover while a text
/// label fades in. In "small button" mode (info/close) the label is hidden
/// and the icon zooms instead of sliding. Plain NSView with manual
/// target/action dispatch in `mouseDown(with:)`.
class ActionStyleButton: NSView {
    private let label: NSTextField = NSTextField(labelWithString: "")
    private let iconImageView: NSImageView = NSImageView()
    private var iconBackground: NSView!
    // Mutable constraints so hover/small-button modes can re-layout the icon.
    private var iconCenterConstraint: NSLayoutConstraint!
    private var iconWidthConstraint: NSLayoutConstraint!
    private var iconHeightConstraint: NSLayoutConstraint!
    private var iconImageWidthConstraint: NSLayoutConstraint!
    private var iconImageHeightConstraint: NSLayoutConstraint!
    // Resting horizontal offset of the icon from the button's center.
    private let iconStartOffset: CGFloat = 30
    private var isHovered: Bool = false
    // SF Symbol name captured at init; reused when sizes are reconfigured.
    private var symbolName: String = ""
    /// Small (icon-only) mode for info/close buttons; toggling reconfigures layout.
    var isSmallButton: Bool = false {
        didSet { updateForSmallButton() }
    }
    /// Disabling suppresses hover/click handling (tracking areas are rebuilt).
    var isEnabled: Bool = true {
        didSet { updateEnabledState() }
    }
    // Button action properties (manual target/action dispatch).
    var target: AnyObject?
    var action: Selector?
    /// Creates the button with label text and an SF Symbol, and registers a
    /// theme-change observer so colors track the app theme.
    init(frame frameRect: NSRect, text: String, symbolName: String) {
        self.symbolName = symbolName
        super.init(frame: frameRect)
        setupButton(text: text, symbolName: symbolName)
        // Setup theme change observer
        ThemeManager.shared.observeThemeChanges { [weak self] in
            DispatchQueue.main.async {
                self?.updateThemeColors()
            }
        }
    }
    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
    /// Builds the label + circular icon hierarchy, configures theme colors,
    /// and activates the (mutable) Auto Layout constraints.
    private func setupButton(text: String, symbolName: String) {
        wantsLayer = true
        // layer?.backgroundColor = NSColor.red.withAlphaComponent(0.3).cgColor // DEBUG BG
        // Mouse tracking for hover effects - will be updated in updateTrackingAreas
        updateTrackingAreas()
        layer?.cornerRadius = 0
        layer?.masksToBounds = false
        // Label setup - adaptive colors; starts dimmed (0.3) and brightens on hover.
        label.stringValue = text
        label.font = NSFont.systemFont(ofSize: 12, weight: .semibold)
        label.textColor = ThemeManager.shared.primaryTextColor
        label.alignment = .left
        label.alphaValue = text.isEmpty ? 0 : 0.3 // Hidden for small buttons
        label.translatesAutoresizingMaskIntoConstraints = false
        label.usesSingleLineMode = true
        label.lineBreakMode = .byTruncatingTail
        addSubview(label)
        // Circular icon background - adaptive colors
        let iconBackground = NSView()
        iconBackground.wantsLayer = true
        iconBackground.translatesAutoresizingMaskIntoConstraints = false
        iconBackground.layer?.cornerRadius = 18 // 36×36 circle (will be adjusted for small buttons)
        iconBackground.layer?.backgroundColor = ThemeManager.shared.gridCellIconBackground.cgColor
        addSubview(iconBackground)
        // SF Symbol icon
        let baseImage = NSImage(systemSymbolName: symbolName, accessibilityDescription: nil)
        // Adjusted pointSize for normal state for better proportion within the 18x18 constraint
        let configuredImage = baseImage?.withSymbolConfiguration(NSImage.SymbolConfiguration(pointSize: 16, weight: .semibold))
        iconImageView.image = configuredImage
        iconImageView.contentTintColor = ThemeManager.shared.primaryTextColor
        iconImageView.translatesAutoresizingMaskIntoConstraints = false
        iconImageView.imageScaling = .scaleProportionallyDown
        iconImageView.imageAlignment = .alignCenter
        iconImageView.wantsLayer = true
        iconBackground.addSubview(iconImageView)
        // Store reference to icon background for small button adjustments
        self.iconBackground = iconBackground
        // Layout constraints (center/size constraints kept as properties so
        // hover and small-button modes can mutate their constants)
        iconCenterConstraint = iconBackground.centerXAnchor.constraint(equalTo: centerXAnchor, constant: iconStartOffset)
        iconWidthConstraint = iconBackground.widthAnchor.constraint(equalToConstant: 36)
        iconHeightConstraint = iconBackground.heightAnchor.constraint(equalTo: iconBackground.widthAnchor)
        // Constraints for the iconImageView itself, to ensure it's centered and sized within iconBackground
        iconImageWidthConstraint = iconImageView.widthAnchor.constraint(equalToConstant: 18) // Initial size for normal buttons
        iconImageHeightConstraint = iconImageView.heightAnchor.constraint(equalToConstant: 18) // Initial size for normal buttons
        NSLayoutConstraint.activate([
            iconCenterConstraint,
            iconBackground.centerYAnchor.constraint(equalTo: centerYAnchor),
            iconWidthConstraint,
            iconHeightConstraint,
            iconImageView.centerXAnchor.constraint(equalTo: iconBackground.centerXAnchor),
            iconImageView.centerYAnchor.constraint(equalTo: iconBackground.centerYAnchor),
            iconImageWidthConstraint, // Activate new constraint
            iconImageHeightConstraint, // Activate new constraint
            label.leadingAnchor.constraint(equalTo: leadingAnchor, constant: 12),
            label.trailingAnchor.constraint(equalTo: iconBackground.leadingAnchor, constant: -8),
            label.centerYAnchor.constraint(equalTo: iconBackground.centerYAnchor)
        ])
        // Initial transform
        layer?.anchorPoint = CGPoint(x: 0.5, y: 0.5)
        layer?.transform = CATransform3DIdentity
        // Set anchor point for icon background to center for proper scaling.
        // NOTE(review): the original comment claimed this must happen BEFORE
        // constraint activation to prevent position jumping, but the
        // constraints are activated above — confirm no visual jump occurs.
        iconBackground.layer?.anchorPoint = CGPoint(x: 0.5, y: 0.5)
        // Add subtle glow to label - adaptive colors
        let shadow = NSShadow()
        shadow.shadowColor = ThemeManager.shared.primaryTextColor.withAlphaComponent(0.4)
        shadow.shadowBlurRadius = 4
        shadow.shadowOffset = NSSize(width: 0, height: -1)
        label.shadow = shadow
    }
    /// Rebuilds the hover tracking area with a 5 pt margin around the button
    /// so hovering is slightly forgiving near the edges.
    override func updateTrackingAreas() {
        super.updateTrackingAreas()
        // Remove existing tracking areas
        for trackingArea in trackingAreas {
            removeTrackingArea(trackingArea)
        }
        // Add tracking area covering the entire button bounds with some extra margin for easier hovering
        let expandedRect = bounds.insetBy(dx: -5, dy: -5)
        let trackingArea = NSTrackingArea(rect: expandedRect,
                                          options: [.mouseEnteredAndExited, .activeAlways],
                                          owner: self,
                                          userInfo: nil)
        addTrackingArea(trackingArea)
    }
    /// Reconfigures geometry for small (24×24, centered, label hidden) vs
    /// normal (36×36, offset, label dimmed) layout, including the symbol size.
    private func updateForSmallButton() {
        if isSmallButton {
            // For small buttons (info, close), center the icon and make it smaller
            iconCenterConstraint.constant = 0
            label.alphaValue = 0
            iconWidthConstraint.constant = 24 // iconBackground size
            iconBackground.layer?.cornerRadius = 12
            iconBackground.alphaValue = 0.8
            // Smaller icon for small buttons
            let baseImage = NSImage(systemSymbolName: symbolName, accessibilityDescription: nil)
            // pointSize 10 for 12x12 constraint seems reasonable.
            let configuredImage = baseImage?.withSymbolConfiguration(NSImage.SymbolConfiguration(pointSize: 10, weight: .semibold))
            iconImageView.image = configuredImage
            iconImageWidthConstraint.constant = 12 // Update constraint for icon image
            iconImageHeightConstraint.constant = 12 // Update constraint for icon image
        } else { // This is the NORMAL/ENABLED state for the 5 action buttons
            iconCenterConstraint.constant = iconStartOffset
            label.alphaValue = label.stringValue.isEmpty ? 0 : 0.3
            iconWidthConstraint.constant = 36 // iconBackground size
            iconBackground.layer?.cornerRadius = 18
            iconBackground.alphaValue = 1.0
            // Reset icon for normal buttons
            let baseImage = NSImage(systemSymbolName: symbolName, accessibilityDescription: nil)
            // Adjusted pointSize for normal state for better proportion
            let configuredImage = baseImage?.withSymbolConfiguration(NSImage.SymbolConfiguration(pointSize: 16, weight: .semibold))
            iconImageView.image = configuredImage
            iconImageWidthConstraint.constant = 18 // Reset constraint for icon image
            iconImageHeightConstraint.constant = 18 // Reset constraint for icon image
        }
        // Update tracking areas when button type changes
        updateTrackingAreas()
    }
    /// Rebuilds tracking areas; hover/click handlers separately guard on `isEnabled`.
    private func updateEnabledState() {
        // Update tracking areas based on enabled state
        updateTrackingAreas()
    }
    /// Animates the icon circle (and its contents) to `scale` over 2 seconds.
    func setIconScale(_ scale: CGFloat) {
        // Scale the icon background (which contains the icon) with animation
        NSAnimationContext.runAnimationGroup { context in
            context.duration = 2.0 // NOTE(review): deliberately slow (marked "INCREASED DURATION" during tuning) — confirm this is still intended
            context.timingFunction = CAMediaTimingFunction(name: .easeOut)
            self.iconBackground.animator().layer?.transform = CATransform3DMakeScale(scale, scale, 1.0)
        }
    }
    /// Applies the icon scale instantly, without animation.
    func setIconScaleImmediate(_ scale: CGFloat) {
        // Scale the icon background immediately without animation
        self.iconBackground.layer?.transform = CATransform3DMakeScale(scale, scale, 1.0)
    }
    /// Hover-in: small buttons zoom to 1.2× and brighten; normal buttons slide
    /// the icon to the trailing edge and fade the label in.
    override func mouseEntered(with event: NSEvent) {
        guard isEnabled else { return }
        if isSmallButton {
            // Zoom effect for small buttons (info, close) - anchor point already set in setupButton
            // Create scale animation
            let scaleAnimation = CABasicAnimation(keyPath: "transform.scale")
            scaleAnimation.fromValue = 1.0
            scaleAnimation.toValue = 1.2
            scaleAnimation.duration = 0.2
            scaleAnimation.timingFunction = CAMediaTimingFunction(name: .easeOut)
            scaleAnimation.fillMode = .forwards
            scaleAnimation.isRemovedOnCompletion = false
            iconBackground.layer?.add(scaleAnimation, forKey: "scaleUp")
            iconBackground.layer?.transform = CATransform3DMakeScale(1.2, 1.2, 1.0)
            // Alpha animation
            NSAnimationContext.runAnimationGroup { ctx in
                ctx.duration = 0.2
                ctx.timingFunction = CAMediaTimingFunction(name: .easeOut)
                self.iconBackground.animator().alphaValue = 1.0
            }
        } else {
            // Slide effect for regular buttons: move the icon near the right
            // edge (half-width minus half the 36 pt circle minus 2 pt padding)
            let shift = (self.bounds.width / 2) - 18 - 2
            NSAnimationContext.runAnimationGroup { ctx in
                ctx.duration = 0.15
                ctx.timingFunction = CAMediaTimingFunction(name: .easeOut)
                self.iconCenterConstraint.animator().constant = shift
                self.label.animator().alphaValue = 1.0
            }
        }
    }
    /// Hover-out: reverses the zoom (small) or slide (normal) hover effect.
    override func mouseExited(with event: NSEvent) {
        guard isEnabled else { return }
        if isSmallButton {
            // Reset zoom for small buttons
            iconBackground.layer?.removeAnimation(forKey: "scaleUp")
            let scaleAnimation = CABasicAnimation(keyPath: "transform.scale")
            scaleAnimation.fromValue = 1.2
            scaleAnimation.toValue = 1.0
            scaleAnimation.duration = 0.2
            scaleAnimation.timingFunction = CAMediaTimingFunction(name: .easeOut)
            scaleAnimation.fillMode = .forwards
            scaleAnimation.isRemovedOnCompletion = false
            iconBackground.layer?.add(scaleAnimation, forKey: "scaleDown")
            iconBackground.layer?.transform = CATransform3DIdentity
            // Alpha animation
            NSAnimationContext.runAnimationGroup { context in
                context.duration = 0.2
                context.timingFunction = CAMediaTimingFunction(name: .easeOut)
                self.iconBackground.animator().alphaValue = 0.8
            }
        } else {
            // Reset slide for regular buttons
            NSAnimationContext.runAnimationGroup { context in
                context.duration = 0.15
                context.timingFunction = CAMediaTimingFunction(name: .easeOut)
                self.iconCenterConstraint.animator().constant = self.iconStartOffset
                self.label.animator().alphaValue = 0.3
            }
        }
    }
    /// Click handling: press/release feedback (scale for small buttons, alpha
    /// for normal ones), then manual target/action dispatch.
    override func mouseDown(with event: NSEvent) {
        guard isEnabled else { return }
        // Visual feedback for click
        NSAnimationContext.runAnimationGroup({ context in
            context.duration = 0.1
            if isSmallButton {
                self.iconBackground.layer?.transform = CATransform3DMakeScale(0.9, 0.9, 1.0)
            } else {
                self.animator().alphaValue = 0.8
            }
        }) {
            NSAnimationContext.runAnimationGroup({ context in
                context.duration = 0.1
                if self.isSmallButton {
                    self.iconBackground.layer?.transform = CATransform3DMakeScale(1.2, 1.2, 1.0)
                } else {
                    self.animator().alphaValue = 1.0
                }
            })
        }
        // Handle click action
        if let target = target, let action = action {
            print("🔘 ActionStyleButton clicked: \(symbolName)")
            _ = target.perform(action, with: self)
        }
    }
    /// Dims only the SF Symbol image (not the circle) to suggest a disabled action.
    func setIconDisabledLook(disabled: Bool) {
        if disabled {
            iconImageView.alphaValue = 0.5 // Make the SF Symbol itself more transparent
        } else {
            iconImageView.alphaValue = 1.0 // Normal visibility for the icon
        }
    }
    // MARK: - Theme Management
    /// Re-applies all theme-derived colors; invoked from the ThemeManager
    /// observer registered in init.
    private func updateThemeColors() {
        // Update label color
        label.textColor = ThemeManager.shared.primaryTextColor
        // Update icon background color
        iconBackground.layer?.backgroundColor = ThemeManager.shared.gridCellIconBackground.cgColor
        // Update icon tint color
        iconImageView.contentTintColor = ThemeManager.shared.primaryTextColor
        // Update label shadow
        let shadow = NSShadow()
        shadow.shadowColor = ThemeManager.shared.primaryTextColor.withAlphaComponent(0.4)
        shadow.shadowBlurRadius = 4
        shadow.shadowOffset = NSSize(width: 0, height: -1)
        label.shadow = shadow
        // Update enabled state if disabled
        if !isEnabled {
            updateEnabledState()
        }
    }
}
// MARK: - Legacy code removed - use thumbnail-based BGR workflow instead