diff --git a/Package.swift b/Package.swift index 3ea312104..3fbdcd4f3 100644 --- a/Package.swift +++ b/Package.swift @@ -15,6 +15,14 @@ let package = Package( name: "SnapshotTesting", targets: ["SnapshotTesting"] ), + .library( + name: "SnapshotTestingPlugin", + targets: ["SnapshotTestingPlugin"] + ), + .library( + name: "ImageSerializationPlugin", + targets: ["ImageSerializationPlugin"] + ), .library( name: "InlineSnapshotTesting", targets: ["InlineSnapshotTesting"] @@ -25,7 +33,13 @@ let package = Package( ], targets: [ .target( - name: "SnapshotTesting" + name: "SnapshotTesting", + dependencies: ["ImageSerializationPlugin"] + ), + .target(name: "SnapshotTestingPlugin"), + .target( + name: "ImageSerializationPlugin", + dependencies: ["SnapshotTestingPlugin"] ), .target( name: "InlineSnapshotTesting", diff --git a/README.md b/README.md index 5782f945a..4bd214563 100644 --- a/README.md +++ b/README.md @@ -230,7 +230,7 @@ targets: [ [available-strategies]: https://swiftpackageindex.com/pointfreeco/swift-snapshot-testing/main/documentation/snapshottesting/snapshotting [defining-strategies]: https://swiftpackageindex.com/pointfreeco/swift-snapshot-testing/main/documentation/snapshottesting/customstrategies -## Plug-ins +## Strategies / Plug-ins - [AccessibilitySnapshot](https://github.com/cashapp/AccessibilitySnapshot) adds easy regression testing for iOS accessibility. @@ -273,6 +273,18 @@ targets: [ - [SnapshotVision](https://github.com/gregersson/swift-snapshot-testing-vision) adds snapshot strategy for text recognition on views and images. Uses Apples Vision framework. + - [ImageSerializer HEIC](https://github.com/mackoj/swift-snapshot-testing-plugin-heic) makes every + image-producing snapshot strategy store its reference images in the `.heic` format, which reduces + file sizes compared to PNG. + + - [ImageSerializer WEBP](https://github.com/mackoj/swift-snapshot-testing-plugin-heic) makes every + image-producing snapshot strategy store its reference images in the `.webp` format, which reduces + file sizes compared to PNG. + + - [ImageSerializer JXL](https://github.com/mackoj/swift-snapshot-testing-plugin-heic) makes every + image-producing snapshot strategy store its reference images in the `.jxl` format, which reduces + file sizes compared to PNG. + Have you written your own SnapshotTesting plug-in? [Add it here](https://github.com/pointfreeco/swift-snapshot-testing/edit/master/README.md) and submit a pull request! diff --git a/Sources/ImageSerializationPlugin/ImageSerializationPlugin.swift b/Sources/ImageSerializationPlugin/ImageSerializationPlugin.swift new file mode 100644 index 000000000..f50901e76 --- /dev/null +++ b/Sources/ImageSerializationPlugin/ImageSerializationPlugin.swift @@ -0,0 +1,87 @@ +#if canImport(SwiftUI) +import Foundation +import SnapshotTestingPlugin + +#if canImport(UIKit) +import UIKit.UIImage +/// A type alias for `UIImage` when UIKit is available. +public typealias SnapImage = UIImage +#elseif canImport(AppKit) +import AppKit.NSImage +/// A type alias for `NSImage` when AppKit is available. +public typealias SnapImage = NSImage +#endif + +/// A type alias that combines `ImageSerialization` and `SnapshotTestingPlugin` protocols. +/// +/// `ImageSerializationPlugin` is a convenient alias used to conform to both `ImageSerialization` and `SnapshotTestingPlugin` protocols. +/// This allows for image serialization plugins that also support snapshot testing, leveraging the Objective-C runtime while maintaining image serialization capabilities.
+public typealias ImageSerializationPlugin = ImageSerialization & SnapshotTestingPlugin + +// TODO: async throws will be added later to encodeImage and decodeImage +/// A protocol that defines methods for encoding and decoding images in various formats. +/// +/// The `ImageSerialization` protocol is intended for classes that provide functionality to serialize (encode) and deserialize (decode) images. +/// Implementing this protocol allows a class to specify the image format it supports and to handle image data conversions. +/// This protocol is designed to be used in environments where SwiftUI is available and supports platform-specific image types via `SnapImage`. +public protocol ImageSerialization { + + /// The image format that the serialization plugin supports. + /// + /// Each conforming class must specify the format it handles, using the `ImageSerializationFormat` enum. This property helps the `ImageSerializer` + /// determine which plugin to use for a given format during image encoding and decoding. + static var imageFormat: ImageSerializationFormat { get } + + /// Encodes a `SnapImage` into a data representation. + /// + /// This method converts the provided image into the appropriate data format. It may eventually support asynchronous operations and error handling using `async throws`. + /// + /// - Parameter image: The image to be encoded. + /// - Returns: The encoded image data, or `nil` if encoding fails. + func encodeImage(_ image: SnapImage) -> Data? + + /// Decodes image data into a `SnapImage`. + /// + /// This method converts the provided data back into an image. It may eventually support asynchronous operations and error handling using `async throws`. + /// + /// - Parameter data: The image data to be decoded. + /// - Returns: The decoded image, or `nil` if decoding fails. + func decodeImage(_ data: Data) -> SnapImage? +} +#endif + +/// An enumeration that defines the image formats supported by the `ImageSerialization` protocol. +/// +/// The `ImageSerializationFormat` enum is used to represent various image formats. It includes a predefined case for PNG images and a flexible case for plugins, +/// allowing for the extension of formats via plugins identified by unique string values. +public enum ImageSerializationFormat: RawRepresentable, Sendable, Equatable { + /// Represents the default image format aka PNG. + case png + + /// Represents a custom image format provided by a plugin. + /// + /// This case allows for the extension of image formats beyond the predefined ones by using a unique string identifier. + case plugins(String) + + /// Initializes an `ImageSerializationFormat` instance from a raw string value. + /// + /// This initializer converts a string value into an appropriate `ImageSerializationFormat` case. + /// + /// - Parameter rawValue: The string representation of the image format. + public init?(rawValue: String) { + switch rawValue { + case "png": self = .png + default: self = .plugins(rawValue) + } + } + + /// The raw string value of the `ImageSerializationFormat`. + /// + /// This computed property returns the string representation of the current image format. 
+ public var rawValue: String { + switch self { + case .png: return "png" + case let .plugins(value): return value + } + } +} diff --git a/Sources/SnapshotTesting/AssertSnapshot.swift b/Sources/SnapshotTesting/AssertSnapshot.swift index 8837fd9db..9761433cd 100644 --- a/Sources/SnapshotTesting/AssertSnapshot.swift +++ b/Sources/SnapshotTesting/AssertSnapshot.swift @@ -1,4 +1,5 @@ import XCTest +import ImageSerializationPlugin #if canImport(Testing) // NB: We are importing only the implementation of Testing because that framework is not available @@ -6,6 +7,43 @@ import XCTest @_implementationOnly import Testing #endif +/// Whether or not to change the default output image format to something else. +@available( + *, + deprecated, + message: + "Use 'withSnapshotTesting' to customize the image output format. See the documentation for more information." +) +public var imageFormat: ImageSerializationFormat { + get { + _imageFormat + } + set { _imageFormat = newValue } +} + +@_spi(Internals) +public var _imageFormat: ImageSerializationFormat { + get { +#if canImport(Testing) + if let test = Test.current { + for trait in test.traits.reversed() { + if let diffTool = (trait as? _SnapshotsTestTrait)?.configuration.imageFormat { + return diffTool + } + } + } +#endif + return __imageFormat + } + set { + __imageFormat = newValue + } +} + +@_spi(Internals) +public var __imageFormat: ImageSerializationFormat = .png + + /// Enhances failure messages with a command line diff tool expression that can be copied and pasted /// into a terminal. @available( diff --git a/Sources/SnapshotTesting/Documentation.docc/Articles/Plugins.md b/Sources/SnapshotTesting/Documentation.docc/Articles/Plugins.md new file mode 100644 index 000000000..42ef29e31 --- /dev/null +++ b/Sources/SnapshotTesting/Documentation.docc/Articles/Plugins.md @@ -0,0 +1,27 @@ +# Plugins + +SnapshotTesting offers a wide range of built-in snapshot strategies, and over the years, third-party developers have introduced new ones. However, when there’s a need for functionality that spans multiple strategies, plugins become essential. + +## Overview + +Plugins provide greater flexibility and extensibility by enabling shared behavior across different strategies without the need to duplicate code or modify each strategy individually. They can be dynamically discovered, registered, and executed at runtime, making them ideal for adding new functionality without altering the core system. This architecture promotes modularity and decoupling, allowing features to be easily added or swapped out without impacting existing functionality. + +### Plugin architecture + +The plugin architecture is designed around the concept of **dynamic discovery and registration**. Plugins conform to specific protocols, such as `SnapshotTestingPlugin`, and are registered automatically by the `PluginRegistry`. This registry manages plugin instances, allowing them to be retrieved by identifier or filtered by the protocols they conform to. + +The primary components of the plugin system include: + +- **Plugin Protocols**: Define the behavior that plugins must implement. +- **PluginRegistry**: Manages plugin discovery, registration, and retrieval. +- **Objective-C Runtime Integration**: Allows automatic discovery of plugins that conform to specific protocols. + +The `PluginRegistry` is a singleton that registers plugins during its initialization. Plugins can be retrieved by their identifier or cast to specific types, allowing flexible interaction. 
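+
+A minimal plugin, sketched below, is simply an `NSObject` subclass that adopts `SnapshotTestingPlugin`; the class name and identifier are illustrative placeholders, not types shipped with the library:
+
+```swift
+import Foundation
+import SnapshotTestingPlugin
+
+// Illustrative plugin: the identifier just needs to be unique within the registry.
+final class MyPlugin: NSObject, SnapshotTestingPlugin {
+  static var identifier: String { "MyPlugin" }
+  override init() { super.init() }
+}
+```
+
+Because the registry scans the Objective-C runtime, a class like this is discovered and instantiated automatically; it can also be registered explicitly with `PluginRegistry.registerPlugin(_:)`.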
+ +## ImageSerializer + +The `ImageSerializer` is a plugin-based system that provides support for encoding and decoding images. It leverages the plugin architecture to extend its support for different image formats without needing to modify the core system. + +Plugins that conform to the `ImageSerializationPlugin` protocol can be registered into the `PluginRegistry` and used to encode or decode images in different formats, such as PNG, JPEG, WebP, HEIC, and more. + +When a plugin supporting a specific image format is available, the `ImageSerializer` can dynamically choose the correct plugin based on the image format required, ensuring modularity and scalability in image handling. diff --git a/Sources/SnapshotTesting/Documentation.docc/SnapshotTesting.md b/Sources/SnapshotTesting/Documentation.docc/SnapshotTesting.md index 8704d920d..42ed0d4e2 100644 --- a/Sources/SnapshotTesting/Documentation.docc/SnapshotTesting.md +++ b/Sources/SnapshotTesting/Documentation.docc/SnapshotTesting.md @@ -23,6 +23,10 @@ Powerfully flexible snapshot testing. - ``withSnapshotTesting(record:diffTool:operation:)-2kuyr`` - ``SnapshotTestingConfiguration`` +### Plugins + +- + ### Deprecations - diff --git a/Sources/SnapshotTesting/Plugins/ImageSerializer.swift b/Sources/SnapshotTesting/Plugins/ImageSerializer.swift new file mode 100644 index 000000000..356b7f848 --- /dev/null +++ b/Sources/SnapshotTesting/Plugins/ImageSerializer.swift @@ -0,0 +1,99 @@ +#if canImport(SwiftUI) +import Foundation +import ImageSerializationPlugin + +#if canImport(UIKit) +import UIKit +#elseif canImport(AppKit) +import AppKit +#endif + +/// A class responsible for encoding and decoding images using various image serialization plugins. +/// +/// The `ImageSerializer` class leverages plugins that conform to the `ImageSerialization` protocol to encode and decode images in different formats. +/// It automatically retrieves all available image serialization plugins from the `PluginRegistry` and uses them based on the specified `ImageSerializationFormat`. +/// If no plugin is found for the requested format, it defaults to using PNG encoding/decoding. +public class ImageSerializer { + + /// A collection of plugins that conform to the `ImageSerialization` protocol. + let plugins: [ImageSerialization] + + public init() { + self.plugins = PluginRegistry.allPlugins() + } + + // TODO: async throws will be added later + /// Encodes a given image into the specified image format using the appropriate plugin. + /// + /// This method attempts to encode the provided `SnapImage` into the desired format using the first plugin that supports the specified `ImageSerializationFormat`. + /// If no plugin is found for the format, it defaults to encoding the image as PNG. + /// + /// - Parameters: + /// - image: The `SnapImage` to encode. + /// - imageFormat: The format in which to encode the image. + /// - Returns: The encoded image data, or `nil` if encoding fails. + public func encodeImage(_ image: SnapImage, imageFormat: ImageSerializationFormat) /*async throws*/ -> Data? { + for plugin in self.plugins { + if type(of: plugin).imageFormat == imageFormat { + return plugin.encodeImage(image) + } + } + // Default to PNG + return encodePNG(image) + } + + // TODO: async throws will be added later + /// Decodes image data into a `SnapImage` using the appropriate plugin based on the specified image format. + /// + /// This method attempts to decode the provided data into a `SnapImage` using the first plugin that supports the specified `ImageSerializationFormat`. 
+ /// If no plugin is found for the format, it defaults to decoding the data as PNG. + /// + /// - Parameters: + /// - data: The image data to decode. + /// - imageFormat: The format in which the image data is encoded. + /// - Returns: The decoded `SnapImage`, or `nil` if decoding fails. + public func decodeImage(_ data: Data, imageFormat: ImageSerializationFormat) /*async throws*/ -> SnapImage? { + for plugin in self.plugins { + if type(of: plugin).imageFormat == imageFormat { + return plugin.decodeImage(data) + } + } + // Default to PNG + return decodePNG(data) + } + + // MARK: - Actual default Image Serializer + + /// Encodes a `SnapImage` as PNG data. + /// + /// This method provides a default implementation for encoding images as PNG. It is used as a fallback if no suitable plugin is found for the requested format. + /// + /// - Parameter image: The `SnapImage` to encode. + /// - Returns: The encoded PNG data, or `nil` if encoding fails. + private func encodePNG(_ image: SnapImage) -> Data? { +#if canImport(UIKit) + return image.pngData() +#elseif canImport(AppKit) + guard let cgImage = image.cgImage(forProposedRect: nil, context: nil, hints: nil) else { + return nil + } + let bitmapRep = NSBitmapImageRep(cgImage: cgImage) + return bitmapRep.representation(using: .png, properties: [:]) +#endif + } + + /// Decodes PNG data into a `SnapImage`. + /// + /// This method provides a default implementation for decoding PNG data into a `SnapImage`. It is used as a fallback if no suitable plugin is found for the requested format. + /// + /// - Parameter data: The PNG data to decode. + /// - Returns: The decoded `SnapImage`, or `nil` if decoding fails. + private func decodePNG(_ data: Data) -> SnapImage? { +#if canImport(UIKit) + return UIImage(data: data) +#elseif canImport(AppKit) + return NSImage(data: data) +#endif + } +} +#endif diff --git a/Sources/SnapshotTesting/Plugins/PluginRegistry.swift b/Sources/SnapshotTesting/Plugins/PluginRegistry.swift new file mode 100644 index 000000000..63c64af4c --- /dev/null +++ b/Sources/SnapshotTesting/Plugins/PluginRegistry.swift @@ -0,0 +1,99 @@ +#if canImport(SwiftUI) && canImport(ObjectiveC) +import Foundation +import ObjectiveC.runtime +import ImageSerializationPlugin +import SnapshotTestingPlugin + +/// A singleton class responsible for managing and registering plugins conforming to the `SnapshotTestingPlugin` protocol. +/// +/// The `PluginRegistry` automatically discovers and registers classes conforming to the `SnapshotTestingPlugin` protocol +/// within the Objective-C runtime. It allows retrieval of specific plugins by identifier, access to all registered plugins, +/// and filtering of plugins that conform to the `ImageSerialization` protocol. +public class PluginRegistry { + + /// Shared singleton instance of `PluginRegistry`. + private static let shared = PluginRegistry() + + /// Dictionary holding registered plugins, keyed by their identifier. + private var plugins: [String: AnyObject] = [:] + + /// Private initializer enforcing the singleton pattern. + /// + /// Automatically triggers `automaticPluginRegistration()` to discover and register plugins. + private init() { + defer { automaticPluginRegistration() } + } + + // MARK: - Public Methods + + /// Registers a plugin. + /// + /// - Parameter plugin: An instance conforming to `SnapshotTestingPlugin`. 
+ public static func registerPlugin(_ plugin: SnapshotTestingPlugin) { + PluginRegistry.shared.registerPlugin(plugin) + } + + /// Retrieves a plugin by its identifier, casting it to the specified type. + /// + /// - Parameter identifier: The unique identifier for the plugin. + /// - Returns: The plugin instance cast to `Output` if found and castable, otherwise `nil`. + public static func plugin(for identifier: String) -> Output? { + PluginRegistry.shared.plugin(for: identifier) + } + + /// Returns all registered plugins cast to the specified type. + /// + /// - Returns: An array of all registered plugins that can be cast to `Output`. + public static func allPlugins() -> [Output] { + PluginRegistry.shared.allPlugins() + } + + // MARK: - Internal Methods + + /// Registers a plugin. + /// + /// - Parameter plugin: An instance conforming to `SnapshotTestingPlugin`. + private func registerPlugin(_ plugin: SnapshotTestingPlugin) { + plugins[type(of: plugin).identifier] = plugin + } + + /// Retrieves a plugin by its identifier, casting it to the specified type. + /// + /// - Parameter identifier: The unique identifier for the plugin. + /// - Returns: The plugin instance cast to `Output` if found and castable, otherwise `nil`. + private func plugin(for identifier: String) -> Output? { + return plugins[identifier] as? Output + } + + /// Returns all registered plugins cast to the specified type. + /// + /// - Returns: An array of all registered plugins that can be cast to `Output`. + private func allPlugins() -> [Output] { + return Array(plugins.values.compactMap { $0 as? Output }) + } + + /// Discovers and registers all classes conforming to the `SnapshotTestingPlugin` protocol. + /// + /// This method iterates over all Objective-C runtime classes, identifying those that conform to `SnapshotTestingPlugin`, + /// instantiating them, and registering them as plugins. + private func automaticPluginRegistration() { + let classCount = objc_getClassList(nil, 0) + guard classCount > 0 else { return } + + let classes = UnsafeMutablePointer.allocate(capacity: Int(classCount)) + defer { classes.deallocate() } + + let autoreleasingClasses = AutoreleasingUnsafeMutablePointer(classes) + objc_getClassList(autoreleasingClasses, classCount) + + for i in 0..( record: SnapshotTestingConfiguration.Record? = nil, diffTool: SnapshotTestingConfiguration.DiffTool? = nil, + imageFormat: ImageSerializationFormat? = nil, operation: () throws -> R ) rethrows -> R { try SnapshotTestingConfiguration.$current.withValue( SnapshotTestingConfiguration( record: record ?? SnapshotTestingConfiguration.current?.record ?? _record, - diffTool: diffTool ?? SnapshotTestingConfiguration.current?.diffTool - ?? SnapshotTesting._diffTool + diffTool: diffTool ?? SnapshotTestingConfiguration.current?.diffTool ?? SnapshotTesting._diffTool, + imageFormat: imageFormat ?? SnapshotTestingConfiguration.current?.imageFormat ?? _imageFormat ) ) { try operation() @@ -45,12 +48,14 @@ public func withSnapshotTesting( public func withSnapshotTesting( record: SnapshotTestingConfiguration.Record? = nil, diffTool: SnapshotTestingConfiguration.DiffTool? = nil, + imageFormat: ImageSerializationFormat? = nil, operation: () async throws -> R ) async rethrows -> R { try await SnapshotTestingConfiguration.$current.withValue( SnapshotTestingConfiguration( record: record ?? SnapshotTestingConfiguration.current?.record ?? _record, - diffTool: diffTool ?? SnapshotTestingConfiguration.current?.diffTool ?? _diffTool + diffTool: diffTool ?? 
SnapshotTestingConfiguration.current?.diffTool ?? _diffTool, + imageFormat: imageFormat ?? SnapshotTestingConfiguration.current?.imageFormat ?? _imageFormat ) ) { try await operation() @@ -71,13 +76,17 @@ public struct SnapshotTestingConfiguration: Sendable { /// /// See ``Record-swift.struct`` for more information. public var record: Record? + + public var imageFormat: ImageSerializationFormat? public init( record: Record?, - diffTool: DiffTool? + diffTool: DiffTool?, + imageFormat: ImageSerializationFormat? ) { self.diffTool = diffTool self.record = record + self.imageFormat = imageFormat } /// The record mode of the snapshot test. diff --git a/Sources/SnapshotTesting/SnapshotsTestTrait.swift b/Sources/SnapshotTesting/SnapshotsTestTrait.swift index 95c4b7915..dd3905865 100644 --- a/Sources/SnapshotTesting/SnapshotsTestTrait.swift +++ b/Sources/SnapshotTesting/SnapshotsTestTrait.swift @@ -2,6 +2,7 @@ // NB: We are importing only the implementation of Testing because that framework is not available // in Xcode UI test targets. @_implementationOnly import Testing + import ImageSerializationPlugin @_spi(Experimental) extension Trait where Self == _SnapshotsTestTrait { @@ -12,12 +13,14 @@ /// - diffTool: The diff tool to use in failure messages. public static func snapshots( record: SnapshotTestingConfiguration.Record? = nil, - diffTool: SnapshotTestingConfiguration.DiffTool? = nil + diffTool: SnapshotTestingConfiguration.DiffTool? = nil, + imageFormat: ImageSerializationFormat? = nil ) -> Self { _SnapshotsTestTrait( configuration: SnapshotTestingConfiguration( record: record, - diffTool: diffTool + diffTool: diffTool, + imageFormat: imageFormat ) ) } diff --git a/Sources/SnapshotTesting/Snapshotting/CALayer.swift b/Sources/SnapshotTesting/Snapshotting/CALayer.swift index 74c512c12..5f4f4a1bf 100644 --- a/Sources/SnapshotTesting/Snapshotting/CALayer.swift +++ b/Sources/SnapshotTesting/Snapshotting/CALayer.swift @@ -1,3 +1,4 @@ +import ImageSerializationPlugin #if os(macOS) import AppKit import Cocoa @@ -14,7 +15,7 @@ /// assertSnapshot(of: layer, as: .image(precision: 0.99)) /// ``` public static var image: Snapshotting { - return .image(precision: 1) + return .image(precision: 1, imageFormat: imageFormat) } /// A snapshot strategy for comparing layers based on pixel equality. @@ -25,9 +26,9 @@ /// match. 98-99% mimics /// [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the /// human eye. - public static func image(precision: Float, perceptualPrecision: Float = 1) -> Snapshotting { + public static func image(precision: Float, perceptualPrecision: Float = 1, imageFormat: ImageSerializationFormat) -> Snapshotting { return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision + precision: precision, perceptualPrecision: perceptualPrecision, imageFormat: imageFormat ).pullback { layer in let image = NSImage(size: layer.bounds.size) image.lockFocus() @@ -46,7 +47,7 @@ extension Snapshotting where Value == CALayer, Format == UIImage { /// A snapshot strategy for comparing layers based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing layers based on pixel equality. @@ -59,12 +60,12 @@ /// human eye. /// - traits: A trait collection override. 
public static func image( - precision: Float = 1, perceptualPrecision: Float = 1, traits: UITraitCollection = .init() + precision: Float = 1, perceptualPrecision: Float = 1, traits: UITraitCollection = .init(), imageFormat: ImageSerializationFormat ) -> Snapshotting { return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale + precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale, imageFormat: imageFormat ).pullback { layer in renderer(bounds: layer.bounds, for: traits).image { ctx in layer.setNeedsLayout() diff --git a/Sources/SnapshotTesting/Snapshotting/CGPath.swift b/Sources/SnapshotTesting/Snapshotting/CGPath.swift index 65470605c..368ab5196 100644 --- a/Sources/SnapshotTesting/Snapshotting/CGPath.swift +++ b/Sources/SnapshotTesting/Snapshotting/CGPath.swift @@ -1,4 +1,6 @@ +import ImageSerializationPlugin #if os(macOS) + import AppKit import Cocoa import CoreGraphics @@ -6,7 +8,7 @@ extension Snapshotting where Value == CGPath, Format == NSImage { /// A snapshot strategy for comparing bezier paths based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing bezier paths based on pixel equality. @@ -29,10 +31,11 @@ public static func image( precision: Float = 1, perceptualPrecision: Float = 1, - drawingMode: CGPathDrawingMode = .eoFill + drawingMode: CGPathDrawingMode = .eoFill, + imageFormat: ImageSerializationFormat ) -> Snapshotting { return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision + precision: precision, perceptualPrecision: perceptualPrecision, imageFormat: imageFormat ).pullback { path in let bounds = path.boundingBoxOfPath var transform = CGAffineTransform(translationX: -bounds.origin.x, y: -bounds.origin.y) @@ -52,10 +55,11 @@ #elseif os(iOS) || os(tvOS) import UIKit + extension Snapshotting where Value == CGPath, Format == UIImage { /// A snapshot strategy for comparing bezier paths based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing bezier paths based on pixel equality. @@ -68,10 +72,10 @@ /// human eye. public static func image( precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat = 1, - drawingMode: CGPathDrawingMode = .eoFill + drawingMode: CGPathDrawingMode = .eoFill, imageFormat: ImageSerializationFormat ) -> Snapshotting { return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision, scale: scale + precision: precision, perceptualPrecision: perceptualPrecision, scale: scale, imageFormat: imageFormat ).pullback { path in let bounds = path.boundingBoxOfPath let format: UIGraphicsImageRendererFormat diff --git a/Sources/SnapshotTesting/Snapshotting/NSBezierPath.swift b/Sources/SnapshotTesting/Snapshotting/NSBezierPath.swift index b84a59bf3..8577ef296 100644 --- a/Sources/SnapshotTesting/Snapshotting/NSBezierPath.swift +++ b/Sources/SnapshotTesting/Snapshotting/NSBezierPath.swift @@ -1,11 +1,12 @@ #if os(macOS) import AppKit import Cocoa + import ImageSerializationPlugin extension Snapshotting where Value == NSBezierPath, Format == NSImage { /// A snapshot strategy for comparing bezier paths based on pixel equality. 
public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing bezier paths based on pixel equality. @@ -24,9 +25,9 @@ /// match. 98-99% mimics /// [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the /// human eye. - public static func image(precision: Float = 1, perceptualPrecision: Float = 1) -> Snapshotting { + public static func image(precision: Float = 1, perceptualPrecision: Float = 1, imageFormat: ImageSerializationFormat) -> Snapshotting { return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision + precision: precision, perceptualPrecision: perceptualPrecision, imageFormat: imageFormat ).pullback { path in // Move path info frame: let bounds = path.bounds diff --git a/Sources/SnapshotTesting/Snapshotting/NSImage.swift b/Sources/SnapshotTesting/Snapshotting/NSImage.swift index be4fd7cd4..c8978e27c 100644 --- a/Sources/SnapshotTesting/Snapshotting/NSImage.swift +++ b/Sources/SnapshotTesting/Snapshotting/NSImage.swift @@ -1,10 +1,11 @@ #if os(macOS) import Cocoa import XCTest + import ImageSerializationPlugin extension Diffing where Value == NSImage { /// A pixel-diffing strategy for NSImage's which requires a 100% match. - public static let image = Diffing.image() + public static let image = Diffing.image(imageFormat: imageFormat) /// A pixel-diffing strategy for NSImage that allows customizing how precise the matching must be. /// @@ -15,14 +16,15 @@ /// [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the /// human eye. /// - Returns: A new diffing strategy. - public static func image(precision: Float = 1, perceptualPrecision: Float = 1) -> Diffing { + public static func image(precision: Float = 1, perceptualPrecision: Float = 1, imageFormat: ImageSerializationFormat = imageFormat) -> Diffing { + let imageSerializer = ImageSerializer() return .init( - toData: { NSImagePNGRepresentation($0)! }, - fromData: { NSImage(data: $0)! } + toData: { imageSerializer.encodeImage($0, imageFormat: imageFormat)! }, + fromData: { imageSerializer.decodeImage($0, imageFormat: imageFormat)! } ) { old, new in guard let message = compare( - old, new, precision: precision, perceptualPrecision: perceptualPrecision) + old, new, precision: precision, perceptualPrecision: perceptualPrecision, imageFormat: imageFormat) else { return nil } let difference = SnapshotTesting.diff(old, new) let oldAttachment = XCTAttachment(image: old) @@ -42,7 +44,7 @@ extension Snapshotting where Value == NSImage, Format == NSImage { /// A snapshot strategy for comparing images based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing images based on pixel equality. @@ -53,24 +55,15 @@ /// match. 98-99% mimics /// [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the /// human eye. 
- public static func image(precision: Float = 1, perceptualPrecision: Float = 1) -> Snapshotting { + public static func image(precision: Float = 1, perceptualPrecision: Float = 1, imageFormat: ImageSerializationFormat = imageFormat) -> Snapshotting { return .init( - pathExtension: "png", - diffing: .image(precision: precision, perceptualPrecision: perceptualPrecision) + pathExtension: imageFormat.rawValue, + diffing: .image(precision: precision, perceptualPrecision: perceptualPrecision, imageFormat: imageFormat) ) } } - private func NSImagePNGRepresentation(_ image: NSImage) -> Data? { - guard let cgImage = image.cgImage(forProposedRect: nil, context: nil, hints: nil) else { - return nil - } - let rep = NSBitmapImageRep(cgImage: cgImage) - rep.size = image.size - return rep.representation(using: .png, properties: [:]) - } - - private func compare(_ old: NSImage, _ new: NSImage, precision: Float, perceptualPrecision: Float) + private func compare(_ old: NSImage, _ new: NSImage, precision: Float, perceptualPrecision: Float, imageFormat: ImageSerializationFormat) -> String? { guard let oldCgImage = old.cgImage(forProposedRect: nil, context: nil, hints: nil) else { @@ -93,9 +86,10 @@ } let byteCount = oldContext.height * oldContext.bytesPerRow if memcmp(oldData, newData, byteCount) == 0 { return nil } + let imageSerializer = ImageSerializer() guard - let pngData = NSImagePNGRepresentation(new), - let newerCgImage = NSImage(data: pngData)?.cgImage( + let imageData = imageSerializer.encodeImage(new, imageFormat: imageFormat), + let newerCgImage = imageSerializer.decodeImage(imageData, imageFormat: imageFormat)?.cgImage( forProposedRect: nil, context: nil, hints: nil), let newerContext = context(for: newerCgImage), let newerData = newerContext.data diff --git a/Sources/SnapshotTesting/Snapshotting/NSView.swift b/Sources/SnapshotTesting/Snapshotting/NSView.swift index b2e7edfb0..b83240926 100644 --- a/Sources/SnapshotTesting/Snapshotting/NSView.swift +++ b/Sources/SnapshotTesting/Snapshotting/NSView.swift @@ -1,11 +1,12 @@ #if os(macOS) import AppKit import Cocoa + import ImageSerializationPlugin extension Snapshotting where Value == NSView, Format == NSImage { /// A snapshot strategy for comparing views based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing views based on pixel equality. @@ -21,10 +22,10 @@ /// human eye. /// - size: A view size override. public static func image( - precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? = nil + precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? 
= nil, imageFormat: ImageSerializationFormat = imageFormat ) -> Snapshotting { return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision + precision: precision, perceptualPrecision: perceptualPrecision, imageFormat: imageFormat ).asyncPullback { view in let initialSize = view.frame.size if let size = size { view.frame.size = size } diff --git a/Sources/SnapshotTesting/Snapshotting/NSViewController.swift b/Sources/SnapshotTesting/Snapshotting/NSViewController.swift index 69ec72dde..2d841701c 100644 --- a/Sources/SnapshotTesting/Snapshotting/NSViewController.swift +++ b/Sources/SnapshotTesting/Snapshotting/NSViewController.swift @@ -1,11 +1,12 @@ #if os(macOS) import AppKit import Cocoa + import ImageSerializationPlugin extension Snapshotting where Value == NSViewController, Format == NSImage { /// A snapshot strategy for comparing view controller views based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing view controller views based on pixel equality. @@ -18,10 +19,10 @@ /// human eye. /// - size: A view size override. public static func image( - precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? = nil + precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? = nil, imageFormat: ImageSerializationFormat ) -> Snapshotting { return Snapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision, size: size + precision: precision, perceptualPrecision: perceptualPrecision, size: size, imageFormat: imageFormat ).pullback { $0.view } } } diff --git a/Sources/SnapshotTesting/Snapshotting/SceneKit.swift b/Sources/SnapshotTesting/Snapshotting/SceneKit.swift index 94ff90459..758296e23 100644 --- a/Sources/SnapshotTesting/Snapshotting/SceneKit.swift +++ b/Sources/SnapshotTesting/Snapshotting/SceneKit.swift @@ -1,5 +1,6 @@ #if os(iOS) || os(macOS) || os(tvOS) import SceneKit + import ImageSerializationPlugin #if os(macOS) import Cocoa #elseif os(iOS) || os(tvOS) @@ -17,10 +18,10 @@ /// [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the /// human eye. /// - size: The size of the scene. - public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize) + public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize, imageFormat: ImageSerializationFormat) -> Snapshotting { - return .scnScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size) + return .scnScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size, imageFormat: imageFormat) } } #elseif os(iOS) || os(tvOS) @@ -34,20 +35,20 @@ /// [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the /// human eye. /// - size: The size of the scene. 
- public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize) + public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize, imageFormat: ImageSerializationFormat) -> Snapshotting { - return .scnScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size) + return .scnScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size, imageFormat: imageFormat) } } #endif extension Snapshotting where Value == SCNScene, Format == Image { - fileprivate static func scnScene(precision: Float, perceptualPrecision: Float, size: CGSize) + fileprivate static func scnScene(precision: Float, perceptualPrecision: Float, size: CGSize, imageFormat: ImageSerializationFormat) -> Snapshotting { return Snapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision + precision: precision, perceptualPrecision: perceptualPrecision, imageFormat: imageFormat ).pullback { scene in let view = SCNView(frame: .init(x: 0, y: 0, width: size.width, height: size.height)) view.scene = scene diff --git a/Sources/SnapshotTesting/Snapshotting/SpriteKit.swift b/Sources/SnapshotTesting/Snapshotting/SpriteKit.swift index ad515050a..a073f190b 100644 --- a/Sources/SnapshotTesting/Snapshotting/SpriteKit.swift +++ b/Sources/SnapshotTesting/Snapshotting/SpriteKit.swift @@ -1,5 +1,6 @@ #if os(iOS) || os(macOS) || os(tvOS) import SpriteKit + import ImageSerializationPlugin #if os(macOS) import Cocoa #elseif os(iOS) || os(tvOS) @@ -17,10 +18,10 @@ /// [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the /// human eye. /// - size: The size of the scene. - public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize) + public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize, imageFormat: ImageSerializationFormat) -> Snapshotting { - return .skScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size) + return .skScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size, imageFormat: imageFormat) } } #elseif os(iOS) || os(tvOS) @@ -34,20 +35,20 @@ /// [the precision](http://zschuessler.github.io/DeltaE/learn/#toc-defining-delta-e) of the /// human eye. /// - size: The size of the scene. 
- public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize) + public static func image(precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize, imageFormat: ImageSerializationFormat) -> Snapshotting { - return .skScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size) + return .skScene(precision: precision, perceptualPrecision: perceptualPrecision, size: size, imageFormat: imageFormat) } } #endif extension Snapshotting where Value == SKScene, Format == Image { - fileprivate static func skScene(precision: Float, perceptualPrecision: Float, size: CGSize) + fileprivate static func skScene(precision: Float, perceptualPrecision: Float, size: CGSize, imageFormat: ImageSerializationFormat) -> Snapshotting { return Snapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision + precision: precision, perceptualPrecision: perceptualPrecision, imageFormat: imageFormat ).pullback { scene in let view = SKView(frame: .init(x: 0, y: 0, width: size.width, height: size.height)) view.presentScene(scene) diff --git a/Sources/SnapshotTesting/Snapshotting/SwiftUIView.swift b/Sources/SnapshotTesting/Snapshotting/SwiftUIView.swift index 8d85e1f0b..673ce859c 100644 --- a/Sources/SnapshotTesting/Snapshotting/SwiftUIView.swift +++ b/Sources/SnapshotTesting/Snapshotting/SwiftUIView.swift @@ -1,6 +1,7 @@ #if canImport(SwiftUI) import Foundation import SwiftUI + import ImageSerializationPlugin /// The size constraint for a snapshot (similar to `PreviewLayout`). public enum SwiftUISnapshotLayout { @@ -20,7 +21,7 @@ /// A snapshot strategy for comparing SwiftUI Views based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing SwiftUI Views based on pixel equality. @@ -41,7 +42,8 @@ precision: Float = 1, perceptualPrecision: Float = 1, layout: SwiftUISnapshotLayout = .sizeThatFits, - traits: UITraitCollection = .init() + traits: UITraitCollection = .init(), + imageFormat: ImageSerializationFormat = imageFormat ) -> Snapshotting { @@ -60,7 +62,7 @@ } return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale + precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale, imageFormat: imageFormat ).asyncPullback { view in var config = config diff --git a/Sources/SnapshotTesting/Snapshotting/UIBezierPath.swift b/Sources/SnapshotTesting/Snapshotting/UIBezierPath.swift index 6b48d622d..78b1891fd 100644 --- a/Sources/SnapshotTesting/Snapshotting/UIBezierPath.swift +++ b/Sources/SnapshotTesting/Snapshotting/UIBezierPath.swift @@ -1,10 +1,11 @@ #if os(iOS) || os(tvOS) import UIKit + import ImageSerializationPlugin extension Snapshotting where Value == UIBezierPath, Format == UIImage { /// A snapshot strategy for comparing bezier paths based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing bezier paths based on pixel equality. @@ -17,10 +18,10 @@ /// human eye. /// - scale: The scale to use when loading the reference image from disk. 
public static func image( - precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat = 1 + precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat = 1, imageFormat: ImageSerializationFormat ) -> Snapshotting { return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision, scale: scale + precision: precision, perceptualPrecision: perceptualPrecision, scale: scale, imageFormat: imageFormat ).pullback { path in let bounds = path.bounds let format: UIGraphicsImageRendererFormat diff --git a/Sources/SnapshotTesting/Snapshotting/UIImage.swift b/Sources/SnapshotTesting/Snapshotting/UIImage.swift index 3d1bb5319..8d97de37f 100644 --- a/Sources/SnapshotTesting/Snapshotting/UIImage.swift +++ b/Sources/SnapshotTesting/Snapshotting/UIImage.swift @@ -1,10 +1,11 @@ #if os(iOS) || os(tvOS) import UIKit import XCTest + import ImageSerializationPlugin extension Diffing where Value == UIImage { /// A pixel-diffing strategy for UIImage's which requires a 100% match. - public static let image = Diffing.image() + public static let image = Diffing.image(imageFormat: imageFormat) /// A pixel-diffing strategy for UIImage that allows customizing how precise the matching must be. /// @@ -18,7 +19,7 @@ /// `UITraitCollection`s default value of `0.0`, the screens scale is used. /// - Returns: A new diffing strategy. public static func image( - precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat? = nil + precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat? = nil, imageFormat: ImageSerializationFormat = imageFormat ) -> Diffing { let imageScale: CGFloat if let scale = scale, scale != 0.0 { @@ -26,14 +27,14 @@ } else { imageScale = UIScreen.main.scale } - + let imageSerializer = ImageSerializer() return Diffing( - toData: { $0.pngData() ?? emptyImage().pngData()! }, - fromData: { UIImage(data: $0, scale: imageScale)! } + toData: { imageSerializer.encodeImage($0, imageFormat: imageFormat) ?? emptyImage().pngData()! }, // this seems inconsistant with macOS implementation + fromData: { imageSerializer.decodeImage($0, imageFormat: imageFormat)! } // missing imageScale here ) { old, new in guard let message = compare( - old, new, precision: precision, perceptualPrecision: perceptualPrecision) + old, new, precision: precision, perceptualPrecision: perceptualPrecision, imageFormat: imageFormat) else { return nil } let difference = SnapshotTesting.diff(old, new) let oldAttachment = XCTAttachment(image: old) @@ -65,7 +66,7 @@ extension Snapshotting where Value == UIImage, Format == UIImage { /// A snapshot strategy for comparing images based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing images based on pixel equality. @@ -78,12 +79,12 @@ /// human eye. /// - scale: The scale of the reference image stored on disk. public static func image( - precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat? = nil + precision: Float = 1, perceptualPrecision: Float = 1, scale: CGFloat? 
= nil, imageFormat: ImageSerializationFormat = imageFormat ) -> Snapshotting { return .init( - pathExtension: "png", + pathExtension: imageFormat.rawValue, diffing: .image( - precision: precision, perceptualPrecision: perceptualPrecision, scale: scale) + precision: precision, perceptualPrecision: perceptualPrecision, scale: scale, imageFormat: imageFormat) ) } } @@ -93,7 +94,7 @@ private let imageContextBitsPerComponent = 8 private let imageContextBytesPerPixel = 4 - private func compare(_ old: UIImage, _ new: UIImage, precision: Float, perceptualPrecision: Float) + private func compare(_ old: UIImage, _ new: UIImage, precision: Float, perceptualPrecision: Float, imageFormat: ImageSerializationFormat) -> String? { guard let oldCgImage = old.cgImage else { @@ -118,9 +119,10 @@ if memcmp(oldData, newData, byteCount) == 0 { return nil } } var newerBytes = [UInt8](repeating: 0, count: byteCount) + let imageSerializer = ImageSerializer() guard - let pngData = new.pngData(), - let newerCgImage = UIImage(data: pngData)?.cgImage, + let imageData = imageSerializer.encodeImage(new, imageFormat: imageFormat), + let newerCgImage = imageSerializer.decodeImage(imageData, imageFormat: imageFormat)?.cgImage, let newerContext = context(for: newerCgImage, data: &newerBytes), let newerData = newerContext.data else { diff --git a/Sources/SnapshotTesting/Snapshotting/UIView.swift b/Sources/SnapshotTesting/Snapshotting/UIView.swift index 7244f67d1..44885d80c 100644 --- a/Sources/SnapshotTesting/Snapshotting/UIView.swift +++ b/Sources/SnapshotTesting/Snapshotting/UIView.swift @@ -1,10 +1,11 @@ #if os(iOS) || os(tvOS) import UIKit + import ImageSerializationPlugin extension Snapshotting where Value == UIView, Format == UIImage { /// A snapshot strategy for comparing views based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing views based on pixel equality. @@ -25,13 +26,14 @@ precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? = nil, - traits: UITraitCollection = .init() + traits: UITraitCollection = .init(), + imageFormat: ImageSerializationFormat = imageFormat ) -> Snapshotting { return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale + precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale, imageFormat: imageFormat ).asyncPullback { view in snapshotView( config: .init(safeArea: .zero, size: size ?? view.frame.size, traits: .init()), diff --git a/Sources/SnapshotTesting/Snapshotting/UIViewController.swift b/Sources/SnapshotTesting/Snapshotting/UIViewController.swift index b08b8bf59..f6562b320 100644 --- a/Sources/SnapshotTesting/Snapshotting/UIViewController.swift +++ b/Sources/SnapshotTesting/Snapshotting/UIViewController.swift @@ -1,10 +1,11 @@ #if os(iOS) || os(tvOS) import UIKit + import ImageSerializationPlugin extension Snapshotting where Value == UIViewController, Format == UIImage { /// A snapshot strategy for comparing view controller views based on pixel equality. public static var image: Snapshotting { - return .image() + return .image(imageFormat: imageFormat) } /// A snapshot strategy for comparing view controller views based on pixel equality. @@ -23,13 +24,14 @@ precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? 
= nil, - traits: UITraitCollection = .init() + traits: UITraitCollection = .init(), + imageFormat: ImageSerializationFormat = imageFormat ) -> Snapshotting { return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale + precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale, imageFormat: imageFormat ).asyncPullback { viewController in snapshotView( config: size.map { .init(safeArea: config.safeArea, size: $0, traits: config.traits) } @@ -60,13 +62,14 @@ precision: Float = 1, perceptualPrecision: Float = 1, size: CGSize? = nil, - traits: UITraitCollection = .init() + traits: UITraitCollection = .init(), + imageFormat: ImageSerializationFormat ) -> Snapshotting { return SimplySnapshotting.image( - precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale + precision: precision, perceptualPrecision: perceptualPrecision, scale: traits.displayScale, imageFormat: imageFormat ).asyncPullback { viewController in snapshotView( config: .init(safeArea: .zero, size: size, traits: traits), diff --git a/Sources/SnapshotTestingPlugin/SnapshotTestingPlugin.swift b/Sources/SnapshotTestingPlugin/SnapshotTestingPlugin.swift new file mode 100644 index 000000000..e49b4be42 --- /dev/null +++ b/Sources/SnapshotTestingPlugin/SnapshotTestingPlugin.swift @@ -0,0 +1,25 @@ +#if canImport(Foundation) && canImport(ObjectiveC) +import Foundation + +/// A protocol that defines a plugin for snapshot testing, designed to be used in environments that support Objective-C. +/// +/// The `SnapshotTestingPlugin` protocol is intended to be adopted by classes that provide specific functionality for snapshot testing. +/// It requires each conforming class to have a unique identifier and a parameterless initializer. This protocol is designed to be used in +/// environments where both Foundation and Objective-C are available, making it compatible with Objective-C runtime features. +/// +/// Conforming classes must be marked with `@objc` to ensure compatibility with Objective-C runtime mechanisms. +@objc public protocol SnapshotTestingPlugin { + + /// A unique string identifier for the plugin. + /// + /// Each plugin must provide a static identifier that uniquely distinguishes it from other plugins. This identifier is used + /// to register and retrieve plugins within a registry, ensuring that each plugin can be easily identified and utilized. + static var identifier: String { get } + + /// Initializes a new instance of the plugin. + /// + /// This initializer is required to allow the Objective-C runtime to create instances of the plugin class when registering + /// and utilizing plugins. The initializer must not take any parameters. + init() +} +#endif
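Taken together, these additions let a third-party image format be added as a single class: conform to `ImageSerializationPlugin` (that is, `ImageSerialization` plus `SnapshotTestingPlugin`) and the `PluginRegistry` picks it up at runtime. The sketch below shows the rough shape of such a plugin; `JPEGImageSerializer`, its identifier, the `"jpeg"` format string, and the compression quality are illustrative assumptions, not part of this change.

```swift
import Foundation
import ImageSerializationPlugin
import SnapshotTesting
#if canImport(UIKit)
import UIKit
#elseif canImport(AppKit)
import AppKit
#endif

// Hypothetical plugin: an NSObject subclass so the Objective-C runtime scan
// in PluginRegistry can discover and register it automatically.
final class JPEGImageSerializer: NSObject, ImageSerializationPlugin {
  static var identifier: String { "ImageSerializationPlugin.JPEGImageSerializer" }
  static var imageFormat: ImageSerializationFormat { .plugins("jpeg") }

  override init() { super.init() }

  func encodeImage(_ image: SnapImage) -> Data? {
    #if canImport(UIKit)
      return image.jpegData(compressionQuality: 0.9)
    #elseif canImport(AppKit)
      guard let cgImage = image.cgImage(forProposedRect: nil, context: nil, hints: nil) else {
        return nil
      }
      return NSBitmapImageRep(cgImage: cgImage).representation(using: .jpeg, properties: [:])
    #endif
  }

  func decodeImage(_ data: Data) -> SnapImage? {
    SnapImage(data: data)
  }
}
```

A test can then opt into that format per assertion through the new `imageFormat` parameter on the image strategies, or scope it over a block of assertions with `withSnapshotTesting(imageFormat:)`; strategies fall back to PNG whenever no matching plugin is registered. Here `someView` stands in for whatever value is under test:

```swift
// Per assertion, via the strategy's imageFormat parameter:
assertSnapshot(of: someView, as: .image(imageFormat: .plugins("jpeg")))

// Or scoped over several assertions:
withSnapshotTesting(imageFormat: .plugins("jpeg")) {
  assertSnapshot(of: someView, as: .image)
}
```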