Imagen editing #15174

Draft · wants to merge 2 commits into main
@@ -15,12 +15,42 @@
@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
struct ImageGenerationInstance {
  let prompt: String
  let referenceImages: [ImagenReferenceImage]?

  init(prompt: String, referenceImages: [ImagenReferenceImage]? = nil) {
    self.prompt = prompt
    self.referenceImages = referenceImages
  }
}

@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
extension ImageGenerationInstance: Equatable {
  // Reference images are compared by count only, since `ImagenReferenceImage`
  // itself is not Equatable.
  static func == (lhs: ImageGenerationInstance, rhs: ImageGenerationInstance) -> Bool {
    return lhs.prompt == rhs.prompt
      && lhs.referenceImages?.count == rhs.referenceImages?.count
  }
}

// MARK: - Codable Conformance

@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
extension ImageGenerationInstance: Encodable {
  enum CodingKeys: String, CodingKey {
    case prompt
    case referenceImages = "image"
  }

  func encode(to encoder: any Encoder) throws {
    var container = encoder.container(keyedBy: CodingKeys.self)
    try container.encode(prompt, forKey: .prompt)
    if let referenceImages = referenceImages {
      var imagesContainer = container.nestedUnkeyedContainer(forKey: .referenceImages)
      // Only the known concrete reference image types are encoded; any other
      // `ImagenReferenceImage` conformer is silently skipped.
      for image in referenceImages {
        if let rawImage = image as? ImagenRawImage {
          try imagesContainer.encode(rawImage)
        } else if let mask = image as? ImagenMaskReference {
          try imagesContainer.encode(mask)
        }
      }
    }
  }
}
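
For reference, a minimal sketch of how an instance with a raw image and a mask serializes under the encoder above, e.g. from a unit test. The placeholder `imageData`/`maskData` values and the expected JSON shape are assumptions; only the `prompt`/`image` keys come from the CodingKeys in this diff.

import Foundation

let imageData = Data()  // placeholder; real PNG bytes in practice
let maskData = Data()   // placeholder; real mask PNG bytes in practice

let instance = ImageGenerationInstance(
  prompt: "Add a sunset behind the mountains",
  referenceImages: [
    ImagenRawImage(data: imageData),     // the image being edited
    ImagenMaskReference(data: maskData)  // where edits are allowed
  ]
)

let jsonData = try JSONEncoder().encode(instance)
print(String(decoding: jsonData, as: UTF8.self))
// e.g. {"prompt":"...","image":[{...},{"bytesBase64Encoded":"..."}]}
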
@@ -23,6 +23,32 @@ struct ImageGenerationParameters {
  let outputOptions: ImageGenerationOutputOptions?
  let addWatermark: Bool?
  let includeResponsibleAIFilterReason: Bool?
  let editMode: String?
  let editConfig: ImageEditingParameters?

  init(sampleCount: Int?,
       storageURI: String?,
       negativePrompt: String?,
       aspectRatio: String?,
       safetyFilterLevel: String?,
       personGeneration: String?,
       outputOptions: ImageGenerationOutputOptions?,
       addWatermark: Bool?,
       includeResponsibleAIFilterReason: Bool?,
       editMode: String? = nil,
       editConfig: ImageEditingParameters? = nil) {
    self.sampleCount = sampleCount
    self.storageURI = storageURI
    self.negativePrompt = negativePrompt
    self.aspectRatio = aspectRatio
    self.safetyFilterLevel = safetyFilterLevel
    self.personGeneration = personGeneration
    self.outputOptions = outputOptions
    self.addWatermark = addWatermark
    self.includeResponsibleAIFilterReason = includeResponsibleAIFilterReason
    self.editMode = editMode
    self.editConfig = editConfig
  }
}

@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
@@ -42,6 +68,8 @@ extension ImageGenerationParameters: Encodable {
    case outputOptions
    case addWatermark
    case includeResponsibleAIFilterReason = "includeRaiReason"
    case editMode
    case editConfig
  }

  func encode(to encoder: any Encoder) throws {
@@ -58,5 +86,12 @@ extension ImageGenerationParameters: Encodable {
      includeResponsibleAIFilterReason,
      forKey: .includeResponsibleAIFilterReason
    )
    try container.encodeIfPresent(editMode, forKey: .editMode)
    try container.encodeIfPresent(editConfig, forKey: .editConfig)
  }
}

@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
struct ImageEditingParameters: Codable, Equatable {
  let editSteps: Int?
}
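
As a quick sanity check on the wiring above, an illustrative sketch of how the new fields might be populated from the public ImagenEditingConfig introduced later in this diff. The mapping is an assumption (the actual call site is not part of this change) and the values are placeholders.

let editingConfig = ImagenEditingConfig(editMode: .inpaint, editSteps: 50)

let parameters = ImageGenerationParameters(
  sampleCount: 1,
  storageURI: nil,
  negativePrompt: nil,
  aspectRatio: nil,
  safetyFilterLevel: nil,
  personGeneration: nil,
  outputOptions: nil,
  addWatermark: nil,
  includeResponsibleAIFilterReason: true,
  editMode: editingConfig.editMode.rawValue,  // "inpainting"
  editConfig: ImageEditingParameters(editSteps: editingConfig.editSteps)
)
// encodeIfPresent keeps editMode/editConfig out of the payload for plain
// generation requests, so the existing wire format is unchanged.
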
30 changes: 30 additions & 0 deletions FirebaseAI/Sources/Types/Public/Imagen/Dimensions.swift
@@ -0,0 +1,30 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import Foundation

/// Represents the dimensions of an image.
@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
public struct Dimensions: Codable, Sendable {
  /// The width of the image in pixels.
  public let width: Int

  /// The height of the image in pixels.
  public let height: Int

  public init(width: Int, height: Int) {
    self.width = width
    self.height = height
  }
}
31 changes: 31 additions & 0 deletions FirebaseAI/Sources/Types/Public/Imagen/ImagenEditMode.swift
@@ -0,0 +1,31 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import Foundation

/// The editing method to use.
@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
public enum ImagenEditMode: String, Codable, Sendable {
  /// The model should use the prompt and reference images to generate a new image.
  case product = "product-image"

  /// The model should generate a new background for the given image.
  case background = "background-refresh"

  /// The model should replace the masked region of the image with new content.
  case inpaint = "inpainting"

  /// The model should extend the image beyond its original borders.
  case outpaint = "outpainting"
}
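
The raw values are presumably the strings sent on the wire (they line up with the internal `editMode: String?` parameter), so a mode round-trips directly through rawValue. A small illustrative check:

let mode: ImagenEditMode = .outpaint
print(mode.rawValue)                                       // "outpainting"
print(ImagenEditMode(rawValue: "inpainting") == .inpaint)  // true
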
30 changes: 30 additions & 0 deletions FirebaseAI/Sources/Types/Public/Imagen/ImagenEditingConfig.swift
@@ -0,0 +1,30 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import Foundation

/// Configuration for editing an image with Imagen.
@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
public struct ImagenEditingConfig: Codable, Sendable {
  /// The editing method to use.
  public let editMode: ImagenEditMode

  /// The number of steps to use for the editing process.
  public let editSteps: Int?

  public init(editMode: ImagenEditMode, editSteps: Int? = nil) {
    self.editMode = editMode
    self.editSteps = editSteps
  }
}
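
Two illustrative configurations, assuming the model's default step count applies when editSteps is omitted (the default behavior is not specified in this diff):

let outpaintConfig = ImagenEditingConfig(editMode: .outpaint)               // editSteps defaults to nil
let inpaintConfig = ImagenEditingConfig(editMode: .inpaint, editSteps: 75)  // explicit step count
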
49 changes: 49 additions & 0 deletions FirebaseAI/Sources/Types/Public/Imagen/ImagenImagePlacement.swift
@@ -0,0 +1,49 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import Foundation

/// Represents the placement of an image within a larger canvas.
@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
public enum ImagenImagePlacement: Sendable {
  /// The image is placed at the top left corner of the canvas.
  case topLeft

  /// The image is placed at the top center of the canvas.
  case topCenter

  /// The image is placed at the top right corner of the canvas.
  case topRight

  /// The image is placed at the middle left of the canvas.
  case middleLeft

  /// The image is placed in the center of the canvas.
  case center

  /// The image is placed at the middle right of the canvas.
  case middleRight

  /// The image is placed at the bottom left corner of the canvas.
  case bottomLeft

  /// The image is placed at the bottom center of the canvas.
  case bottomCenter

  /// The image is placed at the bottom right corner of the canvas.
  case bottomRight

  /// The image is placed at a custom offset from the top left corner of the canvas.
  case custom(x: Int, y: Int)
}
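
For intuition, the offsets each case resolves to, mirroring the switch in ImagenMaskReference below. Offsets are measured from the canvas's top-left corner; the concrete sizes are illustrative only.

// For a 1024x1024 canvas holding a 512x512 original:
// .topLeft      -> (x: 0,   y: 0)
// .center       -> (x: 256, y: 256)   // (1024 - 512) / 2 on each axis
// .bottomRight  -> (x: 512, y: 512)
// .custom(x:y:) -> exactly the offsets passed in, for example:
let placement: ImagenImagePlacement = .custom(x: 100, y: 0)
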
130 changes: 130 additions & 0 deletions FirebaseAI/Sources/Types/Public/Imagen/ImagenMaskReference.swift
@@ -0,0 +1,130 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import CoreGraphics
import Foundation
import ImageIO

/// A reference image containing a mask that marks the regions of an image to edit,
/// used for inpainting and outpainting requests.
@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
public struct ImagenMaskReference: ImagenReferenceImage, Encodable {
  /// The mask data.
  public let data: Data

  public init(data: Data) {
    self.data = data
  }

  enum CodingKeys: String, CodingKey {
    case data = "bytesBase64Encoded"
  }

  public func encode(to encoder: Encoder) throws {
    var container = encoder.container(keyedBy: CodingKeys.self)
    try container.encode(data.base64EncodedString(), forKey: .data)
  }

  static func generateMaskAndPadForOutpainting(
    image: ImagenInlineImage,
    newDimensions: Dimensions,
    newPosition: ImagenImagePlacement
  ) throws -> [ImagenReferenceImage] {
    guard let cgImage = CGImage.fromData(image.data) else {
      throw NSError(
        domain: "com.google.firebase.ai",
        code: 0,
        userInfo: [NSLocalizedDescriptionKey: "Could not create image from data."]
      )
    }

    let originalWidth = cgImage.width
    let originalHeight = cgImage.height

    guard newDimensions.width >= originalWidth, newDimensions.height >= originalHeight else {
      throw NSError(
        domain: "com.google.firebase.ai",
        code: 0,
        userInfo: [NSLocalizedDescriptionKey: "New dimensions must be at least as large as the original image."]
      )
    }

    // Offsets are measured from the top-left corner of the new canvas.
    let offsetX: Int
    let offsetY: Int

    switch newPosition {
    case .topLeft:
      offsetX = 0
      offsetY = 0
    case .topCenter:
      offsetX = (newDimensions.width - originalWidth) / 2
      offsetY = 0
    case .topRight:
      offsetX = newDimensions.width - originalWidth
      offsetY = 0
    case .middleLeft:
      offsetX = 0
      offsetY = (newDimensions.height - originalHeight) / 2
    case .center:
      offsetX = (newDimensions.width - originalWidth) / 2
      offsetY = (newDimensions.height - originalHeight) / 2
    case .middleRight:
      offsetX = newDimensions.width - originalWidth
      offsetY = (newDimensions.height - originalHeight) / 2
    case .bottomLeft:
      offsetX = 0
      offsetY = newDimensions.height - originalHeight
    case .bottomCenter:
      offsetX = (newDimensions.width - originalWidth) / 2
      offsetY = newDimensions.height - originalHeight
    case .bottomRight:
      offsetX = newDimensions.width - originalWidth
      offsetY = newDimensions.height - originalHeight
    case let .custom(x, y):
      offsetX = x
      offsetY = y
    }

    // CoreGraphics bitmap contexts use a bottom-left origin, so convert the
    // top-left based vertical offset before drawing.
    let cgOffsetY = newDimensions.height - originalHeight - offsetY
    let originalRect = CGRect(x: offsetX, y: cgOffsetY, width: originalWidth, height: originalHeight)

    let colorSpace = CGColorSpaceCreateDeviceRGB()
    let bitmapInfo = CGImageAlphaInfo.premultipliedLast.rawValue

    // Create the padded image: the original drawn onto a larger, empty canvas.
    guard let paddedContext = CGContext(
      data: nil,
      width: newDimensions.width,
      height: newDimensions.height,
      bitsPerComponent: 8,
      bytesPerRow: 0,
      space: colorSpace,
      bitmapInfo: bitmapInfo
    ) else {
      throw NSError(
        domain: "com.google.firebase.ai",
        code: 0,
        userInfo: [NSLocalizedDescriptionKey: "Could not create padded image context."]
      )
    }
    paddedContext.draw(cgImage, in: originalRect)
    guard let paddedCGImage = paddedContext.makeImage(),
          let paddedImageData = paddedCGImage.toData() else {
      throw NSError(
        domain: "com.google.firebase.ai",
        code: 0,
        userInfo: [NSLocalizedDescriptionKey: "Could not get padded image data."]
      )
    }

    // Create the mask: white (editable) everywhere except the region covered by
    // the original image, which is black (preserved).
    guard let maskContext = CGContext(
      data: nil,
      width: newDimensions.width,
      height: newDimensions.height,
      bitsPerComponent: 8,
      bytesPerRow: 0,
      space: CGColorSpaceCreateDeviceGray(),
      bitmapInfo: CGImageAlphaInfo.none.rawValue
    ) else {
      throw NSError(
        domain: "com.google.firebase.ai",
        code: 0,
        userInfo: [NSLocalizedDescriptionKey: "Could not create mask context."]
      )
    }
    maskContext.setFillColor(gray: 1.0, alpha: 1.0)
    maskContext.fill(CGRect(x: 0, y: 0, width: newDimensions.width, height: newDimensions.height))
    maskContext.setFillColor(gray: 0.0, alpha: 1.0)
    maskContext.fill(originalRect)
    guard let maskCGImage = maskContext.makeImage(), let maskData = maskCGImage.toData() else {
      throw NSError(
        domain: "com.google.firebase.ai",
        code: 0,
        userInfo: [NSLocalizedDescriptionKey: "Could not get mask data."]
      )
    }

    return [ImagenRawImage(data: paddedImageData), ImagenMaskReference(data: maskData)]
  }
}

extension CGImage {
  /// Decodes a `CGImage` from PNG data; returns `nil` for non-PNG input.
  static func fromData(_ data: Data) -> CGImage? {
    guard let provider = CGDataProvider(data: data as CFData) else { return nil }
    return CGImage(pngDataProviderSource: provider, decode: nil, shouldInterpolate: true, intent: .defaultIntent)
  }

  /// Encodes the image as PNG data.
  func toData() -> Data? {
    guard let mutableData = CFDataCreateMutable(nil, 0),
          let destination = CGImageDestinationCreateWithData(mutableData, "public.png" as CFString, 1, nil)
    else { return nil }
    CGImageDestinationAddImage(destination, self, nil)
    guard CGImageDestinationFinalize(destination) else { return nil }
    return mutableData as Data
  }
}
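
To tie the pieces together, a sketch of how this helper could feed an edit request. The surrounding call site is not part of this diff, so makeOutpaintingInstance, the prompt, and the target dimensions are purely illustrative; `image` is assumed to come from a previous Imagen generation.

@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
func makeOutpaintingInstance(from image: ImagenInlineImage) throws -> ImageGenerationInstance {
  let references = try ImagenMaskReference.generateMaskAndPadForOutpainting(
    image: image,
    newDimensions: Dimensions(width: 2048, height: 1024),  // wider canvas
    newPosition: .center                                    // keep the original centered
  )
  // references[0] is the padded ImagenRawImage, references[1] the ImagenMaskReference.
  return ImageGenerationInstance(
    prompt: "Extend the scene naturally on both sides",
    referenceImages: references
  )
}
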