I am very new to Swift, and I am trying to create a grayscale filter using the mean method (averaging the red, green, and blue channels). This is the RGBAImage.swift file I am using:
import UIKit

public struct Pixel {
    // All four 8-bit channels packed into one 32-bit value:
    // red in the lowest byte, then green, blue, and alpha in the highest byte.
    public var value: UInt32

    public var red: UInt8 {
        get {
            return UInt8(value & 0xFF)
        }
        set {
            value = UInt32(newValue) | (value & 0xFFFFFF00)
        }
    }

    public var green: UInt8 {
        get {
            return UInt8((value >> 8) & 0xFF)
        }
        set {
            value = (UInt32(newValue) << 8) | (value & 0xFFFF00FF)
        }
    }

    public var blue: UInt8 {
        get {
            return UInt8((value >> 16) & 0xFF)
        }
        set {
            value = (UInt32(newValue) << 16) | (value & 0xFF00FFFF)
        }
    }

    public var alpha: UInt8 {
        get {
            return UInt8((value >> 24) & 0xFF)
        }
        set {
            value = (UInt32(newValue) << 24) | (value & 0x00FFFFFF)
        }
    }
}

public struct RGBAImage {
    public var pixels: UnsafeMutableBufferPointer<Pixel>
    public var width: Int
    public var height: Int

    public init?(image: UIImage) {
        guard let cgImage = image.CGImage else { return nil }

        // Redraw the image into a bitmap context with a known pixel format (RGBA, 8 bits per channel)
        let colorSpace = CGColorSpaceCreateDeviceRGB()
        var bitmapInfo: UInt32 = CGBitmapInfo.ByteOrder32Big.rawValue
        bitmapInfo |= CGImageAlphaInfo.PremultipliedLast.rawValue & CGBitmapInfo.AlphaInfoMask.rawValue
        width = Int(image.size.width)
        height = Int(image.size.height)
        let bytesPerRow = width * 4

        let imageData = UnsafeMutablePointer<Pixel>.alloc(width * height)
        guard let imageContext = CGBitmapContextCreate(imageData, width, height, 8, bytesPerRow, colorSpace, bitmapInfo) else { return nil }
        CGContextDrawImage(imageContext, CGRect(origin: CGPointZero, size: image.size), cgImage)
        pixels = UnsafeMutableBufferPointer<Pixel>(start: imageData, count: width * height)
    }

    public func toUIImage() -> UIImage? {
        let colorSpace = CGColorSpaceCreateDeviceRGB()
        var bitmapInfo: UInt32 = CGBitmapInfo.ByteOrder32Big.rawValue
        bitmapInfo |= CGImageAlphaInfo.PremultipliedLast.rawValue & CGBitmapInfo.AlphaInfoMask.rawValue
        let bytesPerRow = width * 4

        let imageContext = CGBitmapContextCreateWithData(pixels.baseAddress, width, height, 8, bytesPerRow, colorSpace, bitmapInfo, nil, nil)
        guard let cgImage = CGBitmapContextCreateImage(imageContext) else { return nil }
        return UIImage(CGImage: cgImage)
    }
}
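For reference, Pixel packs the four 8-bit channels into a single UInt32: red in the lowest byte, then green and blue, and alpha in the highest byte. A quick sanity check of that layout (my own illustration, assuming the struct above):

    var p = Pixel(value: 0)
    p.red = 0xFF
    p.alpha = 0x80
    print(String(p.value, radix: 16)) // prints "800000ff": alpha in bits 24-31, red in bits 0-7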
And this is my code:
import UIKit

let image = UIImage(named: "sample")!
let myRGBA = RGBAImage(image: image)!

struct grayscale_mean_method {
    var result: UIImage {
        for y in 0..<myRGBA.height {
            for x in 0..<myRGBA.width {
                let index = y * myRGBA.width + x
                var pixel = myRGBA.pixels[index]
                let intensity = (pixel.blue + pixel.green + pixel.red) / 3
                let gray = UInt8(max(0, min(255, intensity)))
                pixel.red = gray
                pixel.green = gray
                pixel.blue = gray
                myRGBA.pixels[index] = pixel
            }
        }
        return myRGBA.toUIImage()!
    }
}

var newImage = grayscale_mean_method.init().result
But I am getting a runtime error on the line

    let intensity = (pixel.blue + pixel.green + pixel.red) / 3

Execution was interrupted, reason: EXC_BAD_INSTRUCTION (code=EXC_I386_INVOP, subcode=0x0)

I searched but didn't find a solution. Any idea what I am doing wrong?
In

    let intensity = (pixel.blue + pixel.green + pixel.red) / 3

all operands of the addition have the type UInt8, and the addition overflows (with a runtime exception) if the result does not fit into the range of UInt8. To solve the problem, convert all values to Int before computing the average:

    let intensity = (Int(pixel.blue) + Int(pixel.green) + Int(pixel.red)) / 3
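With that fix, the loop body could look like this (a sketch based on the code in the question; note that the max/min clamp becomes unnecessary, because the mean of three values in 0...255 always fits into a UInt8):

    for y in 0..<myRGBA.height {
        for x in 0..<myRGBA.width {
            let index = y * myRGBA.width + x
            var pixel = myRGBA.pixels[index]
            // Widen each channel to Int first so the sum cannot overflow
            let intensity = (Int(pixel.blue) + Int(pixel.green) + Int(pixel.red)) / 3
            let gray = UInt8(intensity) // always in 0...255, so this conversion cannot trap
            pixel.red = gray
            pixel.green = gray
            pixel.blue = gray
            myRGBA.pixels[index] = pixel
        }
    }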