为什么这个 Swift UIImage 函数会溢出我的内存?
Why does this Swift UIImage function overflow my memory?
我正在使用 Swift 为 iPhone 构建一个应用程序 4. 我有一些测试过滤器。两者都可以通过相机的输出正常工作,但是当我从更复杂的图像中创建一组图像时,我的内存溢出到灾难性的比例并使我的应用程序崩溃。
我在下面的一个循环中调用了这个,它溢出了我的记忆:
func rotateHue2(with ciImage: CIImage,
rotatedByHue deltaHueRadians: CGFloat,
orientation:UIImageOrientation?,
screenWidth:CGFloat,
screenHeight:CGFloat) -> UIImage {
let sourceCore = ciImage
let transBG = UIImage(color: .clear, size: CGSize(width: screenWidth, height: screenHeight))
let transBGCI = CIImage(cgImage: (transBG?.cgImage)!)
// Part 1
let gradientPoint0Pos: [CGFloat] = [0, 0]
let inputPoint0Vector = CIVector(values: gradientPoint0Pos, count: gradientPoint0Pos.count)
var gradientPoint1Pos: [CGFloat]
if(orientation == nil){
gradientPoint1Pos = [0, screenWidth*2]
}else{
gradientPoint1Pos = [screenHeight*2, 0]
}
let inputPoint1Vector = CIVector(values: gradientPoint1Pos, count: gradientPoint1Pos.count)
let gradientFilter = CIFilter(name: "CISmoothLinearGradient")
gradientFilter?.setDefaults()
gradientFilter?.setValue(inputPoint0Vector, forKey: "inputPoint0")
gradientFilter?.setValue(inputPoint1Vector, forKey: "inputPoint1")
gradientFilter?.setValue(CIColor.clear, forKey:"inputColor0")
gradientFilter?.setValue(CIColor.black, forKey:"inputColor1")
let gradient = gradientFilter?.outputImage?
.cropped(to: sourceCore.extent)
let hue1 = sourceCore
.applyingFilter("CIHueAdjust", parameters: [kCIInputImageKey: sourceCore,
kCIInputAngleKey: deltaHueRadians])
.cropped(to: sourceCore.extent)
let alphaMaskBlend1 = CIFilter(name: "CIBlendWithAlphaMask",
withInputParameters: [kCIInputImageKey: hue1,
kCIInputBackgroundImageKey: transBGCI,
kCIInputMaskImageKey:gradient!])?.outputImage?
.cropped(to: sourceCore.extent)
// Part 2
let hue2 = sourceCore
.applyingFilter("CIHueAdjust", parameters: [kCIInputImageKey: sourceCore,
kCIInputAngleKey: deltaHueRadians+1.5707])
.cropped(to: sourceCore.extent)
let blendedMasks = hue2
.applyingFilter(compositeOperationFilters[compositeOperationFiltersIndex], parameters: [kCIInputImageKey: alphaMaskBlend1!,
kCIInputBackgroundImageKey: hue2])
.cropped(to: sourceCore.extent)
// Convert the filter output back into a UIImage.
let context = CIContext(options: nil)
let resultRef = context.createCGImage(blendedMasks, from: blendedMasks.extent)
var result:UIImage? = nil
if(orientation != nil){
result = UIImage(cgImage: resultRef!, scale: 1.0, orientation: orientation!)
}else{
result = UIImage(cgImage: resultRef!)
}
return result!
}
每张图片都根据 phone 的方向调整为 1280 或 720 宽。为什么当我的其他图像过滤器工作正常时,这会给我一个内存警告?
只是为了好玩,这是另一个不会让它崩溃的:
func rotateHue(with ciImage: CIImage,
rotatedByHue deltaHueRadians: CGFloat,
orientation:UIImageOrientation?,
screenWidth:CGFloat,
screenHeight:CGFloat) -> UIImage {
// Create a Core Image version of the image.
let sourceCore = ciImage
// Apply a CIHueAdjust filter
let hueAdjust = CIFilter(name: "CIHueAdjust")
hueAdjust?.setDefaults()
hueAdjust?.setValue(sourceCore, forKey: "inputImage")
hueAdjust?.setValue(deltaHueRadians, forKey: "inputAngle")
let resultCore = CIFilter(name: "CIHueAdjust",
withInputParameters: [kCIInputImageKey: sourceCore,
kCIInputAngleKey: deltaHueRadians])?.outputImage?
.cropped(to: sourceCore.extent)
// Convert the filter output back into a UIImage.
let context = CIContext(options: nil)
let resultRef = context.createCGImage(resultCore!, from: (resultCore?.extent)!)
var result:UIImage? = nil
if(orientation != nil){
result = UIImage(cgImage: resultRef!, scale: 1.0, orientation: orientation!)
}else{
result = UIImage(cgImage: resultRef!)
}
return result!
}
首先 你应该做的事情是将你的 CIContext
移出函数并使其尽可能全局化。创建它是对内存的主要使用。
问题不大,为什么每张图片裁剪五次?这可能不是问题所在,但 "feels" 对我来说是错误的。 CIImage
不是图像 - 它更接近 "recipe"。
更紧密地链接事物 - 让下一个过滤器的输入成为前一个过滤器的输出。完成后裁剪。最重要的是,尽可能少地创建 CIContexts
。
我正在使用 Swift 为 iPhone 构建一个应用程序 4. 我有一些测试过滤器。两者都可以通过相机的输出正常工作,但是当我从更复杂的图像中创建一组图像时,我的内存溢出到灾难性的比例并使我的应用程序崩溃。
我在下面的一个循环中调用了这个,它溢出了我的记忆:
func rotateHue2(with ciImage: CIImage,
rotatedByHue deltaHueRadians: CGFloat,
orientation:UIImageOrientation?,
screenWidth:CGFloat,
screenHeight:CGFloat) -> UIImage {
let sourceCore = ciImage
let transBG = UIImage(color: .clear, size: CGSize(width: screenWidth, height: screenHeight))
let transBGCI = CIImage(cgImage: (transBG?.cgImage)!)
// Part 1
let gradientPoint0Pos: [CGFloat] = [0, 0]
let inputPoint0Vector = CIVector(values: gradientPoint0Pos, count: gradientPoint0Pos.count)
var gradientPoint1Pos: [CGFloat]
if(orientation == nil){
gradientPoint1Pos = [0, screenWidth*2]
}else{
gradientPoint1Pos = [screenHeight*2, 0]
}
let inputPoint1Vector = CIVector(values: gradientPoint1Pos, count: gradientPoint1Pos.count)
let gradientFilter = CIFilter(name: "CISmoothLinearGradient")
gradientFilter?.setDefaults()
gradientFilter?.setValue(inputPoint0Vector, forKey: "inputPoint0")
gradientFilter?.setValue(inputPoint1Vector, forKey: "inputPoint1")
gradientFilter?.setValue(CIColor.clear, forKey:"inputColor0")
gradientFilter?.setValue(CIColor.black, forKey:"inputColor1")
let gradient = gradientFilter?.outputImage?
.cropped(to: sourceCore.extent)
let hue1 = sourceCore
.applyingFilter("CIHueAdjust", parameters: [kCIInputImageKey: sourceCore,
kCIInputAngleKey: deltaHueRadians])
.cropped(to: sourceCore.extent)
let alphaMaskBlend1 = CIFilter(name: "CIBlendWithAlphaMask",
withInputParameters: [kCIInputImageKey: hue1,
kCIInputBackgroundImageKey: transBGCI,
kCIInputMaskImageKey:gradient!])?.outputImage?
.cropped(to: sourceCore.extent)
// Part 2
let hue2 = sourceCore
.applyingFilter("CIHueAdjust", parameters: [kCIInputImageKey: sourceCore,
kCIInputAngleKey: deltaHueRadians+1.5707])
.cropped(to: sourceCore.extent)
let blendedMasks = hue2
.applyingFilter(compositeOperationFilters[compositeOperationFiltersIndex], parameters: [kCIInputImageKey: alphaMaskBlend1!,
kCIInputBackgroundImageKey: hue2])
.cropped(to: sourceCore.extent)
// Convert the filter output back into a UIImage.
let context = CIContext(options: nil)
let resultRef = context.createCGImage(blendedMasks, from: blendedMasks.extent)
var result:UIImage? = nil
if(orientation != nil){
result = UIImage(cgImage: resultRef!, scale: 1.0, orientation: orientation!)
}else{
result = UIImage(cgImage: resultRef!)
}
return result!
}
每张图片都根据 phone 的方向调整为 1280 或 720 宽。为什么当我的其他图像过滤器工作正常时,这会给我一个内存警告?
只是为了好玩,这是另一个不会让它崩溃的:
func rotateHue(with ciImage: CIImage,
rotatedByHue deltaHueRadians: CGFloat,
orientation:UIImageOrientation?,
screenWidth:CGFloat,
screenHeight:CGFloat) -> UIImage {
// Create a Core Image version of the image.
let sourceCore = ciImage
// Apply a CIHueAdjust filter
let hueAdjust = CIFilter(name: "CIHueAdjust")
hueAdjust?.setDefaults()
hueAdjust?.setValue(sourceCore, forKey: "inputImage")
hueAdjust?.setValue(deltaHueRadians, forKey: "inputAngle")
let resultCore = CIFilter(name: "CIHueAdjust",
withInputParameters: [kCIInputImageKey: sourceCore,
kCIInputAngleKey: deltaHueRadians])?.outputImage?
.cropped(to: sourceCore.extent)
// Convert the filter output back into a UIImage.
let context = CIContext(options: nil)
let resultRef = context.createCGImage(resultCore!, from: (resultCore?.extent)!)
var result:UIImage? = nil
if(orientation != nil){
result = UIImage(cgImage: resultRef!, scale: 1.0, orientation: orientation!)
}else{
result = UIImage(cgImage: resultRef!)
}
return result!
}
首先 你应该做的事情是将你的 CIContext
移出函数并使其尽可能全局化。创建它是对内存的主要使用。
问题不大,为什么每张图片裁剪五次?这可能不是问题所在,但 "feels" 对我来说是错误的。 CIImage
不是图像 - 它更接近 "recipe"。
更紧密地链接事物 - 让下一个过滤器的输入成为前一个过滤器的输出。完成后裁剪。最重要的是,尽可能少地创建 CIContexts
。