CGContext.draw(), CGContext.makeImage() giving me double intended resolution
I've written an extension that renders a [CGImage] array into one large composite image. My bug is that the resulting image comes out at twice the intended resolution. Is this a bytesPerRow issue, or something else?
public extension Array where Element == CGImage {
    func render(cols: Int) -> CGImage? {
        guard count > 0 else { return nil }
        var maxWidth: Int = 0
        var totalHeight: Int = 0
        var currentArrayIndex = 0
        while currentArrayIndex < count {
            var currentRowWidth = 0
            var maxRowHeight = 0
            for _ in 0..<cols {
                currentRowWidth += self[currentArrayIndex].width
                maxRowHeight = max(self[currentArrayIndex].height, maxRowHeight)
                currentArrayIndex += 1
            }
            maxWidth = max(maxWidth, currentRowWidth)
            totalHeight += maxRowHeight
        }
        let size = CGSize(width: maxWidth, height: totalHeight)
        UIGraphicsBeginImageContextWithOptions(size, false, 0.0)
        guard let context = UIGraphicsGetCurrentContext() else { return nil }
        var x: Int = 0
        var y: Int = 0
        var rowMaxHeight = 0
        for image in self {
            context.saveGState()
            context.translateBy(x: 0, y: CGFloat(image.height))
            context.scaleBy(x: 1.0, y: -1.0)
            context.draw(image, in: CGRect(x: x, y: y, width: image.width, height: image.height))
            context.restoreGState()
            rowMaxHeight = max(image.height, rowMaxHeight)
            x += image.width
            if x >= Int(size.width) {
                x = 0
                y -= rowMaxHeight
            }
        }
        let cgImage = context.makeImage()
        UIGraphicsEndImageContext()
        return cgImage
    }

    private func max(_ one: Int, _ two: Int) -> Int {
        if one > two { return one }
        return two
    }
}
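For reference, a call site might look like the sketch below; the array contents and the column count are placeholders, not taken from the question. The point/pixel mismatch shows up when comparing the composite's CGImage dimensions against the computed size:

import UIKit

// Hypothetical usage of the extension above; `tiles` and `cols: 4` are placeholders.
let tiles: [CGImage] = [/* your CGImages here */]
if let sheet = tiles.render(cols: 4) {
    // CGImage.width/.height report pixels, which is where the unexpected 2x appears.
    print("composite: \(sheet.width) x \(sheet.height) px")
}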
UIGraphicsBeginImageContextWithOptions(size, false, 0.0)

Your last parameter, scale, is 0.0.

"If you specify a value of 0.0, the scale factor is set to the scale factor of the device's main screen."
(from the documentation)

That means it is effectively set to CGFloat scale = [[UIScreen mainScreen] scale];

For Retina displays, the scale factor may be 3.0 or 2.0, so one point is represented by nine or four pixels, respectively.

I suggest setting

UIGraphicsBeginImageContextWithOptions(size, false, 1.0)

and checking again.
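A minimal sketch of the suggested fix, keeping the same UIGraphicsBeginImageContextWithOptions API (the makeFlatImage name is illustrative, not from the question): pass an explicit scale of 1.0 so one point maps to one pixel, then verify the pixel size of the result.

import UIKit

func makeFlatImage(size: CGSize) -> CGImage? {
    // An explicit scale of 1.0 keeps one point equal to one pixel; 0.0 would
    // pick up the main screen's 2x/3x Retina scale and inflate the output.
    UIGraphicsBeginImageContextWithOptions(size, false, 1.0)
    defer { UIGraphicsEndImageContext() }
    guard let context = UIGraphicsGetCurrentContext() else { return nil }

    // ... draw the tiles into `context` here, as in the extension above ...

    let cgImage = context.makeImage()

    // Sanity check: CGImage.width/.height are in pixels and should now match
    // the requested point size exactly instead of being 2x or 3x larger.
    assert(cgImage?.width == Int(size.width))
    assert(cgImage?.height == Int(size.height))
    return cgImage
}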