iOS: Save image with name
I have an app that takes a photo and merges it with another image. I want to be able to display all of the images taken within the app (and only my app) in a gallery view. The problem I am having is giving the images a name.
I have tried a number of different approaches.
First attempt
UIImageWriteToSavedPhotosAlbum(newImage, nil, nil, nil)
This saves the picture, but I don't know how to retrieve it afterwards.
Second attempt
PHPhotoLibrary.sharedPhotoLibrary().performChanges({
let assetRequest = PHAssetChangeRequest.creationRequestForAssetFromImage(newImage)
}, completionHandler: { (success, error) -> Void in
if success {
//image saved to photos library.
print("image saved")
}
});
Same problem as the first attempt.
Third attempt
let date :NSDate = NSDate()
let dateFormatter = NSDateFormatter()
dateFormatter.dateFormat = "yyyy-MM-dd'_'HH:mm:ss"
dateFormatter.timeZone = NSTimeZone(name: "GMT")
let imageName = "\(dateFormatter.stringFromDate(date)).jpg"
//var paths: [AnyObject] = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, true)
var documentsDirectoryPath = getDocumentsURL().relativePath
documentsDirectoryPath! += imageName
let settingsData: NSData = UIImageJPEGRepresentation(newImage, 1.0)!
settingsData.writeToFile(documentsDirectoryPath!, atomically: true)
This seems the most promising, but the file never actually saves.
I have looked at the various answers on here, but I still cannot find a solution.
Add a slash as the first character of the imageName string, like this:
let imageName = "/\(dateFormatter.stringFromDate(date)).jpg"
Otherwise the path will be "..../Documentsmyimagename", but it should be "..../Documents/myimagename".
That is, without the added slash the image gets saved into the folder above "Documents".
I would also recommend not using the colon character in file names; on a Mac, colons are not allowed and are automatically replaced.
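For comparison, here is a minimal sketch (modern Swift / Swift 3 syntax, not the asker's original code) that builds the same path with URL's appendingPathComponent, which inserts the separator for you, so the missing-slash problem cannot happen; the file name is just an example value:
import Foundation
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
// appendingPathComponent adds the "/" between ".../Documents" and the file name
let fileURL = documentsURL.appendingPathComponent("2017-01-01_12_00_00.jpg")
print(fileURL.path) // ".../Documents/2017-01-01_12_00_00.jpg"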
Here is the code I tested in a new Xcode project, and it works for me (I placed a file "myimage.png" in the root folder of the project):
let newImage = UIImage.init(named: "myimage")
let date :NSDate = NSDate()
let dateFormatter = NSDateFormatter()
//dateFormatter.dateFormat = "yyyy-MM-dd'_'HH:mm:ss"
dateFormatter.dateFormat = "yyyy-MM-dd'_'HH_mm_ss"
dateFormatter.timeZone = NSTimeZone(name: "GMT")
let imageName = "/\(dateFormatter.stringFromDate(date)).jpg"
print(imageName)
//var paths: [AnyObject] = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, true)
//var documentsDirectoryPath = getDocumentsURL().relativePath
var documentsDirectoryPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
documentsDirectoryPath += imageName
print(documentsDirectoryPath)
let settingsData: NSData = UIImageJPEGRepresentation(newImage!, 1.0)!
settingsData.writeToFile(documentsDirectoryPath, atomically: true)
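Once the images have been written into the Documents directory this way, getting them back for a gallery is just a matter of listing that directory and loading each file. A minimal sketch (modern Swift / Swift 3 syntax, assuming the ".jpg" naming used above):
import UIKit
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let fileURLs = (try? FileManager.default.contentsOfDirectory(at: documentsURL, includingPropertiesForKeys: nil)) ?? []
var galleryImages = [UIImage]()
for url in fileURLs where url.pathExtension == "jpg" {
    // Only files this app wrote end up in its own Documents directory
    if let image = UIImage(contentsOfFile: url.path) {
        galleryImages.append(image)
    }
}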
SWIFT 3 :)
import UIKit
class ShowImageViewController: UIViewController {
@IBOutlet weak var mainImageView: UIImageView!
@IBOutlet weak var tempImageView: UIImageView!
private var lastPoint = CGPoint.zero
private var red: CGFloat = 0.0
private var green: CGFloat = 0.0
private var blue: CGFloat = 0.0
private var brushWidth: CGFloat = 10.0
private var opacity: CGFloat = 1.0
private var swiped = false
override func viewDidLoad() {
super.viewDidLoad()
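// captureImages and selectedImageindex are assumed to come from elsewhere in the answerer's project (not shown here)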
mainImageView.image = captureImages[selectedImageindex]
tempImageView.frame = getImageRect(for: mainImageView)
// Do any additional setup after loading the view.
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
swiped = false
if let touch = touches.first {
lastPoint = touch.location(in: tempImageView)
}
}
override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
swiped = true
if let touch = touches.first {
let currentPoint = touch.location(in: tempImageView)
drawLineFrom(fromPoint: lastPoint, toPoint: currentPoint)
lastPoint = currentPoint
}
}
override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
if !swiped {
drawLineFrom(fromPoint: lastPoint, toPoint: lastPoint)
}
// Merge tempImageView into mainImageView
UIGraphicsBeginImageContext(mainImageView.frame.size)
mainImageView.image?.draw(in: CGRect(x: 0, y: 0, width: mainImageView.frame.size.width, height: mainImageView.frame.size.height), blendMode: .normal, alpha: 1.0)
tempImageView.image?.draw(in: CGRect(x: 0, y: 0, width: mainImageView.frame.size.width, height: mainImageView.frame.size.height), blendMode: .normal, alpha: opacity)
mainImageView.image = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
tempImageView.image = nil
}
func drawLineFrom(fromPoint: CGPoint, toPoint: CGPoint) {
UIGraphicsBeginImageContext(mainImageView.frame.size)
let context = UIGraphicsGetCurrentContext()
tempImageView.image?.draw(in: CGRect(x: 0, y: 0, width: mainImageView.frame.size.width, height: mainImageView.frame.size.height))
context!.move(to: CGPoint(x: fromPoint.x, y: fromPoint.y))
context!.addLine(to: CGPoint(x: toPoint.x, y: toPoint.y))
context!.setLineCap(.round)
context!.setLineWidth(brushWidth)
context!.setStrokeColor(red: red, green: green, blue: blue, alpha: 1.0)
context!.setBlendMode(.normal)
context!.strokePath()
tempImageView.image = UIGraphicsGetImageFromCurrentImageContext()
tempImageView.alpha = opacity
UIGraphicsEndImageContext()
}
func getImageRect(for imageView: UIImageView) -> CGRect {
let resVi = (imageView.image?.size.width)! / (imageView.image?.size.height)!
let resPl = imageView.frame.size.width / imageView.frame.size.height
if resPl > resVi {
let imageSize = CGSize(width: CGFloat((imageView.image?.size.width)! * imageView.frame.size.height / (imageView.image?.size.height)!), height: CGFloat(imageView.frame.size.height))
return CGRect(x: CGFloat((imageView.frame.size.width - imageSize.width) / 2), y: CGFloat((imageView.frame.size.height - imageSize.height) / 2), width: CGFloat(imageSize.width), height: CGFloat(imageSize.height))
}
else {
let imageSize = CGSize(width: CGFloat(imageView.frame.size.width), height: CGFloat((imageView.image?.size.height)! * imageView.frame.size.width / (imageView.image?.size.width)!))
return CGRect(x: CGFloat((imageView.frame.size.width - imageSize.width) / 2), y: CGFloat((imageView.frame.size.height - imageSize.height) / 2), width: CGFloat(imageSize.width), height: CGFloat(imageSize.height))
}
}
}
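The Swift 3 example above only merges the drawing into mainImageView; if you also want to persist the result under a name, a small helper along the lines of the Documents-directory approach from the first answer could be added to the same class. This is only a sketch (the method name is mine, not part of the original answer):
func saveMergedImage() {
    guard let image = mainImageView.image,
        let data = UIImageJPEGRepresentation(image, 1.0) else { return }
    // Timestamped file name, avoiding ":" as discussed above
    let formatter = DateFormatter()
    formatter.dateFormat = "yyyy-MM-dd'_'HH_mm_ss"
    let fileName = formatter.string(from: Date()) + ".jpg"
    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let fileURL = documentsURL.appendingPathComponent(fileName)
    do {
        try data.write(to: fileURL, options: .atomic)
    } catch {
        print("Could not save image: \(error)")
    }
}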