使用 Swift 从视频中抓取帧
Grab frames from video using Swift
我想在特定时间从视频中抓取帧。我正在调用我的 grab-frame-function,时间指定为 Float64 秒。
问题是它没有抓取准确的帧。它似乎忽略了小数点。如果我用例如 1.22 和 1.70 调用该函数,它将返回相同的帧。我对 Swift 很陌生,所以我想我没有正确构造 CMTime 对象。那么有人能看出这有什么问题吗?
// Original (pre-Swift-3) question code.
// Grabs a single frame from the video at `url` at `fromTime` seconds.
// BUG being asked about: AVAssetImageGenerator's default requestedTimeTolerance
// is kCMTimePositiveInfinity, so nearby times (e.g. 1.22 and 1.70) snap to the
// same keyframe — the fractional seconds appear to be "ignored".
// NOTE(review): returns a non-optional UIImage and force-unwraps; a failed
// frame copy will crash at runtime.
func generateThumnail(url : NSURL, fromTime:Float64) -> UIImage {
var asset :AVAsset = AVAsset.assetWithURL(url) as! AVAsset
var assetImgGenerate : AVAssetImageGenerator = AVAssetImageGenerator(asset: asset)
// Rotate the frame to match the video track's preferred orientation.
assetImgGenerate.appliesPreferredTrackTransform = true
var error : NSError? = nil
// 600 is a common timescale that divides evenly by typical frame rates (24/25/30).
var time : CMTime = CMTimeMakeWithSeconds(fromTime, 600)
var img : CGImageRef = assetImgGenerate.copyCGImageAtTime(time, actualTime: nil, error: &error)
var frameImg : UIImage = UIImage(CGImage: img)!
return frameImg
}
// Example call: requests the frame at 1.22 s.
var grabTime = 1.22
img = generateThumnail(urlVideo, fromTime: Float64(grabTime))
感谢 @eric-d 发现了这个帖子:
iOS Take Multiple Screen Shots
我设法发现添加:
assetImgGenerate.requestedTimeToleranceAfter = kCMTimeZero;
assetImgGenerate.requestedTimeToleranceBefore = kCMTimeZero;
...我的函数就可以了。
我更新后的函数如下所示:
// Accepted fix (pre-Swift-3 syntax).
// Setting both time tolerances to zero forces the generator to decode the frame
// at the exact requested time instead of snapping to the nearest keyframe,
// which is why 1.22 and 1.70 previously returned the same image.
func generateThumnail(url : NSURL, fromTime:Float64) -> UIImage {
var asset :AVAsset = AVAsset.assetWithURL(url) as! AVAsset
var assetImgGenerate : AVAssetImageGenerator = AVAssetImageGenerator(asset: asset)
assetImgGenerate.appliesPreferredTrackTransform = true
// The fix: exact-time frame extraction (slower, since it may decode from the
// previous keyframe forward, but frame-accurate).
assetImgGenerate.requestedTimeToleranceAfter = kCMTimeZero;
assetImgGenerate.requestedTimeToleranceBefore = kCMTimeZero;
var error : NSError? = nil
var time : CMTime = CMTimeMakeWithSeconds(fromTime, 600)
var img : CGImageRef = assetImgGenerate.copyCGImageAtTime(time, actualTime: nil, error: &error)
var frameImg : UIImage = UIImage(CGImage: img)!
return frameImg
}
// Example call: now returns the frame at exactly 1.22 s.
var grabTime = 1.22
img = generateThumnail(urlVideo, fromTime: Float64(grabTime))
我将 arpo 的答案合并到我的项目中,更新为 Swift 3:
/// Swift 3 version: grabs the frame at exactly `fromTime` seconds from the
/// video at `url`.
///
/// - Parameters:
///   - url: Location of the video asset.
///   - fromTime: Capture time in seconds.
/// - Returns: The frame as a `UIImage`, or `nil` if the frame could not be copied.
fileprivate func generateThumnail(url : URL, fromTime:Float64) -> UIImage? {
    let asset :AVAsset = AVAsset(url: url)
    let assetImgGenerate : AVAssetImageGenerator = AVAssetImageGenerator(asset: asset)
    assetImgGenerate.appliesPreferredTrackTransform = true
    // Zero tolerance forces frame-accurate extraction instead of snapping to
    // the nearest keyframe.
    assetImgGenerate.requestedTimeToleranceAfter = kCMTimeZero
    assetImgGenerate.requestedTimeToleranceBefore = kCMTimeZero
    let time : CMTime = CMTimeMakeWithSeconds(fromTime, 600)
    if let img = try? assetImgGenerate.copyCGImage(at: time, actualTime: nil) {
        // Fix: `img` is already a non-optional CGImage after `if let`; the
        // original `img!` force-unwrap did not compile.
        return UIImage(cgImage: img)
    } else {
        return nil
    }
}
对于 Swift 4.2:
/// Swift 4.2 version: extracts the single video frame at exactly `fromTime`
/// seconds from the asset at `url`.
///
/// - Parameters:
///   - url: Location of the video asset.
///   - fromTime: Capture time in seconds.
/// - Returns: The requested frame, or `nil` if it could not be copied.
fileprivate func generateThumnail(url : URL, fromTime:Float64) -> UIImage? {
    let generator = AVAssetImageGenerator(asset: AVAsset(url: url))
    generator.appliesPreferredTrackTransform = true
    // Frame-accurate extraction: do not snap to the nearest keyframe.
    generator.requestedTimeToleranceAfter = CMTime.zero
    generator.requestedTimeToleranceBefore = CMTime.zero
    let captureTime = CMTimeMakeWithSeconds(fromTime, preferredTimescale: 600)
    do {
        let frame = try generator.copyCGImage(at: captureTime, actualTime: nil)
        return UIImage(cgImage: frame)
    } catch {
        return nil
    }
}
我想在特定时间从视频中抓取帧。我正在调用我的抓帧函数,时间指定为 Float64 秒。 问题是它没有抓取准确的帧。它似乎忽略了小数点。如果我用例如 1.22 和 1.70 调用该函数,它将返回相同的帧。我对 Swift 很陌生,所以我想我没有正确构造 CMTime 对象。那么有人能看出这有什么问题吗?
// Original (pre-Swift-3) question code — duplicate of the snippet above.
// BUG being asked about: AVAssetImageGenerator's default requestedTimeTolerance
// is kCMTimePositiveInfinity, so nearby times snap to the same keyframe and the
// fractional seconds appear to be "ignored".
// NOTE(review): returns a non-optional UIImage and force-unwraps; a failed
// frame copy will crash at runtime.
func generateThumnail(url : NSURL, fromTime:Float64) -> UIImage {
var asset :AVAsset = AVAsset.assetWithURL(url) as! AVAsset
var assetImgGenerate : AVAssetImageGenerator = AVAssetImageGenerator(asset: asset)
// Rotate the frame to match the video track's preferred orientation.
assetImgGenerate.appliesPreferredTrackTransform = true
var error : NSError? = nil
// 600 is a common timescale that divides evenly by typical frame rates (24/25/30).
var time : CMTime = CMTimeMakeWithSeconds(fromTime, 600)
var img : CGImageRef = assetImgGenerate.copyCGImageAtTime(time, actualTime: nil, error: &error)
var frameImg : UIImage = UIImage(CGImage: img)!
return frameImg
}
// Example call: requests the frame at 1.22 s.
var grabTime = 1.22
img = generateThumnail(urlVideo, fromTime: Float64(grabTime))
感谢 @eric-d 发现了这个帖子:iOS Take Multiple Screen Shots
我设法发现添加:
assetImgGenerate.requestedTimeToleranceAfter = kCMTimeZero;
assetImgGenerate.requestedTimeToleranceBefore = kCMTimeZero;
...我的函数就可以了。
我更新后的函数如下所示:
// Accepted fix (pre-Swift-3 syntax) — duplicate of the snippet above.
// Setting both time tolerances to zero forces the generator to decode the frame
// at the exact requested time instead of snapping to the nearest keyframe.
func generateThumnail(url : NSURL, fromTime:Float64) -> UIImage {
var asset :AVAsset = AVAsset.assetWithURL(url) as! AVAsset
var assetImgGenerate : AVAssetImageGenerator = AVAssetImageGenerator(asset: asset)
assetImgGenerate.appliesPreferredTrackTransform = true
// The fix: exact-time frame extraction (slower, since it may decode from the
// previous keyframe forward, but frame-accurate).
assetImgGenerate.requestedTimeToleranceAfter = kCMTimeZero;
assetImgGenerate.requestedTimeToleranceBefore = kCMTimeZero;
var error : NSError? = nil
var time : CMTime = CMTimeMakeWithSeconds(fromTime, 600)
var img : CGImageRef = assetImgGenerate.copyCGImageAtTime(time, actualTime: nil, error: &error)
var frameImg : UIImage = UIImage(CGImage: img)!
return frameImg
}
// Example call: now returns the frame at exactly 1.22 s.
var grabTime = 1.22
img = generateThumnail(urlVideo, fromTime: Float64(grabTime))
我将 arpo 的答案合并到我的项目中,更新为 Swift 3:
/// Swift 3 version: grabs the frame at exactly `fromTime` seconds from the
/// video at `url`.
///
/// - Parameters:
///   - url: Location of the video asset.
///   - fromTime: Capture time in seconds.
/// - Returns: The frame as a `UIImage`, or `nil` if the frame could not be copied.
fileprivate func generateThumnail(url : URL, fromTime:Float64) -> UIImage? {
    let asset :AVAsset = AVAsset(url: url)
    let assetImgGenerate : AVAssetImageGenerator = AVAssetImageGenerator(asset: asset)
    assetImgGenerate.appliesPreferredTrackTransform = true
    // Zero tolerance forces frame-accurate extraction instead of snapping to
    // the nearest keyframe.
    assetImgGenerate.requestedTimeToleranceAfter = kCMTimeZero
    assetImgGenerate.requestedTimeToleranceBefore = kCMTimeZero
    let time : CMTime = CMTimeMakeWithSeconds(fromTime, 600)
    if let img = try? assetImgGenerate.copyCGImage(at: time, actualTime: nil) {
        // Fix: `img` is already a non-optional CGImage after `if let`; the
        // original `img!` force-unwrap did not compile.
        return UIImage(cgImage: img)
    } else {
        return nil
    }
}
对于 Swift 4.2:
/// Swift 4.2 version: extracts the single video frame at exactly `fromTime`
/// seconds from the asset at `url`.
///
/// - Parameters:
///   - url: Location of the video asset.
///   - fromTime: Capture time in seconds.
/// - Returns: The requested frame, or `nil` if it could not be copied.
fileprivate func generateThumnail(url : URL, fromTime:Float64) -> UIImage? {
    let imageGenerator = AVAssetImageGenerator(asset: AVAsset(url: url))
    imageGenerator.appliesPreferredTrackTransform = true
    // Frame-accurate extraction: do not snap to the nearest keyframe.
    imageGenerator.requestedTimeToleranceAfter = CMTime.zero
    imageGenerator.requestedTimeToleranceBefore = CMTime.zero
    let requestedTime = CMTimeMakeWithSeconds(fromTime, preferredTimescale: 600)
    guard let cgFrame = try? imageGenerator.copyCGImage(at: requestedTime, actualTime: nil) else {
        return nil
    }
    return UIImage(cgImage: cgFrame)
}