Color keying video with GPUImage on a SCNPlane in ARKit
I am trying to play a video with transparency in an ARSCNView. An SCNPlane serves as the projection surface for the video, and I am trying to color key that video with GPUImage.

I followed this example. Unfortunately, I have not found a way to project the filtered video back onto my videoSpriteKitNode, because the filter renders into a GPUImageView, while an SKVideoNode takes an AVPlayer.

I am not sure whether what I am trying to do is even possible, so if anyone could share their insight I would be very grateful!
import UIKit
import ARKit
import SpriteKit
import AVFoundation
import GPUImage

class ARTransVC: UIViewController {

    @IBOutlet weak var sceneView: ARSCNView!
    let configuration = ARWorldTrackingConfiguration()

    var movie: GPUImageMovie!
    var filter: GPUImageChromaKeyBlendFilter!
    var sourcePicture: GPUImagePicture!
    var player = AVPlayer()
    var gpuImageView: GPUImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        self.sceneView.debugOptions = [ARSCNDebugOptions.showWorldOrigin, ARSCNDebugOptions.showFeaturePoints]
        self.sceneView.session.run(configuration)

        self.gpuImageView = GPUImageView()
        self.gpuImageView.translatesAutoresizingMaskIntoConstraints = false

        // a delay for ARKit to capture the surroundings
        DispatchQueue.main.asyncAfter(deadline: .now() + 3) {
            // A SpriteKit scene to contain the SpriteKit video node
            let spriteKitScene = SKScene(size: CGSize(width: self.sceneView.frame.width, height: self.sceneView.frame.height))
            spriteKitScene.scaleMode = .aspectFit

            // Create a video player, which will be responsible for the playback of the video material
            guard let url = Bundle.main.url(forResource: "FY3A4278", withExtension: "mp4") else { return }
            let playerItem = AVPlayerItem(url: url)
            self.player.replaceCurrentItem(with: playerItem)

            // Chroma key filter setup
            self.filter = GPUImageChromaKeyBlendFilter()
            self.filter.thresholdSensitivity = 0.15
            self.filter.smoothing = 0.3
            self.filter.setColorToReplaceRed(0.322, green: 0.455, blue: 0.831)

            self.movie = GPUImageMovie(playerItem: playerItem)
            self.movie.playAtActualSpeed = true
            self.movie.addTarget(self.filter)
            self.movie.startProcessing()

            let backgroundImage = UIImage(named: "transparent.png")
            self.sourcePicture = GPUImagePicture(image: backgroundImage, smoothlyScaleOutput: true)!
            self.sourcePicture.addTarget(self.filter)
            self.sourcePicture.processImage()

            /// HERE I DON'T KNOW HOW TO CONTINUE?
            self.filter.addTarget(self.gpuImageView)

            // To make the video loop
            self.player.actionAtItemEnd = .none
            NotificationCenter.default.addObserver(
                self,
                selector: #selector(ARTransVC.playerItemDidReachEnd),
                name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
                object: self.player.currentItem)

            // Create the SpriteKit video node, containing the video player
            let videoSpriteKitNode = SKVideoNode(avPlayer: self.player)
            videoSpriteKitNode.position = CGPoint(x: spriteKitScene.size.width / 2.0, y: spriteKitScene.size.height / 2.0)
            videoSpriteKitNode.size = spriteKitScene.size
            videoSpriteKitNode.yScale = -1.0
            videoSpriteKitNode.play()
            spriteKitScene.addChild(videoSpriteKitNode)

            // Create the SceneKit scene
            let scene = SCNScene()
            self.sceneView.scene = scene
            self.sceneView.isPlaying = true

            // Create a SceneKit plane and add the SpriteKit scene as its material
            let background = SCNPlane(width: CGFloat(1), height: CGFloat(1))
            background.firstMaterial?.diffuse.contents = spriteKitScene
            let backgroundNode = SCNNode(geometry: background)
            backgroundNode.geometry?.firstMaterial?.isDoubleSided = true
            backgroundNode.position = SCNVector3(0, 0, -2.0)
            scene.rootNode.addChildNode(backgroundNode)
        }
    }

    @objc func playerItemDidReachEnd(notification: NSNotification) {
        if let playerItem: AVPlayerItem = notification.object as? AVPlayerItem {
            playerItem.seek(to: kCMTimeZero, completionHandler: nil)
        }
    }
}
Try clearing the background and setting the scale mode:

backgroundColor = .clear
scaleMode = .aspectFit
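
A minimal sketch of where these settings go, assuming the SKScene in question is the one used as the SceneKit material (the 1024x1024 size is arbitrary):

// Hypothetical setup: an SKScene whose background becomes transparent
// when the scene is used as the diffuse contents of a SceneKit material.
let spriteKitScene = SKScene(size: CGSize(width: 1024, height: 1024))
spriteKitScene.backgroundColor = .clear // transparent pixels show through the plane
spriteKitScene.scaleMode = .aspectFit   // preserve the video's aspect ratio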
So, I now managed to chroma key and play a now-transparent video in an ARSCNView, but it is still a somewhat incomplete solution.

I abandoned my previous approach and implemented the ChromaKeyMaterial from Lësha Turkowski!

Here it is, tuned to the color I want to key out:
import SceneKit

public class ChromaKeyMaterial: SCNMaterial {

    public var backgroundColor: UIColor {
        didSet { didSetBackgroundColor() }
    }

    public var thresholdSensitivity: Float {
        didSet { didSetThresholdSensitivity() }
    }

    public var smoothing: Float {
        didSet { didSetSmoothing() }
    }

    public init(backgroundColor: UIColor = .green, thresholdSensitivity: Float = 0.15, smoothing: Float = 0.0) {
        self.backgroundColor = backgroundColor
        self.thresholdSensitivity = thresholdSensitivity
        self.smoothing = smoothing

        super.init()

        didSetBackgroundColor()
        didSetThresholdSensitivity()
        didSetSmoothing()

        // chroma key shader is based on GPUImage
        // https://github.com/BradLarson/GPUImage/blob/master/framework/Source/GPUImageChromaKeyFilter.m
        let surfaceShader =
            """
            uniform vec3 c_colorToReplace;
            uniform float c_thresholdSensitivity;
            uniform float c_smoothing;

            #pragma transparent
            #pragma body

            vec3 textureColor = _surface.diffuse.rgb;

            float maskY = 0.2989 * c_colorToReplace.r + 0.5866 * c_colorToReplace.g + 0.1145 * c_colorToReplace.b;
            float maskCr = 0.7132 * (c_colorToReplace.r - maskY);
            float maskCb = 0.5647 * (c_colorToReplace.b - maskY);

            float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b;
            float Cr = 0.7132 * (textureColor.r - Y);
            float Cb = 0.5647 * (textureColor.b - Y);

            float blendValue = smoothstep(c_thresholdSensitivity, c_thresholdSensitivity + c_smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb)));

            float a = blendValue;
            _surface.transparent.a = a;
            """

        shaderModifiers = [
            .surface: surfaceShader,
        ]
    }

    required public init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    // setting background color to be keyed out
    private func didSetBackgroundColor() {
        // getting pixel from background color
        // let rgb = backgroundColor.cgColor.components!.map { Float($0) }
        // let vector = SCNVector3(x: rgb[0], y: rgb[1], z: rgb[2])
        let vector = SCNVector3(x: 0.216, y: 0.357, z: 0.663)
        setValue(vector, forKey: "c_colorToReplace")
    }

    private func didSetSmoothing() {
        setValue(smoothing, forKey: "c_smoothing")
    }

    private func didSetThresholdSensitivity() {
        setValue(thresholdSensitivity, forKey: "c_thresholdSensitivity")
    }
}
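
For reference, a minimal usage sketch (the player here is assumed to be a configured AVPlayer, as in the code below; note that didSetBackgroundColor() above currently ignores backgroundColor in favor of a hard-coded vector, and the commented-out lines show how it could instead be derived from the color, assuming an RGB-backed UIColor):

// Hypothetical usage: key out pure green from a video texture.
let chromaKeyMaterial = ChromaKeyMaterial(backgroundColor: .green,
                                          thresholdSensitivity: 0.15,
                                          smoothing: 0.1)
chromaKeyMaterial.diffuse.contents = player // an AVPlayer as the video source

let keyedPlane = SCNPlane(width: 1.6, height: 0.9) // 16:9 plane, sized in meters
keyedPlane.materials = [chromaKeyMaterial]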
And here is the code that plays the keyed video in ARKit on an SCNPlane:
import UIKit
import ARKit
import SpriteKit
import AVFoundation

class ARTransVC: UIViewController {

    @IBOutlet weak var arSceneView: ARSCNView!
    let configuration = ARWorldTrackingConfiguration()

    private var player: AVPlayer = {
        guard let url = Bundle.main.url(forResource: "FY3A4278", withExtension: "mp4") else { fatalError() }
        return AVPlayer(url: url)
    }()

    override func viewDidLoad() {
        super.viewDidLoad()
        self.arSceneView.debugOptions = [ARSCNDebugOptions.showWorldOrigin, ARSCNDebugOptions.showFeaturePoints]
        self.arSceneView.session.run(configuration)

        // a delay for ARKit to capture the surroundings
        DispatchQueue.main.asyncAfter(deadline: .now() + 3) {
            // A SpriteKit scene to contain the SpriteKit video node
            let spriteKitScene = SKScene(size: CGSize(width: self.arSceneView.frame.width, height: self.arSceneView.frame.height))
            spriteKitScene.scaleMode = .aspectFit
            spriteKitScene.backgroundColor = .clear

            // Create the SpriteKit video node, containing the video player
            let videoSpriteKitNode = SKVideoNode(avPlayer: self.player)
            videoSpriteKitNode.position = CGPoint(x: spriteKitScene.size.width / 2.0, y: spriteKitScene.size.height / 2.0)
            videoSpriteKitNode.size = spriteKitScene.size
            videoSpriteKitNode.yScale = -1.0
            videoSpriteKitNode.play()
            spriteKitScene.addChild(videoSpriteKitNode)

            // To make the video loop
            self.player.actionAtItemEnd = .none
            NotificationCenter.default.addObserver(
                self,
                selector: #selector(ARTransVC.playerItemDidReachEnd),
                name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
                object: self.player.currentItem)

            // Create the SceneKit scene
            let scene = SCNScene()
            self.arSceneView.scene = scene

            // Create a SceneKit plane; the chroma key material (with the
            // player as its diffuse contents) replaces the plane's materials
            let background = SCNPlane(width: CGFloat(1), height: CGFloat(1))
            background.firstMaterial?.diffuse.contents = spriteKitScene
            let chromaKeyMaterial = ChromaKeyMaterial()
            chromaKeyMaterial.diffuse.contents = self.player

            let backgroundNode = SCNNode(geometry: background)
            backgroundNode.geometry?.firstMaterial?.isDoubleSided = true
            backgroundNode.geometry!.materials = [chromaKeyMaterial]
            backgroundNode.position = SCNVector3(0, 0, -2.0)
            scene.rootNode.addChildNode(backgroundNode)

            // The video does not start without delaying the player.
            // Playing it earlier just results in:
            // [SceneKit] Error: Cannot get pixel buffer (CVPixelBufferRef)
            DispatchQueue.main.asyncAfter(deadline: .now() + 1) {
                self.player.seek(to: CMTimeMakeWithSeconds(1, 1000))
                self.player.play()
            }
        }
    }

    @objc func playerItemDidReachEnd(notification: NSNotification) {
        if let playerItem: AVPlayerItem = notification.object as? AVPlayerItem {
            playerItem.seek(to: kCMTimeZero, completionHandler: nil)
        }
    }
}
I got [SceneKit] Error: Cannot get pixel buffer (CVPixelBufferRef), which is apparently an issue in iOS 11.2. For now I have only found a rather incomplete workaround: restarting the video after a one-second delay. A better approach would be much appreciated.
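
A possibly more robust alternative to the fixed delay (only a sketch, untested against this particular SceneKit error) would be to start playback once the AVPlayerItem reports that it is ready, using block-based key-value observation instead of guessing a time:

// Sketch: play only once the item is .readyToPlay instead of after an
// arbitrary delay. The observation token must be kept alive, e.g. as a
// property on the view controller.
var statusObservation: NSKeyValueObservation?

statusObservation = player.currentItem?.observe(\.status, options: [.new]) { item, _ in
    if item.status == .readyToPlay {
        player.play()
    }
}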