Swift 球体上的视频,黑屏但可以听到声音
Swift video on sphere, black screen but can hear audio
我正在尝试制作一个 360 度视频播放器,方法是将视频投影到球体上,然后将法线从里到外翻转并将相机放在球体内,但是当我尝试时出现黑屏在球体上播放视频,但我能听到音频播放。
override func viewDidLoad() {
super.viewDidLoad()
//create sphere
makeSphere();
//create user-interface
resizeScreen();
}
ViewDidLoad ^ 我调用 resizeScreen();
使视频播放器 'adapt' 显示在屏幕上。
//set variables
var player : AVPlayer? = nil;
var playerLayer : AVPlayerLayer? = nil;
var asset : AVAsset? = nil;
var playerItem : AVPlayerItem? = nil;
var timer = NSTimer();
//start functions
func resizeScreen() {
//Get device screen width & height
var screenSize: CGRect = UIScreen.mainScreen().bounds;
//set screen width and height in variables
var screenWidth = screenSize.width;
var screenHeight = screenSize.height;
//change size of video player
playerLayer?.frame.size.height = screenHeight * 0.75;
playerLayer?.frame.size.width = screenWidth * 1.0;
//change size of button
PlayVid.frame.size.height = screenHeight * 0.75;
PlayVid.frame.size.width = screenWidth * 1;
//set position of text label
TemporaryURL.center.x = screenWidth * 0.5;
TemporaryURL.center.y = screenHeight * 0.9
}
resizeScreen();
,如上所述使视频播放器和按钮适应屏幕大小。
func makeSphere() {
let sceneView = SCNView(frame: self.view.frame);
self.view.addSubview(sceneView);
//Get device screen width & height
var screenSize: CGRect = UIScreen.mainScreen().bounds;
//set screen width and height in variables
var screenWidth = screenSize.width;
var screenHeight = screenSize.height;
//set scene's height, width and x-position
sceneView.frame.size.height = screenHeight * 0.75;
sceneView.frame.size.width = screenWidth * 1;
sceneView.center.x = screenWidth * 0.5;
//create scene
let scene = SCNScene();
sceneView.scene = scene;
//camera positioning
let camera = SCNCamera();
let cameraNode = SCNNode();
cameraNode.camera = camera;
cameraNode.position = SCNVector3(x: -3.0, y: 3.0, z: 3.0);
//lighting
let light = SCNLight();
light.type = SCNLightTypeOmni;
let lightNode = SCNNode();
lightNode.light = light;
lightNode.position = SCNVector3(x: 0, y: 0, z: 0);
//create a sphere
let sphereGeometry = SCNSphere(radius: 2.5);
//create sphere node
let sphereNode = SCNNode(geometry: sphereGeometry);
//create constraint for sphere
let constraint = SCNLookAtConstraint(target: sphereNode);
//constraint.gimbalLockEnabled = true;
cameraNode.constraints = [constraint];
//set nodes
//scene.rootNode.addChildNode(lightNode);
scene.rootNode.addChildNode(cameraNode);
scene.rootNode.addChildNode(sphereNode);
inlineVideo();
player!.play();
//make variable
let videoMaterial = SCNMaterial();
var imgURL = NSURL(string: "http://imgurl");
var videoURLWithPath = "http://videourl";
var videoURL = NSURL(string: videoURLWithPath);
//apply texture to variable
videoMaterial.diffuse.contents = AVPlayerLayer(player: self.player);
videoMaterial.specular.contents = UIColor.redColor();
videoMaterial.shininess = 1.0;
//set texture on object of name sphereGeomtetry
sphereGeometry.materials = [videoMaterial];
}
makeSphere();这将创建球体并为所述球体初始化视频。
func inlineVideo(){
//play inline video
//insert url of video into textfield
var PasteBoard = UIPasteboard.generalPasteboard().string;
TemporaryURL.text = PasteBoard;
//get path of video
//var videoURLWithPath = PasteBoard;
var videoURLWithPath = "http://videourl";
var videoURL = NSURL(string: videoURLWithPath);
if (videoURL == nil){
videoURL = NSURL(string: "http://videourl");
}else{
//do nothing
}
//get video url & set it
asset = AVAsset(URL: videoURL!) as AVAsset;
playerItem = AVPlayerItem(asset:asset!);
//set target for video
player = AVPlayer(playerItem: self.playerItem!);
playerLayer = AVPlayerLayer(player: self.player);
}
这是我用来初始化常规视频的视频播放器代码,我尝试将PlayerLayer绑定到球体的纹理,但我得到的只是黑屏,尽管我听到了声音,为什么投影在球体上的视频不是吗?我认为像这样的东西应该有用吗?
您需要使用 SKVideoNode
在球体上显示视频,请参阅 - 仍然是小问题,修复后会更新。
检查以下链接:
- https://github.com/nomtek/spherical_video_player_ios
- https://github.com/Aralekk/simple360player_iOS
- https://github.com/hanton/HTY360Player
有 3 种不同的方法(例如 OpenGL/SceneKit、shader/fixed-function 管道、swift/obj-c、RGB/YCbCr)所以每个人都应该找到任何方便的代码。
设置播放器并开始播放,然后将其指定为 material 以使其正常工作。
var player = AVPlayer(url: yourURL)
player.play()
node.geo.material.emissive.contents = player
我正在尝试制作一个 360 度视频播放器,方法是将视频投影到球体上,然后将法线从里到外翻转并将相机放在球体内,但是当我尝试时出现黑屏在球体上播放视频,但我能听到音频播放。
override func viewDidLoad() {
super.viewDidLoad()
//create sphere
makeSphere();
//create user-interface
resizeScreen();
}
ViewDidLoad ^ 我调用 resizeScreen();
使视频播放器 'adapt' 显示在屏幕上。
//set variables
var player : AVPlayer? = nil;
var playerLayer : AVPlayerLayer? = nil;
var asset : AVAsset? = nil;
var playerItem : AVPlayerItem? = nil;
var timer = NSTimer();
//start functions
func resizeScreen() {
//Get device screen width & height
var screenSize: CGRect = UIScreen.mainScreen().bounds;
//set screen width and height in variables
var screenWidth = screenSize.width;
var screenHeight = screenSize.height;
//change size of video player
playerLayer?.frame.size.height = screenHeight * 0.75;
playerLayer?.frame.size.width = screenWidth * 1.0;
//change size of button
PlayVid.frame.size.height = screenHeight * 0.75;
PlayVid.frame.size.width = screenWidth * 1;
//set position of text label
TemporaryURL.center.x = screenWidth * 0.5;
TemporaryURL.center.y = screenHeight * 0.9
}
resizeScreen();
,如上所述使视频播放器和按钮适应屏幕大小。
func makeSphere() {
let sceneView = SCNView(frame: self.view.frame);
self.view.addSubview(sceneView);
//Get device screen width & height
var screenSize: CGRect = UIScreen.mainScreen().bounds;
//set screen width and height in variables
var screenWidth = screenSize.width;
var screenHeight = screenSize.height;
//set scene's height, width and x-position
sceneView.frame.size.height = screenHeight * 0.75;
sceneView.frame.size.width = screenWidth * 1;
sceneView.center.x = screenWidth * 0.5;
//create scene
let scene = SCNScene();
sceneView.scene = scene;
//camera positioning
let camera = SCNCamera();
let cameraNode = SCNNode();
cameraNode.camera = camera;
cameraNode.position = SCNVector3(x: -3.0, y: 3.0, z: 3.0);
//lighting
let light = SCNLight();
light.type = SCNLightTypeOmni;
let lightNode = SCNNode();
lightNode.light = light;
lightNode.position = SCNVector3(x: 0, y: 0, z: 0);
//create a sphere
let sphereGeometry = SCNSphere(radius: 2.5);
//create sphere node
let sphereNode = SCNNode(geometry: sphereGeometry);
//create constraint for sphere
let constraint = SCNLookAtConstraint(target: sphereNode);
//constraint.gimbalLockEnabled = true;
cameraNode.constraints = [constraint];
//set nodes
//scene.rootNode.addChildNode(lightNode);
scene.rootNode.addChildNode(cameraNode);
scene.rootNode.addChildNode(sphereNode);
inlineVideo();
player!.play();
//make variable
let videoMaterial = SCNMaterial();
var imgURL = NSURL(string: "http://imgurl");
var videoURLWithPath = "http://videourl";
var videoURL = NSURL(string: videoURLWithPath);
//apply texture to variable
videoMaterial.diffuse.contents = AVPlayerLayer(player: self.player);
videoMaterial.specular.contents = UIColor.redColor();
videoMaterial.shininess = 1.0;
//set texture on object of name sphereGeomtetry
sphereGeometry.materials = [videoMaterial];
}
makeSphere();这将创建球体并为所述球体初始化视频。
func inlineVideo(){
//play inline video
//insert url of video into textfield
var PasteBoard = UIPasteboard.generalPasteboard().string;
TemporaryURL.text = PasteBoard;
//get path of video
//var videoURLWithPath = PasteBoard;
var videoURLWithPath = "http://videourl";
var videoURL = NSURL(string: videoURLWithPath);
if (videoURL == nil){
videoURL = NSURL(string: "http://videourl");
}else{
//do nothing
}
//get video url & set it
asset = AVAsset(URL: videoURL!) as AVAsset;
playerItem = AVPlayerItem(asset:asset!);
//set target for video
player = AVPlayer(playerItem: self.playerItem!);
playerLayer = AVPlayerLayer(player: self.player);
}
这是我用来初始化常规视频的视频播放器代码,我尝试将PlayerLayer绑定到球体的纹理,但我得到的只是黑屏,尽管我听到了声音,为什么投影在球体上的视频不是吗?我认为像这样的东西应该有用吗?
您需要使用 SKVideoNode
在球体上显示视频,请参阅
检查以下链接:
- https://github.com/nomtek/spherical_video_player_ios
- https://github.com/Aralekk/simple360player_iOS
- https://github.com/hanton/HTY360Player
有 3 种不同的方法(例如 OpenGL/SceneKit、shader/fixed-function 管道、swift/obj-c、RGB/YCbCr)所以每个人都应该找到任何方便的代码。
设置播放器并开始播放,然后将其指定为 material 以使其正常工作。
var player = AVPlayer(url: yourURL)
player.play()
node.geo.material.emissive.contents = player