ArCore Sceneform: Play an .mp4 video when an image is detected
When I detect an image, I want to place a line of text and a video above it. The text view is placed in the scene, but the video is not; it just gets added to the middle of my main layout.
I am using the VideoView component, and I am not sure whether that is the problem.
override fun onCreate(savedInstanceState: Bundle?) {
    (....)
    arFragment!!.arSceneView.scene.addOnUpdateListener { this.onUpdateFrame(it) }
    arSceneView = arFragment!!.arSceneView
}

private fun onUpdateFrame(frameTime: FrameTime) {
    val frame = arFragment!!.arSceneView.arFrame
    val augmentedImages = frame.getUpdatedTrackables(AugmentedImage::class.java)

    for (augmentedImage in augmentedImages) {
        if (augmentedImage.trackingState == TrackingState.TRACKING) {
            if (augmentedImage.name.contains("car") && !modelCarAdded) {
                renderView(arFragment!!,
                    augmentedImage.createAnchor(augmentedImage.centerPose))
                modelCarAdded = true
            }
        }
    }
}
text_info is just a TextView, and video_youtube is a RelativeLayout containing a VideoView.
private fun renderView(fragment: ArFragment, anchor: Anchor) {
    // WORKING
    ViewRenderable.builder()
        .setView(this, R.layout.text_info)
        .build()
        .thenAccept { renderable ->
            (renderable.view as TextView).text = "Example"
            addNodeToScene(fragment, anchor, renderable, Vector3(0f, 0.2f, 0f))
        }
        .exceptionally { throwable ->
            val builder = AlertDialog.Builder(this)
            builder.setMessage(throwable.message)
                .setTitle("Error!")
            val dialog = builder.create()
            dialog.show()
            null
        }

    // NOT WORKING
    ViewRenderable.builder()
        .setView(this, R.layout.video_youtube)
        .build()
        .thenAccept { renderable ->
            val view = renderable.view
            videoRenderable = renderable
            val path = "android.resource://" + packageName + "/" + R.raw.googlepixel
            view.video_player.setVideoURI(Uri.parse(path))
            renderable.material.setExternalTexture("videoTexture", texture)
            val videoNode = addNodeToScene(fragment, anchor, renderable, Vector3(0.2f, 0.5f, 0f))
            if (!view.video_player.isPlaying) {
                view.video_player.start()
                texture
                    .surfaceTexture
                    .setOnFrameAvailableListener {
                        videoNode.renderable = videoRenderable
                        texture.surfaceTexture.setOnFrameAvailableListener(null)
                    }
            } else {
                videoNode.renderable = videoRenderable
            }
        }
        .exceptionally { throwable ->
            null
        }
}

private fun addNodeToScene(fragment: ArFragment, anchor: Anchor, renderable: Renderable, vector3: Vector3): Node {
    val anchorNode = AnchorNode(anchor)
    val node = TransformableNode(fragment.transformationSystem)
    node.renderable = renderable
    node.setParent(anchorNode)
    node.localPosition = vector3
    fragment.arSceneView.scene.addChild(anchorNode)
    return node
}
I tried the chroma key video sample, but I don't want the white parts of the video to be transparent. I am also not sure whether I need a model (.sfb) to display the video.
I used the ChromaKey sample as a starting point. A model is indeed needed: the sample renders the video onto a flat model (.sfb) whose material samples an ExternalTexture, rather than putting a VideoView inside a ViewRenderable.
First, I changed the custom material used for the video, adding a flag that disables the chroma key filtering.
material {
    "name" : "Chroma Key Video Material",
    "defines" : [
        "baseColor"
    ],
    "parameters" : [
        {
            // The texture displaying the frames of the video.
            "type" : "samplerExternal",
            "name" : "videoTexture"
        },
        {
            // The color to filter out of the video.
            "type" : "float4",
            "name" : "keyColor"
        },
        {
            // Set to true to show the video unfiltered.
            "type" : "bool",
            "name" : "disableChromaKey"
        }
    ],
    "requires" : [
        "position",
        "uv0"
    ],
    "shadingModel" : "unlit",
    // Blending is "masked" instead of "transparent" so that the shadows account for the
    // transparent regions of the video instead of just the shape of the mesh.
    "blending" : "masked",
    // Material is double sided so that the video is visible when walking behind it.
    "doubleSided" : true
}
fragment {
    vec3 desaturate(vec3 color, float amount) {
        // Convert color to grayscale using Luma formula:
        // https://en.wikipedia.org/wiki/Luma_%28video%29
        vec3 gray = vec3(dot(vec3(0.2126, 0.7152, 0.0722), color));
        return vec3(mix(color, gray, amount));
    }

    void material(inout MaterialInputs material) {
        prepareMaterial(material);

        vec2 uv = getUV0();
        if (!gl_FrontFacing) {
            uv.x = 1.0 - uv.x;
        }

        vec4 color = texture(materialParams_videoTexture, uv).rgba;

        if (!materialParams.disableChromaKey) {
            vec3 keyColor = materialParams.keyColor.rgb;
            float threshold = 0.675;
            float slope = 0.2;
            float distance = abs(length(abs(keyColor - color.rgb)));
            float edge0 = threshold * (1.0 - slope);
            float alpha = smoothstep(edge0, threshold, distance);
            color.rgb = desaturate(color.rgb, 1.0 - (alpha * alpha * alpha));

            material.baseColor.a = alpha;
            material.baseColor.rgb = inverseTonemapSRGB(color.rgb);
            material.baseColor.rgb *= material.baseColor.a;
        } else {
            material.baseColor = color;
        }
    }
}
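With disableChromaKey set to true, the shader takes the else branch and writes the raw video color straight to baseColor, so no pixels are keyed out or desaturated.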
Then, in the .sfa file, set `disableChromaKey` to true:
materials: [
    {
        name: 'DefaultMaterial',
        parameters: [
            {
                videoTexture: {
                    external_path: 'MISSING_PATH',
                },
            },
            {
                keyColor: [0, 0, 0, 0],
            },
            {
                disableChromaKey: true,
            },
        ],
        source: 'sampledata/models/chroma_key_video_material.mat',
    },
],
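For completeness, this is roughly how the compiled asset gets wired up in code. A minimal sketch, assuming the .sfa above is built into R.raw.chroma_key_video and that texture is the ExternalTexture feeding the MediaPlayer (both names are illustrative):

// Sketch: load the .sfb produced from the .sfa above (asset name assumed).
ModelRenderable.builder()
    .setSource(this, R.raw.chroma_key_video)
    .build()
    .thenAccept(renderable -> {
        videoRenderable = renderable;
        // Route the video frames into the material's external sampler.
        renderable.getMaterial().setExternalTexture("videoTexture", texture);
        // The flag can also be flipped at runtime instead of in the .sfa.
        renderable.getMaterial().setBoolean("disableChromaKey", true);
    });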
Then I place the video node at an anchor obtained from a hit test, with a ViewRenderable node above it for the text.
private Node createVideoDisplay(final AnchorNode parent, Vector3 localPosition, String title) {
    // Create a node to render the video and add it to the anchor.
    Node videoNode = new Node();
    videoNode.setParent(parent);
    videoNode.setLocalPosition(localPosition);

    // Set the scale of the node so that the aspect ratio of the video is correct.
    float videoWidth = mediaPlayer.getVideoWidth();
    float videoHeight = mediaPlayer.getVideoHeight();
    videoNode.setLocalScale(
        new Vector3(
            VIDEO_HEIGHT_METERS * (videoWidth / videoHeight),
            VIDEO_HEIGHT_METERS, 1.0f));

    // Place the text above the video.
    final float videoNodeHeight = VIDEO_HEIGHT_METERS + localPosition.y;
    ViewRenderable.builder()
        .setView(this, R.layout.video_title)
        .build()
        .thenAccept(viewRenderable -> {
            Node titleNode = new Node();
            titleNode.setLocalPosition(new Vector3(0, videoNodeHeight, 0));
            titleNode.setParent(parent);
            titleNode.setRenderable(viewRenderable);
            ((TextView) viewRenderable.getView().findViewById(R.id.video_text))
                .setText(title);
        });
    return videoNode;
}
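The video node only becomes visible once its renderable is attached, which the ChromaKey sample defers until the first video frame arrives so the plane never flashes black. A sketch of the surrounding setup, reusing R.raw.googlepixel from the question; anchorNode and the field names are illustrative:

// Assumed fields: ExternalTexture texture; MediaPlayer mediaPlayer; ModelRenderable videoRenderable;
texture = new ExternalTexture();
mediaPlayer = MediaPlayer.create(this, R.raw.googlepixel);
mediaPlayer.setSurface(texture.getSurface());
mediaPlayer.setLooping(true);

// After a hit test has produced anchorNode:
Node videoNode = createVideoDisplay(anchorNode, new Vector3(0f, 0f, 0f), "Example");
if (!mediaPlayer.isPlaying()) {
    mediaPlayer.start();
    // Attach the renderable only once the first frame is available.
    texture.getSurfaceTexture().setOnFrameAvailableListener(surfaceTexture -> {
        videoNode.setRenderable(videoRenderable);
        texture.getSurfaceTexture().setOnFrameAvailableListener(null);
    });
} else {
    videoNode.setRenderable(videoRenderable);
}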