Unity - 自更新 Unity 到 2021.1.18f1 后,EasyAR 3.1.0 网络摄像头黑屏
Unity - EasyAR 3.1.0 webcam black screen since updating Unity to 2021.1.18f1
EasyAR 3.1.0 在旧版本的 Unity 中正常工作。网络摄像头按预期加载,但自从我更新到 Unity 2021.1.18f1 后,网络摄像头只显示黑屏。我使用此处提供的示例对其进行了测试:https://www.easyar.com/view/downloadHistory.html,结果相同:网络摄像头纹理保持黑色。
为了确保这不是我的网络摄像头的问题,我在这里尝试了这个示例:https://community.theta360.guide/t/simplest-webcam-test-in-unity/516/3 并且能够正确加载网络摄像头纹理。
我也尝试在 EasyAR 上使用另一个网络摄像头,但 EasyAR 网络摄像头纹理保持黑色。
我不知道问题是什么,因为它以前没有任何问题。 Unity 2020.2 和 2021.1 在加载网络摄像头方面有什么变化吗?
这是 CameraImageRenderer.cs 脚本:
//================================================================================================================================
//
// Copyright (c) 2015-2019 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//================================================================================================================================
using System;
using UnityEngine;
using UnityEngine.Rendering;
namespace easyar
{
/// <summary>
/// Renders the camera image to the background of <see cref="RenderCameraController.TargetCamera"/>
/// through a command buffer, and optionally into RenderTextures requested by users.
/// </summary>
[RequireComponent(typeof(RenderCameraController))]
public class CameraImageRenderer : MonoBehaviour
{
    private RenderCameraController controller;
    private CommandBuffer commandBuffer;
    private CameraImageMaterial arMaterial;
    private Material material;
    private CameraParameters cameraParameters;
    private bool renderImageHFlip;
    private UserRequest request;

    /// <summary>Raised on each frame update with the current material and the render size in pixels.</summary>
    public event Action<Material, Vector2> OnFrameRenderUpdate;
    private event Action<Camera, RenderTexture> TargetTextureChange;

    protected virtual void Awake()
    {
        controller = GetComponent<RenderCameraController>();
        arMaterial = new CameraImageMaterial();
    }

    protected virtual void OnEnable()
    {
        UpdateCommandBuffer(controller ? controller.TargetCamera : null, material);
    }

    protected virtual void OnDisable()
    {
        RemoveCommandBuffer(controller ? controller.TargetCamera : null);
    }

    protected virtual void OnDestroy()
    {
        arMaterial.Dispose();
        if (request != null) { request.Dispose(); }
        if (cameraParameters != null) { cameraParameters.Dispose(); }
    }

    /// <summary>
    /// Requests a RenderTexture containing the camera image. The handler is invoked whenever the
    /// texture is (re)created; pass the same handler to <see cref="DropTargetTexture"/> to release it.
    /// </summary>
    public void RequestTargetTexture(Action<Camera, RenderTexture> targetTextureEventHandler)
    {
        if (request == null)
        {
            request = new UserRequest();
        }
        TargetTextureChange += targetTextureEventHandler;
        RenderTexture texture;
        request.UpdateTexture(controller ? controller.TargetCamera : null, material, out texture);
        if (TargetTextureChange != null && texture)
        {
            TargetTextureChange(controller.TargetCamera, texture);
        }
    }

    /// <summary>Releases a texture request previously made through <see cref="RequestTargetTexture"/>.</summary>
    public void DropTargetTexture(Action<Camera, RenderTexture> targetTextureEventHandler)
    {
        if (controller)
        {
            targetTextureEventHandler(controller.TargetCamera, null);
        }
        TargetTextureChange -= targetTextureEventHandler;
        if (TargetTextureChange == null && request != null)
        {
            // Last subscriber gone: tear down the user command buffer and texture.
            request.RemoveCommandBuffer(controller ? controller.TargetCamera : null);
            request.Dispose();
            request = null;
        }
    }

    /// <summary>Hooks this renderer into the session's frame events. Called during session assembly.</summary>
    public void OnAssemble(ARSession session)
    {
        session.FrameChange += OnFrameChange;
        session.FrameUpdate += OnFrameUpdate;
    }

    /// <summary>Sets horizontal flip of the rendered image ("HFilp" spelling kept for API compatibility).</summary>
    public void SetHFilp(bool hFlip)
    {
        renderImageHFlip = hFlip;
    }

    private void OnFrameChange(OutputFrame outputFrame, Matrix4x4 displayCompensation)
    {
        if (outputFrame == null)
        {
            // Frame source lost: clear the material and tear down command buffers / user textures.
            material = null;
            UpdateCommandBuffer(controller ? controller.TargetCamera : null, material);
            if (request != null)
            {
                request.UpdateCommandBuffer(controller ? controller.TargetCamera : null, material);
                RenderTexture texture;
                if (TargetTextureChange != null && request.UpdateTexture(controller.TargetCamera, material, out texture))
                {
                    TargetTextureChange(controller.TargetCamera, texture);
                }
            }
            return;
        }
        if (!enabled && request == null && OnFrameRenderUpdate == null)
        {
            return; // Nothing consumes the image; skip the work.
        }
        using (var frame = outputFrame.inputFrame())
        {
            using (var image = frame.image())
            {
                var materialUpdated = arMaterial.UpdateByImage(image);
                if (material != materialUpdated)
                {
                    // Image format changed; rebuild command buffers around the new material.
                    material = materialUpdated;
                    UpdateCommandBuffer(controller ? controller.TargetCamera : null, material);
                    if (request != null) { request.UpdateCommandBuffer(controller ? controller.TargetCamera : null, material); }
                }
            }
            if (cameraParameters != null)
            {
                cameraParameters.Dispose();
            }
            cameraParameters = frame.cameraParameters();
        }
    }

    private void OnFrameUpdate(OutputFrame outputFrame)
    {
        if (!controller || (!enabled && request == null && OnFrameRenderUpdate == null))
        {
            return;
        }
        if (request != null)
        {
            RenderTexture texture;
            if (TargetTextureChange != null && request.UpdateTexture(controller.TargetCamera, material, out texture))
            {
                TargetTextureChange(controller.TargetCamera, texture);
            }
        }
        if (!material)
        {
            return;
        }
        bool cameraFront = cameraParameters.cameraDeviceType() == CameraDeviceType.Front;
        // Front cameras are mirrored by the hardware, so the user flip flag is inverted for them.
        var imageProjection = cameraParameters.imageProjection(controller.TargetCamera.aspect, EasyARController.Instance.Display.Rotation, false, cameraFront ? !renderImageHFlip : renderImageHFlip).ToUnityMatrix();
        if (renderImageHFlip)
        {
            var translateMatrix = Matrix4x4.identity;
            translateMatrix.m00 = -1;
            imageProjection = translateMatrix * imageProjection;
        }
        material.SetMatrix("_TextureRotation", imageProjection);
        if (OnFrameRenderUpdate != null)
        {
            OnFrameRenderUpdate(material, new Vector2(Screen.width * controller.TargetCamera.rect.width, Screen.height * controller.TargetCamera.rect.height));
        }
    }

    private void UpdateCommandBuffer(Camera cam, Material material)
    {
        RemoveCommandBuffer(cam);
        if (!cam || !material)
        {
            return;
        }
        if (enabled)
        {
            commandBuffer = new CommandBuffer();
            // FIX: CommandBuffer.Blit with a null source no longer binds the material's _MainTex
            // starting around Unity 2020.2, which made the camera background render black.
            // Pass the material's texture explicitly as the blit source.
            commandBuffer.Blit(material.HasProperty("_MainTex") ? material.GetTexture("_MainTex") : null, BuiltinRenderTextureType.CameraTarget, material);
            cam.AddCommandBuffer(CameraEvent.BeforeForwardOpaque, commandBuffer);
        }
    }

    private void RemoveCommandBuffer(Camera cam)
    {
        if (commandBuffer != null)
        {
            if (cam)
            {
                cam.RemoveCommandBuffer(CameraEvent.BeforeForwardOpaque, commandBuffer);
            }
            commandBuffer.Dispose();
            commandBuffer = null;
        }
    }

    /// <summary>Owns the RenderTexture and command buffer backing one user texture request.</summary>
    private class UserRequest : IDisposable
    {
        private RenderTexture texture;
        private CommandBuffer commandBuffer;

        ~UserRequest()
        {
            if (commandBuffer != null) { commandBuffer.Dispose(); }
            if (texture) { Destroy(texture); }
        }

        public void Dispose()
        {
            if (commandBuffer != null) { commandBuffer.Dispose(); }
            if (texture) { Destroy(texture); }
            GC.SuppressFinalize(this);
        }

        /// <summary>
        /// Keeps the texture sized to the camera viewport. Returns true when the texture reference
        /// changed (created, resized or destroyed); <paramref name="tex"/> receives the current texture.
        /// </summary>
        public bool UpdateTexture(Camera cam, Material material, out RenderTexture tex)
        {
            tex = texture;
            if (!cam || !material)
            {
                if (texture)
                {
                    Destroy(texture);
                    tex = texture = null;
                    return true;
                }
                return false;
            }
            int w = (int)(Screen.width * cam.rect.width);
            int h = (int)(Screen.height * cam.rect.height);
            if (texture && (texture.width != w || texture.height != h))
            {
                Destroy(texture);
            }
            if (texture)
            {
                return false;
            }
            else
            {
                texture = new RenderTexture(w, h, 0);
                UpdateCommandBuffer(cam, material);
                tex = texture;
                return true;
            }
        }

        public void UpdateCommandBuffer(Camera cam, Material material)
        {
            RemoveCommandBuffer(cam);
            if (!cam || !material)
            {
                return;
            }
            if (texture)
            {
                commandBuffer = new CommandBuffer();
                // Same null-source workaround as the background blit (Unity 2020.2+ black screen).
                commandBuffer.Blit(material.HasProperty("_MainTex") ? material.GetTexture("_MainTex") : null, texture, material);
                cam.AddCommandBuffer(CameraEvent.BeforeForwardOpaque, commandBuffer);
            }
        }

        public void RemoveCommandBuffer(Camera cam)
        {
            if (commandBuffer != null)
            {
                if (cam)
                {
                    cam.RemoveCommandBuffer(CameraEvent.BeforeForwardOpaque, commandBuffer);
                }
                commandBuffer.Dispose();
                commandBuffer = null;
            }
        }
    }
}
}
这是 VideoCameraDevice.cs 脚本:
//================================================================================================================================
//
// Copyright (c) 2015-2019 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//================================================================================================================================
using System;
using UnityEngine;
namespace easyar
{
/// <summary>
/// Camera source that drives an EasyAR CameraDevice (video camera / webcam) and feeds
/// its frames into the connected InputFrameSink.
/// </summary>
public class VideoCameraDevice : CameraSource
{
/// <summary>
/// EasyAR Sense API. Accessible between DeviceCreated and DeviceClosed event if available.
/// </summary>
public CameraDevice Device { get; private set; }
// Focus mode applied when the device is opened ("Continousauto" is the EasyAR API spelling).
public CameraDeviceFocusMode FocusMode = CameraDeviceFocusMode.Continousauto;
// Requested capture size in pixels; passed to Device.setSize on open.
public Vector2 CameraSize = new Vector2(1280, 960);
// Whether the physical camera is selected by type (front/back) or by index.
public CameraDeviceOpenMethod CameraOpenMethod = CameraDeviceOpenMethod.DeviceType;
[HideInInspector, SerializeField]
public CameraDeviceType CameraType = CameraDeviceType.Back;
[HideInInspector, SerializeField]
public int CameraIndex = 0;
[HideInInspector, SerializeField]
private CameraDevicePreference cameraPreference = CameraDevicePreference.PreferObjectSensing;
// Optional parameters applied on open; when null the device's own parameters are used.
private CameraParameters parameters = null;
// Guards against the async permission callback opening the device after Close() was called.
private bool willOpen;
public event Action DeviceCreated;
public event Action DeviceOpened;
public event Action DeviceClosed;
/// <summary>How the physical camera is chosen in <see cref="Open"/>.</summary>
public enum CameraDeviceOpenMethod
{
DeviceType,
DeviceIndex,
}
/// <summary>Buffer capacity of the device; the backing field caches the value until the device exists.</summary>
public override int BufferCapacity
{
get
{
if (Device != null)
{
return Device.bufferCapacity();
}
return bufferCapacity;
}
set
{
bufferCapacity = value;
if (Device != null)
{
Device.setBufferCapacity(value);
}
}
}
public override bool HasSpatialInformation
{
get { return false; }
}
public CameraDevicePreference CameraPreference
{
get { return cameraPreference; }
// Switch to the preferred FocusMode when switching CameraPreference.
// You can set another FocusMode afterwards, but the tracking results may differ.
set
{
cameraPreference = value;
switch (cameraPreference)
{
case CameraDevicePreference.PreferObjectSensing:
FocusMode = CameraDeviceFocusMode.Continousauto;
break;
case CameraDevicePreference.PreferSurfaceTracking:
FocusMode = CameraDeviceFocusMode.Medium;
break;
default:
break;
}
}
}
/// <summary>Live device parameters when open; otherwise the pending value to apply on open.</summary>
public CameraParameters Parameters
{
get
{
if (Device != null)
{
return Device.cameraParameters();
}
return parameters;
}
set
{
parameters = value;
}
}
protected override void OnEnable()
{
base.OnEnable();
if (Device != null)
{
Device.start();
}
}
protected override void Start()
{
// Fail fast with a user-visible popup when the EasyAR camera device is unavailable.
if (!CameraDevice.isAvailable())
{
throw new UIPopupException(typeof(CameraDevice) + " not available");
}
base.Start();
}
protected override void OnDisable()
{
base.OnDisable();
if (Device != null)
{
Device.stop();
}
}
// Asynchronously requests camera permission, then creates, configures and opens the device.
// The callback runs later on the scheduler; willOpen cancels it if Close() happened meanwhile.
public override void Open()
{
willOpen = true;
CameraDevice.requestPermissions(EasyARController.Scheduler, (Action<PermissionStatus, string>)((status, msg) =>
{
if (!willOpen)
{
return;
}
if (status != PermissionStatus.Granted)
{
throw new UIPopupException("Camera permission not granted");
}
Close();
Device = CameraDeviceSelector.createCameraDevice(CameraPreference);
if (DeviceCreated != null)
{
DeviceCreated();
}
bool openResult = false;
switch (CameraOpenMethod)
{
case CameraDeviceOpenMethod.DeviceType:
openResult = Device.openWithPreferredType(CameraType);
break;
case CameraDeviceOpenMethod.DeviceIndex:
openResult = Device.openWithIndex(CameraIndex);
break;
default:
break;
}
if (!openResult)
{
Debug.LogError("Camera open failed");
Device.Dispose();
Device = null;
return;
}
// Configure the opened device before connecting it to the frame sink.
Device.setFocusMode(FocusMode);
Device.setSize(new Vec2I((int)CameraSize.x, (int)CameraSize.y));
if (parameters != null)
{
Device.setCameraParameters(parameters);
}
if (bufferCapacity != 0)
{
Device.setBufferCapacity(bufferCapacity);
}
if (sink != null)
Device.inputFrameSource().connect(sink);
if (DeviceOpened != null)
{
DeviceOpened();
}
// Start capturing immediately if the component is currently enabled.
if (enabled)
{
OnEnable();
}
}));
}
// Stops and disposes the device; also cancels any pending async Open().
// NOTE(review): DeviceClosed is raised after Device.Dispose(), so Device is already
// unusable inside the handler — confirm against the contract documented on Device.
public override void Close()
{
willOpen = false;
if (Device != null)
{
OnDisable();
Device.close();
Device.Dispose();
if (DeviceClosed != null)
{
DeviceClosed();
}
Device = null;
}
}
public override void Connect(InputFrameSink val)
{
base.Connect(val);
if (Device != null)
{
Device.inputFrameSource().connect(val);
}
}
}
}
这是 RenderCameraController.cs 脚本:
//================================================================================================================================
//
// Copyright (c) 2015-2019 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//================================================================================================================================
using UnityEngine;
namespace easyar
{
/// <summary>
/// Drives the target Unity camera's projection matrix from the AR frame's camera parameters
/// and display compensation, and forwards flip settings to the CameraImageRenderer on the
/// same GameObject.
/// </summary>
public class RenderCameraController : MonoBehaviour
{
public Camera TargetCamera;
// Optional externally supplied parameters that override the frame's camera parameters.
public RenderCameraParameters ExternalParameters;
private CameraImageRenderer cameraRenderer;
private Matrix4x4 currentDisplayCompensation = Matrix4x4.identity;
private CameraParameters cameraParameters;
private bool projectHFilp; // horizontal flip of the projection ("HFilp" spelling kept as-is)
private ARSession arSession;
protected virtual void OnEnable()
{
if (arSession)
{
arSession.FrameChange += OnFrameChange;
arSession.FrameUpdate += OnFrameUpdate;
}
}
protected virtual void OnDisable()
{
if (arSession)
{
arSession.FrameChange -= OnFrameChange;
arSession.FrameUpdate -= OnFrameUpdate;
}
}
protected virtual void OnDestroy()
{
if (cameraParameters != null)
{
cameraParameters.Dispose();
}
if (ExternalParameters)
{
ExternalParameters.Dispose();
}
}
// Called during session assembly: binds the session, resolves the target camera,
// subscribes to frame events, and forwards assembly to the image renderer if present.
internal void OnAssemble(ARSession session)
{
arSession = session;
if (!TargetCamera)
{
TargetCamera = session.Assembly.Camera;
}
if (enabled)
{
arSession.FrameChange += OnFrameChange;
arSession.FrameUpdate += OnFrameUpdate;
}
cameraRenderer = GetComponent<CameraImageRenderer>();
if (cameraRenderer)
{
cameraRenderer.OnAssemble(session);
}
}
internal void SetProjectHFlip(bool hFlip)
{
projectHFilp = hFlip;
}
internal void SetRenderImageHFilp(bool hFlip)
{
if (cameraRenderer)
{
cameraRenderer.SetHFilp(hFlip);
}
}
// Caches the inverse display compensation and the frame's camera parameters
// whenever the frame source changes.
private void OnFrameChange(OutputFrame outputFrame, Matrix4x4 displayCompensation)
{
if (outputFrame == null)
{
return;
}
currentDisplayCompensation = displayCompensation.inverse;
using (var frame = outputFrame.inputFrame())
{
if (cameraParameters != null)
{
cameraParameters.Dispose();
}
cameraParameters = frame.cameraParameters();
if (ExternalParameters)
{
ExternalParameters.Build(cameraParameters);
}
}
}
// Rebuilds the projection each frame. Multiplication order matters:
// projection * external transform * display compensation, then an optional mirror on X.
private void OnFrameUpdate(OutputFrame outputFrame)
{
var camParameters = ExternalParameters ? ExternalParameters.Parameters : cameraParameters;
var projection = camParameters.projection(TargetCamera.nearClipPlane, TargetCamera.farClipPlane, TargetCamera.aspect, EasyARController.Instance.Display.Rotation, false, false).ToUnityMatrix();
if (ExternalParameters)
{
projection *= ExternalParameters.Transform;
}
projection *= currentDisplayCompensation;
if (projectHFilp)
{
var translateMatrix = Matrix4x4.identity;
translateMatrix.m00 = -1;
projection = translateMatrix * projection;
}
TargetCamera.projectionMatrix = projection;
// Mirrored projection reverses winding, so culling must be inverted to match.
GL.invertCulling = projectHFilp;
}
}
}
我对如何解决这个问题感到绝望。
所以如果有人遇到同样的问题:
要解决黑屏,只需打开“CameraImageRenderer.cs”,转到第 180 行并替换该行
commandBuffer.Blit(null, BuiltinRenderTextureType.CameraTarget, material);
替换为
commandBuffer.Blit(material.HasProperty("_MainTex") ? material.GetTexture("_MainTex") : null, BuiltinRenderTextureType.CameraTarget, material);
此问题已在 EasyAR 4.1 的最新更新中得到解决。我在这里找到了解决方案:https://www.easyar.cn/view/questionDetails.html#163
EasyAR 3.1.0 在旧版本的 Unity 中正常工作。网络摄像头按预期加载,但自从我更新到 Unity 2021.1.18f1 后,网络摄像头只显示黑屏。我使用此处提供的示例对其进行了测试:https://www.easyar.com/view/downloadHistory.html,结果相同:网络摄像头纹理保持黑色。 为了确保这不是我的网络摄像头的问题,我在这里尝试了这个示例:https://community.theta360.guide/t/simplest-webcam-test-in-unity/516/3 并且能够正确加载网络摄像头纹理。
我也尝试在 EasyAR 上使用另一个网络摄像头,但 EasyAR 网络摄像头纹理保持黑色。
我不知道问题是什么,因为它以前没有任何问题。 Unity 2020.2 和 2021.1 在加载网络摄像头方面有什么变化吗?
这是 CameraImageRenderer.cs 脚本:
//================================================================================================================================
//
// Copyright (c) 2015-2019 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//================================================================================================================================
using System;
using UnityEngine;
using UnityEngine.Rendering;
namespace easyar
{
/// <summary>
/// Renders the camera image to the background of <see cref="RenderCameraController.TargetCamera"/>
/// through a command buffer, and optionally into RenderTextures requested by users.
/// </summary>
[RequireComponent(typeof(RenderCameraController))]
public class CameraImageRenderer : MonoBehaviour
{
    private RenderCameraController controller;
    private CommandBuffer commandBuffer;
    private CameraImageMaterial arMaterial;
    private Material material;
    private CameraParameters cameraParameters;
    private bool renderImageHFlip;
    private UserRequest request;

    /// <summary>Raised on each frame update with the current material and the render size in pixels.</summary>
    public event Action<Material, Vector2> OnFrameRenderUpdate;
    private event Action<Camera, RenderTexture> TargetTextureChange;

    protected virtual void Awake()
    {
        controller = GetComponent<RenderCameraController>();
        arMaterial = new CameraImageMaterial();
    }

    protected virtual void OnEnable()
    {
        UpdateCommandBuffer(controller ? controller.TargetCamera : null, material);
    }

    protected virtual void OnDisable()
    {
        RemoveCommandBuffer(controller ? controller.TargetCamera : null);
    }

    protected virtual void OnDestroy()
    {
        arMaterial.Dispose();
        if (request != null) { request.Dispose(); }
        if (cameraParameters != null) { cameraParameters.Dispose(); }
    }

    /// <summary>
    /// Requests a RenderTexture containing the camera image. The handler is invoked whenever the
    /// texture is (re)created; pass the same handler to <see cref="DropTargetTexture"/> to release it.
    /// </summary>
    public void RequestTargetTexture(Action<Camera, RenderTexture> targetTextureEventHandler)
    {
        if (request == null)
        {
            request = new UserRequest();
        }
        TargetTextureChange += targetTextureEventHandler;
        RenderTexture texture;
        request.UpdateTexture(controller ? controller.TargetCamera : null, material, out texture);
        if (TargetTextureChange != null && texture)
        {
            TargetTextureChange(controller.TargetCamera, texture);
        }
    }

    /// <summary>Releases a texture request previously made through <see cref="RequestTargetTexture"/>.</summary>
    public void DropTargetTexture(Action<Camera, RenderTexture> targetTextureEventHandler)
    {
        if (controller)
        {
            targetTextureEventHandler(controller.TargetCamera, null);
        }
        TargetTextureChange -= targetTextureEventHandler;
        if (TargetTextureChange == null && request != null)
        {
            // Last subscriber gone: tear down the user command buffer and texture.
            request.RemoveCommandBuffer(controller ? controller.TargetCamera : null);
            request.Dispose();
            request = null;
        }
    }

    /// <summary>Hooks this renderer into the session's frame events. Called during session assembly.</summary>
    public void OnAssemble(ARSession session)
    {
        session.FrameChange += OnFrameChange;
        session.FrameUpdate += OnFrameUpdate;
    }

    /// <summary>Sets horizontal flip of the rendered image ("HFilp" spelling kept for API compatibility).</summary>
    public void SetHFilp(bool hFlip)
    {
        renderImageHFlip = hFlip;
    }

    private void OnFrameChange(OutputFrame outputFrame, Matrix4x4 displayCompensation)
    {
        if (outputFrame == null)
        {
            // Frame source lost: clear the material and tear down command buffers / user textures.
            material = null;
            UpdateCommandBuffer(controller ? controller.TargetCamera : null, material);
            if (request != null)
            {
                request.UpdateCommandBuffer(controller ? controller.TargetCamera : null, material);
                RenderTexture texture;
                if (TargetTextureChange != null && request.UpdateTexture(controller.TargetCamera, material, out texture))
                {
                    TargetTextureChange(controller.TargetCamera, texture);
                }
            }
            return;
        }
        if (!enabled && request == null && OnFrameRenderUpdate == null)
        {
            return; // Nothing consumes the image; skip the work.
        }
        using (var frame = outputFrame.inputFrame())
        {
            using (var image = frame.image())
            {
                var materialUpdated = arMaterial.UpdateByImage(image);
                if (material != materialUpdated)
                {
                    // Image format changed; rebuild command buffers around the new material.
                    material = materialUpdated;
                    UpdateCommandBuffer(controller ? controller.TargetCamera : null, material);
                    if (request != null) { request.UpdateCommandBuffer(controller ? controller.TargetCamera : null, material); }
                }
            }
            if (cameraParameters != null)
            {
                cameraParameters.Dispose();
            }
            cameraParameters = frame.cameraParameters();
        }
    }

    private void OnFrameUpdate(OutputFrame outputFrame)
    {
        if (!controller || (!enabled && request == null && OnFrameRenderUpdate == null))
        {
            return;
        }
        if (request != null)
        {
            RenderTexture texture;
            if (TargetTextureChange != null && request.UpdateTexture(controller.TargetCamera, material, out texture))
            {
                TargetTextureChange(controller.TargetCamera, texture);
            }
        }
        if (!material)
        {
            return;
        }
        bool cameraFront = cameraParameters.cameraDeviceType() == CameraDeviceType.Front;
        // Front cameras are mirrored by the hardware, so the user flip flag is inverted for them.
        var imageProjection = cameraParameters.imageProjection(controller.TargetCamera.aspect, EasyARController.Instance.Display.Rotation, false, cameraFront ? !renderImageHFlip : renderImageHFlip).ToUnityMatrix();
        if (renderImageHFlip)
        {
            var translateMatrix = Matrix4x4.identity;
            translateMatrix.m00 = -1;
            imageProjection = translateMatrix * imageProjection;
        }
        material.SetMatrix("_TextureRotation", imageProjection);
        if (OnFrameRenderUpdate != null)
        {
            OnFrameRenderUpdate(material, new Vector2(Screen.width * controller.TargetCamera.rect.width, Screen.height * controller.TargetCamera.rect.height));
        }
    }

    private void UpdateCommandBuffer(Camera cam, Material material)
    {
        RemoveCommandBuffer(cam);
        if (!cam || !material)
        {
            return;
        }
        if (enabled)
        {
            commandBuffer = new CommandBuffer();
            // FIX: CommandBuffer.Blit with a null source no longer binds the material's _MainTex
            // starting around Unity 2020.2, which made the camera background render black.
            // Pass the material's texture explicitly as the blit source.
            commandBuffer.Blit(material.HasProperty("_MainTex") ? material.GetTexture("_MainTex") : null, BuiltinRenderTextureType.CameraTarget, material);
            cam.AddCommandBuffer(CameraEvent.BeforeForwardOpaque, commandBuffer);
        }
    }

    private void RemoveCommandBuffer(Camera cam)
    {
        if (commandBuffer != null)
        {
            if (cam)
            {
                cam.RemoveCommandBuffer(CameraEvent.BeforeForwardOpaque, commandBuffer);
            }
            commandBuffer.Dispose();
            commandBuffer = null;
        }
    }

    /// <summary>Owns the RenderTexture and command buffer backing one user texture request.</summary>
    private class UserRequest : IDisposable
    {
        private RenderTexture texture;
        private CommandBuffer commandBuffer;

        ~UserRequest()
        {
            if (commandBuffer != null) { commandBuffer.Dispose(); }
            if (texture) { Destroy(texture); }
        }

        public void Dispose()
        {
            if (commandBuffer != null) { commandBuffer.Dispose(); }
            if (texture) { Destroy(texture); }
            GC.SuppressFinalize(this);
        }

        /// <summary>
        /// Keeps the texture sized to the camera viewport. Returns true when the texture reference
        /// changed (created, resized or destroyed); <paramref name="tex"/> receives the current texture.
        /// </summary>
        public bool UpdateTexture(Camera cam, Material material, out RenderTexture tex)
        {
            tex = texture;
            if (!cam || !material)
            {
                if (texture)
                {
                    Destroy(texture);
                    tex = texture = null;
                    return true;
                }
                return false;
            }
            int w = (int)(Screen.width * cam.rect.width);
            int h = (int)(Screen.height * cam.rect.height);
            if (texture && (texture.width != w || texture.height != h))
            {
                Destroy(texture);
            }
            if (texture)
            {
                return false;
            }
            else
            {
                texture = new RenderTexture(w, h, 0);
                UpdateCommandBuffer(cam, material);
                tex = texture;
                return true;
            }
        }

        public void UpdateCommandBuffer(Camera cam, Material material)
        {
            RemoveCommandBuffer(cam);
            if (!cam || !material)
            {
                return;
            }
            if (texture)
            {
                commandBuffer = new CommandBuffer();
                // Same null-source workaround as the background blit (Unity 2020.2+ black screen).
                commandBuffer.Blit(material.HasProperty("_MainTex") ? material.GetTexture("_MainTex") : null, texture, material);
                cam.AddCommandBuffer(CameraEvent.BeforeForwardOpaque, commandBuffer);
            }
        }

        public void RemoveCommandBuffer(Camera cam)
        {
            if (commandBuffer != null)
            {
                if (cam)
                {
                    cam.RemoveCommandBuffer(CameraEvent.BeforeForwardOpaque, commandBuffer);
                }
                commandBuffer.Dispose();
                commandBuffer = null;
            }
        }
    }
}
}
这是 VideoCameraDevice.cs 脚本:
//================================================================================================================================
//
// Copyright (c) 2015-2019 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//================================================================================================================================
using System;
using UnityEngine;
namespace easyar
{
/// <summary>
/// Camera source that drives an EasyAR CameraDevice (video camera / webcam) and feeds
/// its frames into the connected InputFrameSink.
/// </summary>
public class VideoCameraDevice : CameraSource
{
/// <summary>
/// EasyAR Sense API. Accessible between DeviceCreated and DeviceClosed event if available.
/// </summary>
public CameraDevice Device { get; private set; }
// Focus mode applied when the device is opened ("Continousauto" is the EasyAR API spelling).
public CameraDeviceFocusMode FocusMode = CameraDeviceFocusMode.Continousauto;
// Requested capture size in pixels; passed to Device.setSize on open.
public Vector2 CameraSize = new Vector2(1280, 960);
// Whether the physical camera is selected by type (front/back) or by index.
public CameraDeviceOpenMethod CameraOpenMethod = CameraDeviceOpenMethod.DeviceType;
[HideInInspector, SerializeField]
public CameraDeviceType CameraType = CameraDeviceType.Back;
[HideInInspector, SerializeField]
public int CameraIndex = 0;
[HideInInspector, SerializeField]
private CameraDevicePreference cameraPreference = CameraDevicePreference.PreferObjectSensing;
// Optional parameters applied on open; when null the device's own parameters are used.
private CameraParameters parameters = null;
// Guards against the async permission callback opening the device after Close() was called.
private bool willOpen;
public event Action DeviceCreated;
public event Action DeviceOpened;
public event Action DeviceClosed;
/// <summary>How the physical camera is chosen in <see cref="Open"/>.</summary>
public enum CameraDeviceOpenMethod
{
DeviceType,
DeviceIndex,
}
/// <summary>Buffer capacity of the device; the backing field caches the value until the device exists.</summary>
public override int BufferCapacity
{
get
{
if (Device != null)
{
return Device.bufferCapacity();
}
return bufferCapacity;
}
set
{
bufferCapacity = value;
if (Device != null)
{
Device.setBufferCapacity(value);
}
}
}
public override bool HasSpatialInformation
{
get { return false; }
}
public CameraDevicePreference CameraPreference
{
get { return cameraPreference; }
// Switch to the preferred FocusMode when switching CameraPreference.
// You can set another FocusMode afterwards, but the tracking results may differ.
set
{
cameraPreference = value;
switch (cameraPreference)
{
case CameraDevicePreference.PreferObjectSensing:
FocusMode = CameraDeviceFocusMode.Continousauto;
break;
case CameraDevicePreference.PreferSurfaceTracking:
FocusMode = CameraDeviceFocusMode.Medium;
break;
default:
break;
}
}
}
/// <summary>Live device parameters when open; otherwise the pending value to apply on open.</summary>
public CameraParameters Parameters
{
get
{
if (Device != null)
{
return Device.cameraParameters();
}
return parameters;
}
set
{
parameters = value;
}
}
protected override void OnEnable()
{
base.OnEnable();
if (Device != null)
{
Device.start();
}
}
protected override void Start()
{
// Fail fast with a user-visible popup when the EasyAR camera device is unavailable.
if (!CameraDevice.isAvailable())
{
throw new UIPopupException(typeof(CameraDevice) + " not available");
}
base.Start();
}
protected override void OnDisable()
{
base.OnDisable();
if (Device != null)
{
Device.stop();
}
}
// Asynchronously requests camera permission, then creates, configures and opens the device.
// The callback runs later on the scheduler; willOpen cancels it if Close() happened meanwhile.
public override void Open()
{
willOpen = true;
CameraDevice.requestPermissions(EasyARController.Scheduler, (Action<PermissionStatus, string>)((status, msg) =>
{
if (!willOpen)
{
return;
}
if (status != PermissionStatus.Granted)
{
throw new UIPopupException("Camera permission not granted");
}
Close();
Device = CameraDeviceSelector.createCameraDevice(CameraPreference);
if (DeviceCreated != null)
{
DeviceCreated();
}
bool openResult = false;
switch (CameraOpenMethod)
{
case CameraDeviceOpenMethod.DeviceType:
openResult = Device.openWithPreferredType(CameraType);
break;
case CameraDeviceOpenMethod.DeviceIndex:
openResult = Device.openWithIndex(CameraIndex);
break;
default:
break;
}
if (!openResult)
{
Debug.LogError("Camera open failed");
Device.Dispose();
Device = null;
return;
}
// Configure the opened device before connecting it to the frame sink.
Device.setFocusMode(FocusMode);
Device.setSize(new Vec2I((int)CameraSize.x, (int)CameraSize.y));
if (parameters != null)
{
Device.setCameraParameters(parameters);
}
if (bufferCapacity != 0)
{
Device.setBufferCapacity(bufferCapacity);
}
if (sink != null)
Device.inputFrameSource().connect(sink);
if (DeviceOpened != null)
{
DeviceOpened();
}
// Start capturing immediately if the component is currently enabled.
if (enabled)
{
OnEnable();
}
}));
}
// Stops and disposes the device; also cancels any pending async Open().
// NOTE(review): DeviceClosed is raised after Device.Dispose(), so Device is already
// unusable inside the handler — confirm against the contract documented on Device.
public override void Close()
{
willOpen = false;
if (Device != null)
{
OnDisable();
Device.close();
Device.Dispose();
if (DeviceClosed != null)
{
DeviceClosed();
}
Device = null;
}
}
public override void Connect(InputFrameSink val)
{
base.Connect(val);
if (Device != null)
{
Device.inputFrameSource().connect(val);
}
}
}
}
这是 RenderCameraController.cs 脚本:
//================================================================================================================================
//
// Copyright (c) 2015-2019 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//================================================================================================================================
using UnityEngine;
namespace easyar
{
/// <summary>
/// Drives the target Unity camera's projection matrix from the AR frame's camera parameters
/// and display compensation, and forwards flip settings to the CameraImageRenderer on the
/// same GameObject.
/// </summary>
public class RenderCameraController : MonoBehaviour
{
public Camera TargetCamera;
// Optional externally supplied parameters that override the frame's camera parameters.
public RenderCameraParameters ExternalParameters;
private CameraImageRenderer cameraRenderer;
private Matrix4x4 currentDisplayCompensation = Matrix4x4.identity;
private CameraParameters cameraParameters;
private bool projectHFilp; // horizontal flip of the projection ("HFilp" spelling kept as-is)
private ARSession arSession;
protected virtual void OnEnable()
{
if (arSession)
{
arSession.FrameChange += OnFrameChange;
arSession.FrameUpdate += OnFrameUpdate;
}
}
protected virtual void OnDisable()
{
if (arSession)
{
arSession.FrameChange -= OnFrameChange;
arSession.FrameUpdate -= OnFrameUpdate;
}
}
protected virtual void OnDestroy()
{
if (cameraParameters != null)
{
cameraParameters.Dispose();
}
if (ExternalParameters)
{
ExternalParameters.Dispose();
}
}
// Called during session assembly: binds the session, resolves the target camera,
// subscribes to frame events, and forwards assembly to the image renderer if present.
internal void OnAssemble(ARSession session)
{
arSession = session;
if (!TargetCamera)
{
TargetCamera = session.Assembly.Camera;
}
if (enabled)
{
arSession.FrameChange += OnFrameChange;
arSession.FrameUpdate += OnFrameUpdate;
}
cameraRenderer = GetComponent<CameraImageRenderer>();
if (cameraRenderer)
{
cameraRenderer.OnAssemble(session);
}
}
internal void SetProjectHFlip(bool hFlip)
{
projectHFilp = hFlip;
}
internal void SetRenderImageHFilp(bool hFlip)
{
if (cameraRenderer)
{
cameraRenderer.SetHFilp(hFlip);
}
}
// Caches the inverse display compensation and the frame's camera parameters
// whenever the frame source changes.
private void OnFrameChange(OutputFrame outputFrame, Matrix4x4 displayCompensation)
{
if (outputFrame == null)
{
return;
}
currentDisplayCompensation = displayCompensation.inverse;
using (var frame = outputFrame.inputFrame())
{
if (cameraParameters != null)
{
cameraParameters.Dispose();
}
cameraParameters = frame.cameraParameters();
if (ExternalParameters)
{
ExternalParameters.Build(cameraParameters);
}
}
}
// Rebuilds the projection each frame. Multiplication order matters:
// projection * external transform * display compensation, then an optional mirror on X.
private void OnFrameUpdate(OutputFrame outputFrame)
{
var camParameters = ExternalParameters ? ExternalParameters.Parameters : cameraParameters;
var projection = camParameters.projection(TargetCamera.nearClipPlane, TargetCamera.farClipPlane, TargetCamera.aspect, EasyARController.Instance.Display.Rotation, false, false).ToUnityMatrix();
if (ExternalParameters)
{
projection *= ExternalParameters.Transform;
}
projection *= currentDisplayCompensation;
if (projectHFilp)
{
var translateMatrix = Matrix4x4.identity;
translateMatrix.m00 = -1;
projection = translateMatrix * projection;
}
TargetCamera.projectionMatrix = projection;
// Mirrored projection reverses winding, so culling must be inverted to match.
GL.invertCulling = projectHFilp;
}
}
}
我对如何解决这个问题感到绝望。
所以如果有人遇到同样的问题:
要解决黑屏,只需打开“CameraImageRenderer.cs”,转到第 180 行并替换该行
commandBuffer.Blit(null, BuiltinRenderTextureType.CameraTarget, material);
替换为
commandBuffer.Blit(material.HasProperty("_MainTex") ? material.GetTexture("_MainTex") : null, BuiltinRenderTextureType.CameraTarget, material);
此问题已在 EasyAR 4.1 的最新更新中得到解决。我在这里找到了解决方案:https://www.easyar.cn/view/questionDetails.html#163