using System;
using System.Collections.Generic;
using Unity.Collections;
using UnityEngine.Serialization;
using UnityEngine.XR.ARSubsystems;
using UnityEngine.Rendering;
namespace UnityEngine.XR.ARFoundation
{
/// <summary>
/// Manages the lifetime of the XRCameraSubsystem. Add one of these to a Camera in your scene
/// if you want camera texture and light estimation information to be available.
/// </summary>
[DefaultExecutionOrder(ARUpdateOrder.k_CameraManager)]
[DisallowMultipleComponent]
[RequireComponent(typeof(Camera))]
[HelpURL(HelpUrls.ApiWithNamespace + nameof(ARCameraManager) + ".html")]
public sealed class ARCameraManager :
SubsystemLifecycleManager,
ISerializationCallbackReceiver
{
// These fields are deprecated. They are retained only so that values serialized by
// older versions of this component can be migrated in OnAfterDeserialize.
[SerializeField]
[HideInInspector]
CameraFocusMode m_FocusMode = CameraFocusMode.Auto;
[SerializeField]
[HideInInspector]
LightEstimationMode m_LightEstimationMode = LightEstimationMode.Disabled;
/// <summary>
/// Part of the [ISerializationCallbackReceiver](https://docs.unity3d.com/ScriptReference/ISerializationCallbackReceiver.html)
/// interface. Invoked before serialization.
/// </summary>
public void OnBeforeSerialize() {}
/// <summary>
/// Part of the [ISerializationCallbackReceiver](https://docs.unity3d.com/ScriptReference/ISerializationCallbackReceiver.html)
/// interface. Invoked after deserialization.
/// </summary>
public void OnAfterDeserialize()
{
// Migrate values serialized under the deprecated fields to their replacements,
// then write an out-of-range sentinel (-1) so the migration runs only once.
if (m_FocusMode != (CameraFocusMode)(-1))
{
m_AutoFocus = m_FocusMode == CameraFocusMode.Auto;
m_FocusMode = (CameraFocusMode)(-1);
}
if (m_LightEstimationMode != (LightEstimationMode)(-1))
{
m_LightEstimation = m_LightEstimationMode.ToLightEstimation();
m_LightEstimationMode = (LightEstimationMode)(-1);
}
}
[SerializeField]
[Tooltip("When enabled, auto focus will be requested on the (physical) AR camera.")]
bool m_AutoFocus = true;
/// <summary>
/// Get or set whether auto focus is requested.
/// </summary>
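/// <example>
/// A minimal usage sketch. The `m_CameraManager` field is an assumed,
/// Inspector-assigned reference to this component:
/// <code>
/// [SerializeField] ARCameraManager m_CameraManager;
///
/// // Request fixed focus, for example while scanning a static target.
/// void DisableAutoFocus() => m_CameraManager.autoFocusRequested = false;
/// </code>
/// </example>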
public bool autoFocusRequested
{
get => subsystem?.autoFocusRequested ?? m_AutoFocus;
set
{
m_AutoFocus = value;
if (enabled && subsystem != null)
{
subsystem.autoFocusRequested = value;
}
}
}
/// <summary>
/// Get or set the focus mode. This property is obsolete. The getter uses
/// <see cref="autoFocusEnabled"/> and the setter uses <see cref="autoFocusRequested"/>.
/// </summary>
[Obsolete("Use autoFocusEnabled or autoFocusRequested instead. (2019-12-13)")]
public CameraFocusMode focusMode
{
get => autoFocusEnabled ? CameraFocusMode.Auto : CameraFocusMode.Fixed;
set => autoFocusRequested = (value == CameraFocusMode.Auto);
}
/// <summary>
/// Get whether auto focus is currently enabled on the subsystem, or `false` if there
/// is no subsystem.
/// </summary>
public bool autoFocusEnabled => subsystem?.autoFocusEnabled ?? false;
[SerializeField]
[Tooltip("The light estimation mode for the AR camera.")]
LightEstimation m_LightEstimation = LightEstimation.None;
/// <summary>
/// Get or set the requested <see cref="LightEstimation"/> for the camera.
/// </summary>
/// <value>
/// The light estimation mode for the camera.
/// </value>
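/// <example>
/// A minimal sketch requesting ambient intensity and color estimation
/// (`m_CameraManager` is an assumed reference to this component):
/// <code>
/// // LightEstimation is a flags enum, so modes can be combined.
/// m_CameraManager.requestedLightEstimation =
///     LightEstimation.AmbientIntensity | LightEstimation.AmbientColor;
/// </code>
/// </example>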
public LightEstimation requestedLightEstimation
{
get => subsystem?.requestedLightEstimation.ToLightEstimation() ?? m_LightEstimation;
set
{
m_LightEstimation = value;
if (enabled && subsystem != null)
{
subsystem.requestedLightEstimation = value.ToFeature();
}
}
}
/// <summary>
/// Get the current light estimation mode used by the subsystem, or <see cref="LightEstimation.None"/>
/// if there is no subsystem.
/// </summary>
public LightEstimation currentLightEstimation => subsystem?.currentLightEstimation.ToLightEstimation() ?? LightEstimation.None;
/// <summary>
/// Get or set the light estimation mode. This property is obsolete. The getter
/// uses <see cref="currentLightEstimation"/> and the setter uses
/// <see cref="requestedLightEstimation"/>.
/// </summary>
[Obsolete("Use currentLightEstimation or requestedLightEstimation instead. (2019-12-13)")]
public LightEstimationMode lightEstimationMode
{
get => m_LightEstimation.ToLightEstimationMode();
set => requestedLightEstimation = value.ToLightEstimation();
}
[SerializeField]
[Tooltip("The requested camera facing direction")]
CameraFacingDirection m_FacingDirection = CameraFacingDirection.World;
/// <summary>
/// Get or set the requested camera facing direction.
/// </summary>
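/// <example>
/// A minimal sketch toggling between the world-facing and user-facing cameras
/// (`m_CameraManager` is an assumed reference to this component):
/// <code>
/// void ToggleCamera()
/// {
///     m_CameraManager.requestedFacingDirection =
///         m_CameraManager.currentFacingDirection == CameraFacingDirection.User
///             ? CameraFacingDirection.World
///             : CameraFacingDirection.User;
/// }
/// </code>
/// </example>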
public CameraFacingDirection requestedFacingDirection
{
get => subsystem?.requestedCamera.ToCameraFacingDirection() ?? m_FacingDirection;
set
{
m_FacingDirection = value;
if (enabled && subsystem != null)
{
subsystem.requestedCamera = value.ToFeature();
}
}
}
/// <summary>
/// The current camera facing direction. This should usually match <see cref="requestedFacingDirection"/>,
/// but might be different if the platform cannot service the requested camera facing direction, or it might
/// take a few frames for the requested facing direction to become active.
/// </summary>
public CameraFacingDirection currentFacingDirection => subsystem?.currentCamera.ToCameraFacingDirection() ?? CameraFacingDirection.None;
/// <summary>
/// Determines whether camera permission has been granted.
/// </summary>
/// <value>
/// `true` if permission has been granted. Otherwise, `false`.
/// </value>
public bool permissionGranted => (subsystem != null) && subsystem.permissionGranted;
/// <summary>
/// An event which fires each time a new camera frame is received.
/// </summary>
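/// <example>
/// A minimal subscription sketch; the subscribing component and handler
/// names are illustrative:
/// <code>
/// void OnEnable() => m_CameraManager.frameReceived += OnCameraFrameReceived;
/// void OnDisable() => m_CameraManager.frameReceived -= OnCameraFrameReceived;
///
/// void OnCameraFrameReceived(ARCameraFrameEventArgs args)
/// {
///     // Light estimation values are only present when the platform
///     // provided them for this frame.
///     if (args.lightEstimation.averageBrightness.HasValue)
///         Debug.Log(args.lightEstimation.averageBrightness.Value);
/// }
/// </code>
/// </example>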
public event Action<ARCameraFrameEventArgs> frameReceived;
/// <summary>
/// The material used in background rendering.
/// </summary>
/// <value>
/// The material used in background rendering.
/// </value>
public Material cameraMaterial => (subsystem == null) ? null : subsystem.cameraMaterial;
/// <summary>
/// Tries to get camera intrinsics. Camera intrinsics refers to properties
/// of a physical camera which might be useful when performing additional
/// computer vision processing on the camera image.
/// </summary>
/// <remarks>
/// > [!NOTE]
/// > The intrinsics may change each frame. You should call this each frame that you need intrinsics
/// > in order to ensure you are using the intrinsics for the current frame.
/// </remarks>
/// <param name="cameraIntrinsics">The camera intrinsics to be populated if the camera supports intrinsics.</param>
/// <returns>
/// `true` if <paramref name="cameraIntrinsics"/> was populated. Otherwise, `false`.
/// </returns>
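/// <example>
/// A minimal per-frame sketch (`m_CameraManager` is an assumed reference to
/// this component):
/// <code>
/// if (m_CameraManager.TryGetIntrinsics(out XRCameraIntrinsics intrinsics))
/// {
///     // Focal length and principal point are expressed in pixels.
///     Debug.Log($"focal length: {intrinsics.focalLength}, " +
///         $"principal point: {intrinsics.principalPoint}");
/// }
/// </code>
/// </example>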
public bool TryGetIntrinsics(out XRCameraIntrinsics cameraIntrinsics)
{
if (subsystem == null)
{
cameraIntrinsics = default(XRCameraIntrinsics);
return false;
}
return subsystem.TryGetIntrinsics(out cameraIntrinsics);
}
/// <summary>
/// Get the camera configurations currently supported for the implementation.
/// </summary>
/// <param name="allocator">The allocation strategy to use for the returned data.</param>
/// <returns>
/// The supported camera configurations.
/// </returns>
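/// <example>
/// A minimal enumeration sketch using a temporary allocation
/// (`m_CameraManager` is an assumed reference to this component):
/// <code>
/// using (var configurations = m_CameraManager.GetConfigurations(Allocator.Temp))
/// {
///     foreach (var config in configurations)
///         Debug.Log($"{config.width}x{config.height} @ {config.framerate}");
/// }
/// </code>
/// </example>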
public NativeArray<XRCameraConfiguration> GetConfigurations(Allocator allocator)
=> ((subsystem == null) ? new NativeArray<XRCameraConfiguration>(0, allocator)
: subsystem.GetConfigurations(allocator));
/// <summary>
/// The current camera configuration.
/// </summary>
/// <value>
/// The current camera configuration, if it exists. Otherwise, `null`.
/// </value>
/// <exception cref="System.NotSupportedException">Thrown when setting the current configuration if the
/// implementation does not support camera configurations.</exception>
/// <exception cref="System.ArgumentNullException">Thrown when setting the current configuration if the given
/// configuration is `null`.</exception>
/// <exception cref="System.ArgumentException">Thrown when setting the current configuration if the given
/// configuration is not a supported camera configuration.</exception>
/// <exception cref="System.InvalidOperationException">Thrown when setting the current configuration if the
/// implementation is unable to set the current camera configuration.</exception>
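/// <example>
/// A minimal sketch selecting the first supported configuration; a real
/// caller should also handle the exceptions documented above:
/// <code>
/// using (var configurations = m_CameraManager.GetConfigurations(Allocator.Temp))
/// {
///     if (configurations.Length > 0)
///         m_CameraManager.currentConfiguration = configurations[0];
/// }
/// </code>
/// </example>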
public XRCameraConfiguration? currentConfiguration
{
get => (subsystem == null) ? null : subsystem.currentConfiguration;
set
{
if (subsystem != null)
{
subsystem.currentConfiguration = value;
}
}
}
/// <summary>
/// Attempts to acquire the latest camera image. This provides direct access to the raw pixel data, as well as
/// to utilities to convert to RGB and Grayscale formats. This method is deprecated. Use
/// <see cref="TryAcquireLatestCpuImage"/> instead.
/// </summary>
/// <remarks>
/// The `XRCpuImage` must be disposed to avoid resource leaks.
/// </remarks>
/// <param name="cpuImage">A valid `XRCpuImage` if this method returns `true`.</param>
/// <returns>Returns `true` if the latest camera image was successfully acquired.
/// Returns `false` otherwise.</returns>
[Obsolete("Use TryAcquireLatestCpuImage instead. (2020-05-19)")]
public bool TryGetLatestImage(out XRCpuImage cpuImage) => TryAcquireLatestCpuImage(out cpuImage);
/// <summary>
/// Attempts to acquire the latest camera image. This provides direct access to the raw pixel data, as well as
/// to utilities to convert to RGB and Grayscale formats.
/// </summary>
/// <remarks>
/// The `XRCpuImage` must be disposed to avoid resource leaks.
/// </remarks>
/// <param name="cpuImage">A valid `XRCpuImage` if this method returns `true`.</param>
/// <returns>Returns `true` if the latest camera image was successfully acquired.
/// Returns `false` otherwise.</returns>
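/// <example>
/// A minimal acquire-and-dispose sketch (`m_CameraManager` is an assumed
/// reference to this component):
/// <code>
/// if (m_CameraManager.TryAcquireLatestCpuImage(out XRCpuImage image))
/// {
///     // Dispose the image promptly; the underlying native resource is limited.
///     using (image)
///     {
///         Debug.Log($"{image.width}x{image.height} {image.format}");
///     }
/// }
/// </code>
/// </example>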
public bool TryAcquireLatestCpuImage(out XRCpuImage cpuImage)
{
if (subsystem == null)
{
cpuImage = default;
return false;
}
return subsystem.TryAcquireLatestCpuImage(out cpuImage);
}
void Awake()
{
m_Camera = GetComponent<Camera>();
}
/// <summary>
/// Callback before the subsystem is started (but after it is created).
/// </summary>
protected override void OnBeforeStart()
{
subsystem.autoFocusRequested = m_AutoFocus;
subsystem.requestedLightEstimation = m_LightEstimation.ToFeature();
subsystem.requestedCamera = m_FacingDirection.ToFeature();
}
/// <summary>
/// Callback when the manager is disabled.
/// </summary>
protected override void OnDisable()
{
base.OnDisable();
foreach (var textureInfo in m_TextureInfos)
{
textureInfo.Dispose();
}
m_TextureInfos.Clear();
}
void Update()
{
if (subsystem == null)
return;
// Mirror the subsystem's requested values back into the serialized fields so the
// Inspector reflects changes made directly on the subsystem.
m_FacingDirection = subsystem.requestedCamera.ToCameraFacingDirection();
m_LightEstimation = subsystem.requestedLightEstimation.ToLightEstimation();
m_AutoFocus = subsystem.autoFocusRequested;
var cameraParams = new XRCameraParams
{
zNear = m_Camera.nearClipPlane,
zFar = m_Camera.farClipPlane,
screenWidth = Screen.width,
screenHeight = Screen.height,
screenOrientation = Screen.orientation
};
XRCameraFrame frame;
if (subsystem.TryGetLatestFrame(cameraParams, out frame))
{
UpdateTexturesInfos();
if (frameReceived != null)
InvokeFrameReceivedEvent(frame);
}
}
/// <summary>
/// Pull the texture descriptors from the camera subsystem, and update the texture information maintained by
/// this component.
/// </summary>
void UpdateTexturesInfos()
{
var textureDescriptors = subsystem.GetTextureDescriptors(Allocator.Temp);
try
{
int numUpdated = Math.Min(m_TextureInfos.Count, textureDescriptors.Length);
// Update the existing textures that are in common between the two arrays.
for (int i = 0; i < numUpdated; ++i)
{
m_TextureInfos[i] = ARTextureInfo.GetUpdatedTextureInfo(m_TextureInfos[i], textureDescriptors[i]);
}
// If there are fewer textures in the current frame than we had previously, destroy any remaining unneeded
// textures.
if (numUpdated < m_TextureInfos.Count)
{
for (int i = numUpdated; i < m_TextureInfos.Count; ++i)
{
m_TextureInfos[i].Reset();
}
m_TextureInfos.RemoveRange(numUpdated, (m_TextureInfos.Count - numUpdated));
}
// Else, if there are more textures in the current frame than we have previously, add new textures for any
// additional descriptors.
else if (textureDescriptors.Length > m_TextureInfos.Count)
{
for (int i = numUpdated; i < textureDescriptors.Length; ++i)
{
m_TextureInfos.Add(new ARTextureInfo(textureDescriptors[i]));
}
}
}
finally
{
if (textureDescriptors.IsCreated)
textureDescriptors.Dispose();
}
}
/// <summary>
/// Invoke the camera frame received event, packing the frame information into the event argument.
/// </summary>
/// <param name="frame">The camera frame raising the event.</param>
void InvokeFrameReceivedEvent(XRCameraFrame frame)
{
var lightEstimation = new ARLightEstimationData();
if (frame.hasAverageBrightness)
lightEstimation.averageBrightness = frame.averageBrightness;
if (frame.hasAverageIntensityInLumens)
lightEstimation.averageIntensityInLumens = frame.averageIntensityInLumens;
if (frame.hasAverageColorTemperature)
lightEstimation.averageColorTemperature = frame.averageColorTemperature;
if (frame.hasColorCorrection)
lightEstimation.colorCorrection = frame.colorCorrection;
if (frame.hasMainLightDirection)
lightEstimation.mainLightDirection = frame.mainLightDirection;
if (frame.hasMainLightIntensityLumens)
lightEstimation.mainLightIntensityLumens = frame.mainLightIntensityLumens;
if (frame.hasMainLightColor)
lightEstimation.mainLightColor = frame.mainLightColor;
if (frame.hasAmbientSphericalHarmonics)
lightEstimation.ambientSphericalHarmonics = frame.ambientSphericalHarmonics;
var eventArgs = new ARCameraFrameEventArgs();
eventArgs.lightEstimation = lightEstimation;
if (frame.hasTimestamp)
eventArgs.timestampNs = frame.timestampNs;
if (frame.hasProjectionMatrix)
eventArgs.projectionMatrix = frame.projectionMatrix;
if (frame.hasDisplayMatrix)
eventArgs.displayMatrix = frame.displayMatrix;
if (frame.hasExposureDuration)
eventArgs.exposureDuration = frame.exposureDuration;
if (frame.hasExposureOffset)
eventArgs.exposureOffset = frame.exposureOffset;
if (frame.hasCameraGrain)
{
if(m_CameraGrainInfo.texture == null && ARTextureInfo.IsSupported(frame.cameraGrain))
{
m_CameraGrainInfo = new ARTextureInfo(frame.cameraGrain);
}
else if(m_CameraGrainInfo.texture != null && ARTextureInfo.IsSupported(frame.cameraGrain))
{
m_CameraGrainInfo = ARTextureInfo.GetUpdatedTextureInfo(m_CameraGrainInfo, frame.cameraGrain);
}
eventArgs.cameraGrainTexture = m_CameraGrainInfo.texture;
}
if(frame.hasNoiseIntensity)
eventArgs.noiseIntensity = frame.noiseIntensity;
// Reuse the static lists to avoid per-frame allocations.
s_Textures.Clear();
s_PropertyIds.Clear();
foreach (var textureInfo in m_TextureInfos)
{
DebugAssert.That(textureInfo.descriptor.dimension == TextureDimension.Tex2D)?.
WithMessage($"Camera Texture needs to be a Texture 2D, but instead is {textureInfo.descriptor.dimension.ToString()}.");
s_Textures.Add((Texture2D)textureInfo.texture);
s_PropertyIds.Add(textureInfo.descriptor.propertyNameId);
}
subsystem.GetMaterialKeywords(out List<string> enabledMaterialKeywords, out List<string> disabledMaterialKeywords);
eventArgs.textures = s_Textures;
eventArgs.propertyNameIds = s_PropertyIds;
eventArgs.enabledMaterialKeywords = enabledMaterialKeywords;
eventArgs.disabledMaterialKeywords = disabledMaterialKeywords;
frameReceived(eventArgs);
}
static List<Texture2D> s_Textures = new List<Texture2D>();
static List<int> s_PropertyIds = new List<int>();
readonly List<ARTextureInfo> m_TextureInfos = new List<ARTextureInfo>();
Camera m_Camera;
bool m_PreRenderInvertCullingValue;
ARTextureInfo m_CameraGrainInfo;
}
}