@lookoutking · Created July 6, 2022 07:38
ARFoundation Camera Frame Sender for Unity Render Streaming package
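A custom VideoStreamSender for the Unity Render Streaming package: each AR camera frame is acquired from ARCameraManager through the CPU image API, converted to RGBA32, blitted into the RenderTexture that backs the WebRTC video track, and also shown in a RawImage as a local preview.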
using System.Collections.Generic;
using Unity.WebRTC;
using UnityEngine;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering;
using Unity.RenderStreaming;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;
using Unity.Collections;
using System;
using Unity.Collections.LowLevel.Unsafe;
using UnityEngine.UI;
public class ARCameraStreamSender : VideoStreamSender
{
    [SerializeField] private ARCameraManager _cameraManager;
    [SerializeField] private RawImage _rawImage;

    public override Texture SendTexture => _sendTexture;

    private RenderTexture _sendTexture;
    // Reused CPU-side texture; allocating a new Texture2D every frame would leak memory.
    private Texture2D _cameraTexture;
    private HashSet<string> _connections = new HashSet<string>();

    [SerializeField, RenderTextureDepthBuffer]
    private int _depth = 0;

    [SerializeField, RenderTextureAntiAliasing]
    private int _antiAliasing = 1;
    protected virtual void Awake()
    {
        OnStartedStream += id => _connections.Add(id);
        OnStoppedStream += id => _connections.Remove(id);
    }
    private void OnEnable()
    {
        _cameraManager.frameReceived += OnCameraFrameReceived;
    }

    private void OnDisable()
    {
        _cameraManager.frameReceived -= OnCameraFrameReceived;
    }
    protected void OnDestroy()
    {
        if (_sendTexture != null)
        {
            DestroyImmediate(_sendTexture);
            _sendTexture = null;
        }

        if (_cameraTexture != null)
        {
            DestroyImmediate(_cameraTexture);
            _cameraTexture = null;
        }
    }
    protected override MediaStreamTrack CreateTrack()
    {
        RenderTexture rt;
        if (_sendTexture != null)
        {
            rt = _sendTexture;
            RenderTextureFormat supportFormat =
                WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
            GraphicsFormat graphicsFormat =
                GraphicsFormatUtility.GetGraphicsFormat(supportFormat, RenderTextureReadWrite.Default);
            GraphicsFormat compatibleFormat = SystemInfo.GetCompatibleFormat(graphicsFormat, FormatUsage.Render);
            GraphicsFormat format = graphicsFormat == compatibleFormat ? graphicsFormat : compatibleFormat;

            if (rt.graphicsFormat != format)
            {
                Debug.LogWarning(
                    $"The color format {rt.graphicsFormat} is not supported by unity.webrtc. Changing to the supported color format {format}.");
                rt.Release();
                rt.graphicsFormat = format;
                rt.Create();
            }

            _sendTexture = rt;
        }
        else
        {
            RenderTextureFormat format =
                WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
            rt = new RenderTexture(streamingSize.x, streamingSize.y, _depth, format) { antiAliasing = _antiAliasing };
            rt.Create();
            _sendTexture = rt;
        }

        // OpenGL's texture origin differs from the other graphics APIs: the texture
        // arrives non-inverted, so the track has to flip each frame when sending.
        var isOpenGl = SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLCore ||
                       SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLES2 ||
                       SystemInfo.graphicsDeviceType == GraphicsDeviceType.OpenGLES3;
        return new VideoStreamTrack(rt, isOpenGl);
    }
    // Conversion approach based on:
    // https://forum.unity.com/threads/ar-foundation-camera-output-to-render-texture.1075068/
    private unsafe void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // Optionally skip the conversion while no peers are connected:
        // if (_connections.Count == 0)
        //     return;

        if (!_cameraManager.TryAcquireLatestCpuImage(out XRCpuImage image))
            return;

        var conversionParams = new XRCpuImage.ConversionParams
        {
            // Get the entire image.
            inputRect = new RectInt(0, 0, image.width, image.height),
            // Downsample by 2.
            outputDimensions = new Vector2Int(image.width / 2, image.height / 2),
            // Choose an RGBA format.
            outputFormat = TextureFormat.RGBA32,
            // Flip across the vertical axis (mirror image).
            transformation = XRCpuImage.Transformation.MirrorY
        };

        // See how many bytes are needed to store the converted image.
        int size = image.GetConvertedDataSize(conversionParams);

        // Allocate a buffer to store the image.
        var buffer = new NativeArray<byte>(size, Allocator.Temp);

        // Extract the image data.
        image.Convert(conversionParams, new IntPtr(buffer.GetUnsafePtr()), buffer.Length);

        // The image has been converted to RGBA32 and written into the buffer, so the
        // XRCpuImage can be disposed. Skipping this leaks native resources.
        image.Dispose();

        // Upload the data into a Texture2D, reusing the texture across frames and
        // recreating it only when the output dimensions change.
        if (_cameraTexture == null ||
            _cameraTexture.width != conversionParams.outputDimensions.x ||
            _cameraTexture.height != conversionParams.outputDimensions.y)
        {
            if (_cameraTexture != null)
                Destroy(_cameraTexture);
            _cameraTexture = new Texture2D(
                conversionParams.outputDimensions.x,
                conversionParams.outputDimensions.y,
                conversionParams.outputFormat,
                false);
        }
        _cameraTexture.LoadRawTextureData(buffer);
        _cameraTexture.Apply();

        // Blit into the RenderTexture the WebRTC track streams from (it exists only
        // after CreateTrack has run), and show the frame locally in the RawImage.
        if (_sendTexture != null)
            Graphics.Blit(_cameraTexture, _sendTexture);
        _rawImage.texture = _cameraTexture;

        // Done with the temporary buffer.
        buffer.Dispose();
    }
}
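Setup sketch, assuming Unity Render Streaming 3.x (whose VideoStreamSender API this class overrides): attach the component to a GameObject in an AR Foundation scene, assign the AR Camera's ARCameraManager and a RawImage for the local preview in the Inspector, then register the component in the video stream list of the scene's signaling handler (for example the Broadcast component) so that CreateTrack is invoked when a peer connects.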