Okay, so I still don’t have a fully tested and proven solution yet, but I’m now able to see the stream. For anyone else wondering how to do this in Unity, here are the basics.
I’m using:
Ricoh Theta V with firmware 3.50.1, working in AP mode (default) on API v2.1.
Unity 2019.4
First you will need to set up a custom DownloadHandler to read the data. I created one based on the answer in this StackOverflow post.
Here’s my implementation (there’s probably a better way to write it, I just hastily implemented this to verify if this is indeed the way to go about it):
ThetaStreamRequestHandler.cs
using UnityEngine;
using UnityEngine.Networking;
using System;
/// <summary>
/// Custom download handler that parses an MJPEG (motion-JPEG) byte stream from the
/// Ricoh Theta live preview endpoint. It scans incoming chunks for JPEG frame
/// boundaries (SOI marker FF D8 ... EOI marker FF D9), accumulates one frame at a
/// time, decodes it into a reusable Texture2D and raises Ev_FrameCaptured.
/// </summary>
public class ThetaStreamRequestHandler : DownloadHandlerScript
{
    /// <summary>Raised once per completed JPEG frame with the decoded texture.</summary>
    public event Action<Texture2D> Ev_FrameCaptured;

    /// <summary>The most recently decoded frame. The same Texture2D instance is
    /// reused across frames to avoid allocating (and leaking) one texture per frame.</summary>
    public Texture2D LastFrame { get; private set; }

    // Number of bytes of the current (partial) JPEG frame accumulated so far.
    private int counter = 0;

    // Fixed accumulation buffer. 50 MB is far larger than any preview frame;
    // frames that somehow exceed it are discarded (see overflow guard below).
    private readonly byte[] image = new byte[50000000];

    // Standard scripted download handler - will allocate memory on each ReceiveData callback.
    public ThetaStreamRequestHandler()
        : base()
    {
    }

    // Pre-allocated scripted download handler.
    // Will reuse the supplied byte array to deliver data.
    // Eliminates memory allocation.
    public ThetaStreamRequestHandler( byte[] buffer )
        : base( buffer )
    {
    }

    // Required by DownloadHandler base class. Called when you address the 'bytes' property.
    // We never expose raw bytes; frames are delivered via Ev_FrameCaptured instead.
    protected override byte[] GetData() { return null; }

    /// <summary>
    /// Called by Unity whenever a chunk of network data arrives.
    /// Returns false to abort the download, true to keep streaming.
    /// </summary>
    /// <param name="byteFromCamera">Raw chunk of the MJPEG stream.</param>
    /// <param name="dataLength">Number of valid bytes in the chunk.</param>
    protected override bool ReceiveData( byte[] byteFromCamera, int dataLength )
    {
        if( byteFromCamera == null || byteFromCamera.Length < 1 )
        {
            //Debug.Log("CustomWebRequest :: ReceiveData - received a null/empty buffer");
            return false;
        }

        // Scan the chunk byte-by-byte for JPEG frame boundaries.
        for( int i = 0; i < dataLength; i++ )
        {
            byte b = byteFromCamera[i];

            if( counter == 0 )
            {
                // Waiting for the first byte of the SOI marker (FF D8).
                if( b == 0xFF )
                {
                    image[counter++] = b;
                }
            }
            else if( counter == 1 )
            {
                if( b == 0xD8 )
                {
                    // SOI confirmed - start accumulating the frame body.
                    image[counter++] = b;
                }
                else
                {
                    // False start: FF not followed by D8. Reset, but if this byte
                    // is itself FF it may begin the real marker (handles FF FF D8).
                    counter = ( b == 0xFF ) ? 1 : 0;
                }
            }
            else
            {
                // Overflow guard: a frame larger than the buffer is malformed;
                // discard it and resynchronize on the next SOI marker.
                if( counter >= image.Length )
                {
                    counter = 0;
                    continue;
                }

                image[counter] = b;

                if( b == 0xD9 && image[counter - 1] == 0xFF )
                {
                    // EOI marker (FF D9) found - we have one complete JPEG frame.
                    int frameLength = counter + 1;
                    counter = 0;

                    // Copy only the frame bytes; passing the whole 50 MB buffer to
                    // LoadImage would include stale data from previous frames.
                    byte[] jpeg = new byte[frameLength];
                    Array.Copy( image, jpeg, frameLength );

                    // Reuse one Texture2D instead of allocating a new (never
                    // destroyed) texture per frame, which leaks memory fast.
                    if( LastFrame == null )
                    {
                        LastFrame = new Texture2D( 2, 2 );
                    }
                    LastFrame.LoadImage( jpeg );
                    Ev_FrameCaptured?.Invoke( LastFrame );
                }
                else
                {
                    counter++;
                }
            }
        }
        return true;
    }
}
Then you will need to set up a UnityWebRequest, which should stay alive for the entire duration of the stream. Something like this:
ThetaVZ1Streaming.cs
using UnityEngine;
using UnityEngine.Networking;
/// <summary>
/// Starts/stops the Ricoh Theta V/Z1 live preview over the OSC WebAPI
/// (camera.getLivePreview) and pushes each decoded frame onto a target Renderer.
/// The UnityWebRequest is kept alive for the whole streaming session.
/// </summary>
public class ThetaVZ1Streaming : MonoBehaviour
{
    // Renderer whose main texture receives each decoded preview frame.
    [SerializeField] private Renderer _output;

    // Camera IP; 192.168.1.1 is the Theta default in AP mode.
    [SerializeField] private string _cameraIP = "192.168.1.1";

    // Long-lived request carrying the MJPEG stream; null when not streaming.
    private UnityWebRequest _request;

    /// <summary>Begins the live preview stream. No-op if already streaming.</summary>
    [ContextMenu( "Start Streaming" )]
    public void StartStreaming()
    {
        if( _request != null )
        {
            return; // already streaming
        }

        _request = BuildCommandRequest( "{ \"name\": \"camera.getLivePreview\" }" );
        if( _request.downloadHandler is ThetaStreamRequestHandler handler )
        {
            handler.Ev_FrameCaptured += OnFrameCaptured;
        }
        _request.SendWebRequest();
    }

    /// <summary>Stops the stream and releases the request. Safe to call when idle.</summary>
    [ContextMenu( "Stop Streaming")]
    public void StopStreaming()
    {
        // Guard against OnDestroy (or a second call) when streaming never started;
        // the original code threw NullReferenceException here.
        if( _request == null )
        {
            return;
        }

        if( _request.downloadHandler is ThetaStreamRequestHandler handler )
        {
            handler.Ev_FrameCaptured -= OnFrameCaptured;
        }

        // Abort the still-running streaming request before disposing it so the
        // connection is torn down cleanly.
        _request.Abort();
        _request.Dispose();
        _request = null;
    }

    private void OnDestroy()
    {
        StopStreaming();
    }

    // Frame callback from the download handler; runs on the main thread.
    private void OnFrameCaptured( Texture2D frame )
    {
        _output.material.mainTexture = frame;
    }

    /// <summary>
    /// Builds a POST request against the OSC commands/execute endpoint with the
    /// given JSON command body and a streaming download handler attached.
    /// </summary>
    private UnityWebRequest BuildCommandRequest( string jsonCommand )
    {
        UnityWebRequest uwr = new UnityWebRequest( $"http://{_cameraIP}/osc/commands/execute", "POST" );
        // Encoding.UTF8 is the cached shared instance; no need to allocate one.
        byte[] jsonToSend = System.Text.Encoding.UTF8.GetBytes( jsonCommand );
        uwr.uploadHandler = new UploadHandlerRaw( jsonToSend );
        uwr.downloadHandler = new ThetaStreamRequestHandler();
        uwr.SetRequestHeader( "Content-Type", "application/json;charset=utf-8" );
        return uwr;
    }
}
I created a Sphere, added ThetaVZ1Streaming to it and set the renderer as itself just to test. I believe this should be enough to get anyone on the right track on how to stream from Theta V/Z1 from inside Unity3D using the WebAPI. Sorry if this answer is already posted somewhere, I googled a lot and didn’t find any concrete solution though.
Thanks a lot to @craig for your insight, was really helpful!