// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

using Microsoft.MixedReality.Toolkit.Utilities;
using System;
using System.IO;
using System.Threading.Tasks;
using UnityEngine;

namespace Microsoft.MixedReality.Toolkit.Input
{
    /// <summary>
    /// Provides input recording into an internal buffer and exporting to files.
    /// </summary>
    [MixedRealityDataProvider(
        typeof(IMixedRealityInputSystem),
        (SupportedPlatforms)(-1), // Supported on all platforms
        "Input Recording Service",
        "Profiles/DefaultMixedRealityInputRecordingProfile.asset",
        "MixedRealityToolkit.SDK",
        true)]
    public class InputRecordingService : BaseInputDeviceManager, IMixedRealityInputRecordingService
    {
        /// <summary>
        /// Invoked when recording begins
        /// </summary>
        public event Action OnRecordingStarted;

        /// <summary>
        /// Invoked when recording ends
        /// </summary>
        public event Action OnRecordingStopped;

        /// <inheritdoc />
        public bool IsRecording { get; private set; } = false;

        private bool useBufferTimeLimit = true;

        /// <inheritdoc />
        public bool UseBufferTimeLimit
        {
            get { return useBufferTimeLimit; }
            set
            {
                if (useBufferTimeLimit && !value)
                {
                    // Start at buffer limit when making buffer unlimited
                    unlimitedRecordingStartTime = StartTime;
                }

                useBufferTimeLimit = value;

                if (useBufferTimeLimit)
                {
                    PruneBuffer();
                }
            }
        }

        private float recordingBufferTimeLimit = 30.0f;

        /// <inheritdoc />
        public float RecordingBufferTimeLimit
        {
            get { return recordingBufferTimeLimit; }
            set
            {
                recordingBufferTimeLimit = Mathf.Max(value, 0.0f);

                if (useBufferTimeLimit)
                {
                    PruneBuffer();
                }
            }
        }

        // Start time of recording if buffer is unlimited.
        // Nullable to determine when time needs to be reset.
        private float? unlimitedRecordingStartTime = null;

        /// <summary>
        /// Start time of recording.
        /// </summary>
        public float StartTime
        {
            get
            {
                if (unlimitedRecordingStartTime.HasValue)
                {
                    if (useBufferTimeLimit)
                    {
                        return Mathf.Max(unlimitedRecordingStartTime.Value, EndTime - recordingBufferTimeLimit);
                    }
                    else
                    {
                        return unlimitedRecordingStartTime.Value;
                    }
                }

                return EndTime;
            }
        }

        /// <summary>
        /// End time of recording.
        /// </summary>
        public float EndTime { get; private set; }

        /// <summary>
        /// The profile used for recording.
        /// </summary>
        public MixedRealityInputRecordingProfile InputRecordingProfile
        {
            get
            {
                var profile = ConfigurationProfile as MixedRealityInputRecordingProfile;

                if (!profile)
                {
                    Debug.LogError("Profile for Input Recording Service must be a MixedRealityInputRecordingProfile");
                }

                return profile;
            }
            set => ConfigurationProfile = value;
        }

        private float frameRate;
        private float frameInterval;
        private float nextFrame;
        private InputRecordingBuffer recordingBuffer = null;
        private IMixedRealityEyeGazeProvider eyeGazeProvider;

        /// <summary>
        /// Constructor.
        /// </summary>
        /// <param name="registrar">The instance that loaded the data provider.</param>
        /// <param name="inputSystem">The instance that receives data from this provider.</param>
        /// <param name="name">Friendly name of the service.</param>
        /// <param name="priority">Service priority. Used to determine order of instantiation.</param>
        /// <param name="profile">The service's configuration profile.</param>
        [Obsolete("This constructor is obsolete (registrar parameter is no longer required) and will be removed in a future version of the Microsoft Mixed Reality Toolkit.")]
        public InputRecordingService(
            IMixedRealityServiceRegistrar registrar,
            IMixedRealityInputSystem inputSystem,
            string name = null,
            uint priority = DefaultPriority,
            BaseMixedRealityProfile profile = null) : this(inputSystem, name, priority, profile)
        {
            Registrar = registrar;
        }

        /// <summary>
        /// Constructor.
        /// </summary>
        /// <param name="inputSystem">The instance that receives data from this provider.</param>
        /// <param name="name">Friendly name of the service.</param>
        /// <param name="priority">Service priority. Used to determine order of instantiation.</param>
        /// <param name="profile">The service's configuration profile.</param>
        public InputRecordingService(
            IMixedRealityInputSystem inputSystem,
            string name = null,
            uint priority = DefaultPriority,
            BaseMixedRealityProfile profile = null) : base(inputSystem, name, priority, profile)
        { }

        /// <inheritdoc />
        public override void Enable()
        {
            base.Enable();

            recordingBuffer = new InputRecordingBuffer();
        }

        /// <inheritdoc />
        public override void Disable()
        {
            base.Disable();

            recordingBuffer = null;
            ResetStartTime();
        }

        /// <inheritdoc />
        public void StartRecording()
        {
            eyeGazeProvider = CoreServices.InputSystem.EyeGazeProvider;
            IsRecording = true;
            frameRate = InputRecordingProfile.FrameRate;
            frameInterval = 1f / frameRate;
            nextFrame = Time.time + frameInterval;

            if (UseBufferTimeLimit)
            {
                PruneBuffer();
            }

            if (!unlimitedRecordingStartTime.HasValue)
            {
                unlimitedRecordingStartTime = Time.time;
            }

            OnRecordingStarted?.Invoke();
        }

        /// <inheritdoc />
        public void StopRecording()
        {
            IsRecording = false;
            OnRecordingStopped?.Invoke();
        }

        /// <inheritdoc />
        public override void LateUpdate()
        {
            if (IsEnabled && IsRecording && Time.time > nextFrame)
            {
                EndTime = Time.time;
                // Advance nextFrame to the next frame boundary after the current time,
                // skipping over any frame intervals that have already elapsed.
                nextFrame += frameInterval * (Mathf.Floor((Time.time - nextFrame) * frameRate) + 1f);

                if (UseBufferTimeLimit)
                {
                    PruneBuffer();
                }

                RecordKeyframe();
            }
        }

        /// <inheritdoc />
        public void DiscardRecordedInput()
        {
            if (IsEnabled)
            {
                recordingBuffer.Clear();
                ResetStartTime();
            }
        }

        /// <inheritdoc />
        public string SaveInputAnimation(string directory = null) => SaveInputAnimation(InputAnimationSerializationUtils.GetOutputFilename(), directory);

        /// <inheritdoc />
        public string SaveInputAnimation(string filename, string directory)
        {
            if (IsEnabled)
            {
                string path = Path.Combine(directory ?? Application.persistentDataPath, filename);

                try
                {
                    using (Stream fileStream = File.Open(path, FileMode.Create))
                    {
                        PruneBuffer();

                        var animation = InputAnimation.FromRecordingBuffer(recordingBuffer, InputRecordingProfile);

                        Debug.Log("Recording buffer saved to animation");

                        animation.ToStream(fileStream, 0f);
                        Debug.Log($"Recorded input animation exported to {path}");
                    }

                    return path;
                }
                catch (IOException ex)
                {
                    Debug.LogWarning(ex.Message);
                }
            }

            return "";
        }

        /// <inheritdoc />
        public Task<string> SaveInputAnimationAsync(string directory = null) => SaveInputAnimationAsync(InputAnimationSerializationUtils.GetOutputFilename(), directory);

        /// <inheritdoc />
        public async Task<string> SaveInputAnimationAsync(string filename, string directory)
        {
            if (IsEnabled)
            {
                string path = Path.Combine(directory ?? Application.persistentDataPath, filename);

                try
                {
                    using (Stream fileStream = File.Open(path, FileMode.Create))
                    {
                        PruneBuffer();

                        var animation = await Task.Run(() => InputAnimation.FromRecordingBuffer(recordingBuffer, InputRecordingProfile));

                        Debug.Log("Recording buffer saved to animation");

                        await animation.ToStreamAsync(fileStream, 0f);

                        Debug.Log($"Recorded input animation exported to {path}");
                    }

                    return path;
                }
                catch (IOException ex)
                {
                    Debug.LogWarning(ex.Message);
                }
            }

            return "";
        }

        private void ResetStartTime()
        {
            if (IsRecording)
            {
                unlimitedRecordingStartTime = Time.time;
            }
            else
            {
                unlimitedRecordingStartTime = null;
            }
        }

        /// <summary>
        /// Record a keyframe at the given time for the main camera and tracked input devices.
        /// </summary>
        private void RecordKeyframe()
        {
            float time = Time.time;
            var profile = InputRecordingProfile;

            recordingBuffer.NewKeyframe(time);

            if (profile.RecordHandData)
            {
                RecordInputHandData(Handedness.Left);
                RecordInputHandData(Handedness.Right);
            }

            MixedRealityPose cameraPose;

            if (profile.RecordCameraPose && CameraCache.Main)
            {
                cameraPose = new MixedRealityPose(CameraCache.Main.transform.position, CameraCache.Main.transform.rotation);
                recordingBuffer.SetCameraPose(cameraPose);
            }
            else
            {
                cameraPose = new MixedRealityPose(Vector3.zero, Quaternion.identity);
            }

            if (profile.RecordEyeGaze)
            {
                if (eyeGazeProvider != null)
                {
                    recordingBuffer.SetGazeRay(eyeGazeProvider.LatestEyeGaze);
                }
                else
                {
                    // No eye gaze provider available; fall back to a ray along the camera's forward direction.
                    recordingBuffer.SetGazeRay(new Ray(cameraPose.Position, cameraPose.Forward));
                }
            }
        }

        /// <summary>
        /// Record a keyframe at the given time for a hand with the given handedness, if it is tracked.
        /// </summary>
        private void RecordInputHandData(Handedness handedness)
        {
            float time = Time.time;
            var profile = InputRecordingProfile;
            var hand = HandJointUtils.FindHand(handedness);

            if (hand == null)
            {
                recordingBuffer.SetHandState(handedness, false, false);
                return;
            }

            bool isTracked = (hand.TrackingState == TrackingState.Tracked);

            // Extract extra information from current interactions
            bool isPinching = false;

            for (int i = 0; i < hand.Interactions?.Length; i++)
            {
                var interaction = hand.Interactions[i];

                switch (interaction.InputType)
                {
                    case DeviceInputType.Select:
                        isPinching = interaction.BoolData;
                        break;
                }
            }

            recordingBuffer.SetHandState(handedness, isTracked, isPinching);

            if (isTracked)
            {
                for (int i = 0; i < ArticulatedHandPose.JointCount; ++i)
                {
                    if (hand.TryGetJoint((TrackedHandJoint)i, out MixedRealityPose jointPose))
                    {
                        recordingBuffer.SetJointPose(handedness, (TrackedHandJoint)i, jointPose);
                    }
                }
            }
        }

        /// <summary>
        /// Discard keyframes before the cutoff time.
        /// </summary>
        private void PruneBuffer()
        {
            recordingBuffer.RemoveBeforeTime(StartTime);
        }
    }
}
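
// ------------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original file). It shows one way a scene component
// might drive this service: start recording when the component is enabled, then stop and export
// the buffer when it is disabled. It assumes the service is registered as an input system data
// provider and that the input system implements IMixedRealityDataProviderAccess; the Examples
// namespace and the RecordingToggleExample class below are hypothetical names used only here.
// ------------------------------------------------------------------------------------------------
namespace Microsoft.MixedReality.Toolkit.Input.Examples
{
    using UnityEngine;

    public class RecordingToggleExample : MonoBehaviour
    {
        private IMixedRealityInputRecordingService recordingService;

        private void OnEnable()
        {
            // Look up the recording service among the input system's data providers.
            recordingService = (CoreServices.InputSystem as IMixedRealityDataProviderAccess)?
                .GetDataProvider<IMixedRealityInputRecordingService>();

            recordingService?.StartRecording();
        }

        private void OnDisable()
        {
            recordingService?.StopRecording();

            // With no directory argument, SaveInputAnimation writes a generated filename into
            // Application.persistentDataPath and returns the full path.
            string path = recordingService?.SaveInputAnimation();
            Debug.Log($"Recorded input animation saved to: {path}");
        }
    }
}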