diff --git a/com.microsoft.mixedreality.webrtc/CHANGELOG.md b/com.microsoft.mixedreality.webrtc/CHANGELOG.md new file mode 100644 index 0000000..e69de29 diff --git a/com.microsoft.mixedreality.webrtc/CHANGELOG.md.meta b/com.microsoft.mixedreality.webrtc/CHANGELOG.md.meta new file mode 100644 index 0000000..9b99a8c --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/CHANGELOG.md.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: 509ffae3bcf10934a8ee3aed9d422669 +TextScriptImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Documentation~/install1.png b/com.microsoft.mixedreality.webrtc/Documentation~/install1.png new file mode 100644 index 0000000..1561928 Binary files /dev/null and b/com.microsoft.mixedreality.webrtc/Documentation~/install1.png differ diff --git a/com.microsoft.mixedreality.webrtc/Documentation~/install2.png b/com.microsoft.mixedreality.webrtc/Documentation~/install2.png new file mode 100644 index 0000000..5a60469 Binary files /dev/null and b/com.microsoft.mixedreality.webrtc/Documentation~/install2.png differ diff --git a/com.microsoft.mixedreality.webrtc/Editor.meta b/com.microsoft.mixedreality.webrtc/Editor.meta new file mode 100644 index 0000000..12c6589 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: f096824885b82e948a221c950ea92594 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Editor/CaptureCameraDrawer.cs b/com.microsoft.mixedreality.webrtc/Editor/CaptureCameraDrawer.cs new file mode 100644 index 0000000..3b9451d --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/CaptureCameraDrawer.cs @@ -0,0 +1,92 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using UnityEngine; +using UnityEditor; + +namespace Microsoft.MixedReality.WebRTC.Unity.Editor +{ + /// + /// Property drawer for <see cref="CaptureCameraAttribute"/>, to report an error to the user if + /// the associated <see cref="Camera"/> property instance cannot be used for framebuffer + /// capture by <see cref="SceneVideoSource"/>. + /// + [CustomPropertyDrawer(typeof(CaptureCameraAttribute))] + public class CaptureCameraDrawer : PropertyDrawer + { + private const int c_errorMessageHeight = 42; + + public override void OnGUI(Rect position, SerializedProperty property, GUIContent label) + { + try + { + Validate(property.objectReferenceValue as Camera); + } + catch (Exception ex) + { + // Display error message below the property + var totalHeight = position.height; + position.yMin = position.yMax - c_errorMessageHeight; + EditorGUI.HelpBox(position, ex.Message, MessageType.Warning); + + // Adjust rect for the property itself + position.yMin = position.yMax - totalHeight; + position.yMax -= c_errorMessageHeight; + } + + EditorGUI.PropertyField(position, property, label); + } + + public override float GetPropertyHeight(SerializedProperty property, GUIContent label) + { + float height = base.GetPropertyHeight(property, label); + try + { + Validate(property.objectReferenceValue as Camera); + } + catch (Exception) + { + // Add extra space for the error message + height += c_errorMessageHeight; + } + return height; + } + + /// + /// Validate that a given <see cref="Camera"/> instance can be used for framebuffer + /// capture by <see cref="SceneVideoSource"/> based on the current settings of the Unity Player + /// for the current build platform. + /// + /// The camera instance to test the settings of. + /// + /// The camera has settings not compatible with its use with <see cref="SceneVideoSource"/>. + /// + public static void Validate(Camera camera) + { + if (PlayerSettings.virtualRealitySupported && (camera != null)) + { + if (PlayerSettings.stereoRenderingPath == StereoRenderingPath.MultiPass) + { + // Ensure camera is not rendering to both eyes in multi-pass stereo, otherwise the command buffer + // is executed twice (once per eye) and will produce twice as many frames, which leads to stuttering + // when playing back the video stream resulting from combining those frames. + if (camera.stereoTargetEye == StereoTargetEyeMask.Both) + { + throw new NotSupportedException("Capture camera renders both eyes in multi-pass stereoscopic rendering. This is not" + + " supported by the capture mechanism which cannot discriminate them. Set Camera.stereoTargetEye to either Left or" + + " Right, or use a different rendering mode (Player Settings > XR Settings > Stereo Rendering Mode)."); + } + } +#if !UNITY_2019_1_OR_NEWER + else if (PlayerSettings.stereoRenderingPath == StereoRenderingPath.Instancing) + { + throw new NotSupportedException("Capture camera does not support single-pass instanced stereoscopic rendering before Unity 2019.1." + + " Use a different stereoscopic rendering mode (Player Settings > XR Settings > Stereo Rendering Mode) or upgrade to Unity 2019.1+."); + } +#endif + } + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Editor/CaptureCameraDrawer.cs.meta b/com.microsoft.mixedreality.webrtc/Editor/CaptureCameraDrawer.cs.meta new file mode 100644 index 0000000..b2de617 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/CaptureCameraDrawer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: aa1232332d1bd5f4e8caef2583b98ad9 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Editor/ConfigurableIceServerDrawer.cs b/com.microsoft.mixedreality.webrtc/Editor/ConfigurableIceServerDrawer.cs new file mode 100644 index 0000000..952aea4 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/ConfigurableIceServerDrawer.cs @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using UnityEngine; +using UnityEditor; + +namespace Microsoft.MixedReality.WebRTC.Unity.Editor +{ + /// + /// Property drawer for <see cref="ConfigurableIceServer"/>, to display servers on a single line + /// with the kind first (fixed width) and the server address next (stretching).
+ /// + [CustomPropertyDrawer(typeof(ConfigurableIceServer))] + public class ConfigurableIceServerDrawer : PropertyDrawer + { + const float kTypeWidth = 60f; + + public override void OnGUI(Rect rect, SerializedProperty property, GUIContent label) + { + var type = property.FindPropertyRelative("Type"); + EditorGUI.PropertyField(new Rect(rect.x, rect.y, kTypeWidth, rect.height), type, GUIContent.none); + + rect.x += kTypeWidth - 10f; + rect.width -= kTypeWidth - 10f; + var uri = property.FindPropertyRelative("Uri"); + EditorGUI.PropertyField(rect, uri, GUIContent.none); + } + + public override float GetPropertyHeight(SerializedProperty property, GUIContent label) + { + return EditorStyles.textField.lineHeight + 3f; + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Editor/ConfigurableIceServerDrawer.cs.meta b/com.microsoft.mixedreality.webrtc/Editor/ConfigurableIceServerDrawer.cs.meta new file mode 100644 index 0000000..8550051 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/ConfigurableIceServerDrawer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 609cf190104833a4bb00a549d016469c +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Editor/Icons.meta b/com.microsoft.mixedreality.webrtc/Editor/Icons.meta new file mode 100644 index 0000000..42a652c --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/Icons.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 5f4dbb05cc7783e42851c507ae42fd35 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Editor/Icons/editor_icons.png b/com.microsoft.mixedreality.webrtc/Editor/Icons/editor_icons.png new file mode 100644 index 0000000..25533dd Binary files /dev/null and b/com.microsoft.mixedreality.webrtc/Editor/Icons/editor_icons.png differ diff --git a/com.microsoft.mixedreality.webrtc/Editor/Icons/editor_icons.png.meta b/com.microsoft.mixedreality.webrtc/Editor/Icons/editor_icons.png.meta new file mode 100644 index 0000000..2947104 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/Icons/editor_icons.png.meta @@ -0,0 +1,236 @@ +fileFormatVersion: 2 +guid: b8d84fb8445678440929dfd49683d60d +TextureImporter: + fileIDToRecycleName: + 21300000: icon_audio + 21300002: icon_video + 21300004: icon_sendrecv + 21300006: icon_recvonly + 21300008: icon_sendonly + 21300010: icon_inactive + externalObjects: {} + serializedVersion: 9 + mipmaps: + mipMapMode: 0 + enableMipMap: 0 + sRGBTexture: 1 + linearTexture: 0 + fadeOut: 0 + borderMipMap: 0 + mipMapsPreserveCoverage: 0 + alphaTestReferenceValue: 0.5 + mipMapFadeDistanceStart: 1 + mipMapFadeDistanceEnd: 3 + bumpmap: + convertToNormalMap: 0 + externalNormalMap: 0 + heightScale: 0.25 + normalMapFilter: 0 + isReadable: 0 + streamingMipmaps: 0 + streamingMipmapsPriority: 0 + grayScaleToAlpha: 0 + generateCubemap: 6 + cubemapConvolution: 0 + seamlessCubemap: 0 + textureFormat: 1 + maxTextureSize: 2048 + textureSettings: + serializedVersion: 2 + filterMode: -1 + aniso: -1 + mipBias: -100 + wrapU: 1 + wrapV: 1 + wrapW: -1 + nPOTScale: 0 + lightmap: 0 + compressionQuality: 50 + spriteMode: 2 + spriteExtrude: 0 + spriteMeshType: 0 + alignment: 0 + spritePivot: {x: 0.5, y: 0.5} + spritePixelsToUnits: 64 + spriteBorder: {x: 0, y: 0, z: 0, w: 0} + 
spriteGenerateFallbackPhysicsShape: 0 + alphaUsage: 1 + alphaIsTransparency: 1 + spriteTessellationDetail: -1 + textureType: 8 + textureShape: 1 + singleChannelComponent: 0 + maxTextureSizeSet: 0 + compressionQualitySet: 0 + textureFormatSet: 0 + platformSettings: + - serializedVersion: 2 + buildTarget: DefaultTexturePlatform + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + androidETC2FallbackOverride: 0 + - serializedVersion: 2 + buildTarget: Standalone + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + androidETC2FallbackOverride: 0 + - serializedVersion: 2 + buildTarget: Windows Store Apps + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + androidETC2FallbackOverride: 0 + spriteSheet: + serializedVersion: 2 + sprites: + - serializedVersion: 2 + name: icon_audio + rect: + serializedVersion: 2 + x: 0 + y: 128 + width: 64 + height: 64 + alignment: 0 + pivot: {x: 0.5, y: 0.5} + border: {x: 0, y: 0, z: 0, w: 0} + outline: [] + physicsShape: [] + tessellationDetail: 0 + bones: [] + spriteID: 86923a9d0cc950d4eb6a63a817e45e76 + vertices: [] + indices: + edges: [] + weights: [] + - serializedVersion: 2 + name: icon_video + rect: + serializedVersion: 2 + x: 64 + y: 128 + width: 64 + height: 64 + alignment: 0 + pivot: {x: 0.5, y: 0.5} + border: {x: 0, y: 0, z: 0, w: 0} + outline: [] + physicsShape: [] + tessellationDetail: 0 + bones: [] + spriteID: 48c07850933ea4944a58ed1f77736a0d + vertices: [] + indices: + edges: [] + weights: [] + - serializedVersion: 2 + name: icon_sendrecv + rect: + serializedVersion: 2 + x: 0 + y: 64 + width: 64 + height: 64 + alignment: 0 + pivot: {x: 0.5, y: 0.5} + border: {x: 0, y: 0, z: 0, w: 0} + outline: [] + physicsShape: [] + tessellationDetail: 0 + bones: [] + spriteID: d528722cdcdd2f445b567376409ac709 + vertices: [] + indices: + edges: [] + weights: [] + - serializedVersion: 2 + name: icon_recvonly + rect: + serializedVersion: 2 + x: 64 + y: 64 + width: 64 + height: 64 + alignment: 0 + pivot: {x: 0.5, y: 0.5} + border: {x: 0, y: 0, z: 0, w: 0} + outline: [] + physicsShape: [] + tessellationDetail: 0 + bones: [] + spriteID: a20a054e0f7bcee469020f3cb36fa1a2 + vertices: [] + indices: + edges: [] + weights: [] + - serializedVersion: 2 + name: icon_sendonly + rect: + serializedVersion: 2 + x: 0 + y: 0 + width: 64 + height: 64 + alignment: 0 + pivot: {x: 0.5, y: 0.5} + border: {x: 0, y: 0, z: 0, w: 0} + outline: [] + physicsShape: [] + tessellationDetail: 0 + bones: [] + spriteID: 3ba3b398bb58ca441a8e23ee3263b8a0 + vertices: [] + indices: + edges: [] + weights: [] + - serializedVersion: 2 + name: icon_inactive + rect: + serializedVersion: 2 + x: 64 + y: 0 + width: 64 + height: 64 + alignment: 0 + pivot: {x: 0.5, y: 0.5} + border: {x: 0, y: 0, z: 0, w: 0} + outline: [] + physicsShape: [] + tessellationDetail: 0 + bones: [] + spriteID: fc03eb6886bf96c458b238a0506a1730 + vertices: [] + indices: + edges: [] + weights: [] + outline: [] + physicsShape: [] + bones: [] + spriteID: 9f83ab749ebbef2409485e22f7e40288 + vertices: [] + indices: + edges: [] + weights: [] + spritePackingTag: + pSDRemoveMatte: 0 + pSDShowRemoveMatteOption: 0 + userData: + assetBundleName: + assetBundleVariant: diff 
--git a/com.microsoft.mixedreality.webrtc/Editor/Icons/editor_icons.svg b/com.microsoft.mixedreality.webrtc/Editor/Icons/editor_icons.svg new file mode 100644 index 0000000..6b89482 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/Icons/editor_icons.svg @@ -0,0 +1,302 @@ + (SVG markup for the editor icon sheet omitted) diff --git a/com.microsoft.mixedreality.webrtc/Editor/Icons/editor_icons.svg.meta b/com.microsoft.mixedreality.webrtc/Editor/Icons/editor_icons.svg.meta new file mode 100644 index 0000000..c54d70e --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/Icons/editor_icons.svg.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: 2f1d1bcc495fc4342931e3e52e5d205f +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Editor/Icons/mixed_reality_icon.png b/com.microsoft.mixedreality.webrtc/Editor/Icons/mixed_reality_icon.png new file mode 100644 index 0000000..79ba423 Binary files /dev/null and b/com.microsoft.mixedreality.webrtc/Editor/Icons/mixed_reality_icon.png differ diff --git a/com.microsoft.mixedreality.webrtc/Editor/Icons/mixed_reality_icon.png.meta b/com.microsoft.mixedreality.webrtc/Editor/Icons/mixed_reality_icon.png.meta new file mode 100644 index 0000000..8f5ddb1 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/Icons/mixed_reality_icon.png.meta @@ -0,0 +1,110 @@ +fileFormatVersion: 2 +guid: f845cb707dda5c84594956f49655100f +TextureImporter: + fileIDToRecycleName: {} + externalObjects: {} + serializedVersion: 9 + mipmaps: + mipMapMode: 0 + enableMipMap: 1 + sRGBTexture: 1 + linearTexture: 0 + fadeOut: 0 + borderMipMap: 0 + mipMapsPreserveCoverage: 0 + alphaTestReferenceValue: 0.5 + mipMapFadeDistanceStart: 1 + mipMapFadeDistanceEnd: 3 + bumpmap: + convertToNormalMap: 0 + externalNormalMap: 0 + heightScale: 0.25 + normalMapFilter: 0 + isReadable: 0 + streamingMipmaps: 0 + streamingMipmapsPriority: 0 + grayScaleToAlpha: 0 + generateCubemap: 6 + cubemapConvolution: 0 + seamlessCubemap: 0 + textureFormat: 1 + maxTextureSize: 2048 + textureSettings: + serializedVersion: 2 + filterMode: -1 + aniso: -1 + mipBias: -100 + wrapU: -1 + wrapV: -1 + wrapW: -1 + nPOTScale: 1 + lightmap: 0 + compressionQuality: 50 + spriteMode: 0 + spriteExtrude: 1 + spriteMeshType: 1 + alignment: 0 + spritePivot: {x: 0.5, y: 0.5} + spritePixelsToUnits: 100 + spriteBorder: {x: 0, y: 0, z: 0, w: 0} + spriteGenerateFallbackPhysicsShape: 1 + alphaUsage: 1 + alphaIsTransparency: 1 + spriteTessellationDetail: -1 + textureType: 0 + textureShape: 1 + singleChannelComponent: 0 + maxTextureSizeSet: 0 + compressionQualitySet: 0 + textureFormatSet: 0 + platformSettings: + - serializedVersion: 2 + buildTarget: DefaultTexturePlatform + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + androidETC2FallbackOverride: 0 + - serializedVersion: 2 + buildTarget: Standalone + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + androidETC2FallbackOverride: 0 + - serializedVersion: 2 + buildTarget: Windows Store Apps + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 +
overridden: 0 + androidETC2FallbackOverride: 0 + spriteSheet: + serializedVersion: 2 + sprites: [] + outline: [] + physicsShape: [] + bones: [] + spriteID: + vertices: [] + indices: + edges: [] + weights: [] + spritePackingTag: + pSDRemoveMatte: 0 + pSDShowRemoveMatteOption: 0 + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Editor/MicrophoneSourceEditor.cs b/com.microsoft.mixedreality.webrtc/Editor/MicrophoneSourceEditor.cs new file mode 100644 index 0000000..ef65a95 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/MicrophoneSourceEditor.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using UnityEditor; +using UnityEngine; + +namespace Microsoft.MixedReality.WebRTC.Unity.Editor +{ + /// + /// Inspector editor for <see cref="MicrophoneSource"/>. + /// + [CustomEditor(typeof(MicrophoneSource))] + [CanEditMultipleObjects] + public class MicrophoneSourceEditor : UnityEditor.Editor + { + SerializedProperty _autoGainControl; + SerializedProperty _audioSourceStopped; + + void OnEnable() + { + _autoGainControl = serializedObject.FindProperty("_autoGainControl"); + } + + /// + /// Override implementation of Editor.OnInspectorGUI + /// to draw the inspector GUI for the currently selected <see cref="MicrophoneSource"/>. + /// + public override void OnInspectorGUI() + { + serializedObject.Update(); + + if (!PlayerSettings.WSA.GetCapability(PlayerSettings.WSACapability.Microphone)) + { + EditorGUILayout.HelpBox("The UWP player is missing the Microphone capability. The MicrophoneSource component will not function correctly." + + " Add the Microphone capability in Project Settings > Player > UWP > Publishing Settings > Capabilities.", MessageType.Error); + if (GUILayout.Button("Open Player Settings")) + { + SettingsService.OpenProjectSettings("Project/Player"); + } + if (GUILayout.Button("Add Microphone Capability")) + { + PlayerSettings.WSA.SetCapability(PlayerSettings.WSACapability.Microphone, true); + } + } + + GUILayout.Space(10); + + EditorGUILayout.LabelField("Audio processing", EditorStyles.boldLabel); + EditorGUILayout.PropertyField(_autoGainControl); + + serializedObject.ApplyModifiedProperties(); + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Editor/MicrophoneSourceEditor.cs.meta b/com.microsoft.mixedreality.webrtc/Editor/MicrophoneSourceEditor.cs.meta new file mode 100644 index 0000000..545369f --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/MicrophoneSourceEditor.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: b5b57eb30bd3d85419bebdff6d5a9b54 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Editor/Microsoft.MixedReality.WebRTC.Unity.Editor.asmdef b/com.microsoft.mixedreality.webrtc/Editor/Microsoft.MixedReality.WebRTC.Unity.Editor.asmdef new file mode 100644 index 0000000..492b2f3 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/Microsoft.MixedReality.WebRTC.Unity.Editor.asmdef @@ -0,0 +1,16 @@ +{ + "name": "Microsoft.MixedReality.WebRTC.Unity.Editor", + "references": [ + "Microsoft.MixedReality.WebRTC.Unity" + ], + "optionalUnityReferences": [], + "includePlatforms": [ + "Editor" + ], + "excludePlatforms": [], + "allowUnsafeCode": false, + "overrideReferences": false, + "precompiledReferences": [], + "autoReferenced": true, + "defineConstraints": [] +} \ No newline at
end of file diff --git a/com.microsoft.mixedreality.webrtc/Editor/Microsoft.MixedReality.WebRTC.Unity.Editor.asmdef.meta b/com.microsoft.mixedreality.webrtc/Editor/Microsoft.MixedReality.WebRTC.Unity.Editor.asmdef.meta new file mode 100644 index 0000000..c2b0b07 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/Microsoft.MixedReality.WebRTC.Unity.Editor.asmdef.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: bab493aa30700344182f56419f1f775d +AssemblyDefinitionImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Editor/PeerConnectionEditor.cs b/com.microsoft.mixedreality.webrtc/Editor/PeerConnectionEditor.cs new file mode 100644 index 0000000..ee6b5ab --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/PeerConnectionEditor.cs @@ -0,0 +1,293 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using UnityEngine; +using UnityEditor; +using UnityEditorInternal; + +namespace Microsoft.MixedReality.WebRTC.Unity.Editor +{ + /// + /// Custom editor for the <see cref="PeerConnection"/> component. + /// + [CustomEditor(typeof(PeerConnection))] + [CanEditMultipleObjects] + public class PeerConnectionEditor : UnityEditor.Editor + { + /// + /// Height of a single line of controls (e.g. single sender or receiver). + /// + const float kLineHeight = 22; + + /// + /// Spacing between list items (transceivers), for readability. + /// + const float kItemSpacing = 3; + + const float kIconSpacing = 25; + + SerializedProperty autoCreateOffer_; + SerializedProperty autoLogErrors_; + + SerializedProperty iceServers_; + SerializedProperty iceUsername_; + SerializedProperty iceCredential_; + + SerializedProperty onInitialized_; + SerializedProperty onShutdown_; + SerializedProperty onError_; + + ReorderableList transceiverList_; + SerializedProperty mediaLines_; + + enum IconType + { + Audio, + Video, + SendRecv, + RecvOnly, + SendOnly, + Inactive + } + Sprite[] sprites_ = new Sprite[6]; + + void DrawSpriteIcon(IconType type, Rect rect) + { + var sprite = sprites_[(int)type]; + var texture = sprite.texture; + Rect texCoords = sprite.textureRect; + Vector2 texelSize = texture.texelSize; + texCoords.x *= texelSize.x; + texCoords.y *= texelSize.y; + texCoords.width *= texelSize.x; + texCoords.height *= texelSize.y; + if (EditorGUIUtility.isProSkin) + { + texCoords.x += 0.5f; + } + GUI.DrawTextureWithTexCoords(rect, texture, texCoords); + } + + private void Awake() + { + // Load sprites for transceiver list control + var objects = AssetDatabase.LoadAllAssetsAtPath("Packages/com.microsoft.mixedreality.webrtc/Editor/Icons/editor_icons.png"); + foreach (var obj in objects) + { + if (obj is Sprite sprite) + { + if (sprite.name == "icon_audio") + { + sprites_[(int)IconType.Audio] = sprite; + } + else if (sprite.name == "icon_video") + { + sprites_[(int)IconType.Video] = sprite; + } + else if (sprite.name == "icon_sendrecv") + { + sprites_[(int)IconType.SendRecv] = sprite; + } + else if (sprite.name == "icon_recvonly") + { + sprites_[(int)IconType.RecvOnly] = sprite; + } + else if (sprite.name == "icon_sendonly") + { + sprites_[(int)IconType.SendOnly] = sprite; + } + else if (sprite.name == "icon_inactive") + { + sprites_[(int)IconType.Inactive] = sprite; + } + } + } + } + + void OnEnable() + { + autoCreateOffer_ = serializedObject.FindProperty("AutoCreateOfferOnRenegotiationNeeded"); + autoLogErrors_ = serializedObject.FindProperty("AutoLogErrorsToUnityConsole"); + + iceServers_ = serializedObject.FindProperty("IceServers");
+ iceUsername_ = serializedObject.FindProperty("IceUsername"); + iceCredential_ = serializedObject.FindProperty("IceCredential"); + + onInitialized_ = serializedObject.FindProperty("OnInitialized"); + onShutdown_ = serializedObject.FindProperty("OnShutdown"); + onError_ = serializedObject.FindProperty("OnError"); + + mediaLines_ = serializedObject.FindProperty("_mediaLines"); + transceiverList_ = new ReorderableList(serializedObject, mediaLines_, draggable: true, + displayHeader: true, displayAddButton: false, displayRemoveButton: true); + transceiverList_.elementHeightCallback = + (int index) => + { + float height = kItemSpacing + 2 * kLineHeight; + var element = transceiverList_.serializedProperty.GetArrayElementAtIndex(index); + var src = element.FindPropertyRelative("_source"); + if (src.isExpanded) + { + var trackName = element.FindPropertyRelative("SenderTrackName"); + // FIXME - SdpTokenDrawer.OnGUI() is called with h=16px instead of the total height, breaking the layout + height += kLineHeight; // EditorGUI.GetPropertyHeight(trackName) + kItemSpacing; + } + return height; + }; + transceiverList_.drawHeaderCallback = (Rect rect) => EditorGUI.LabelField(rect, "Transceivers"); + transceiverList_.drawElementCallback = + (Rect rect, int index, bool isActive, bool isFocused) => + { + var element = transceiverList_.serializedProperty.GetArrayElementAtIndex(index); + float x0 = rect.x; + float x1 = x0 + 16; + float y0 = rect.y + 2; + float y1 = y0 + kLineHeight; + + // MID value + EditorGUI.LabelField(new Rect(x0 - 14, y1, 20, 20), $"{index}"); + + // Audio or video icon for transceiver kind + MediaKind mediaKind = (MediaKind)element.FindPropertyRelative("_mediaKind").intValue; + System.Type senderType, receiverType; + if (mediaKind == MediaKind.Audio) + { + senderType = typeof(AudioTrackSource); + receiverType = typeof(AudioReceiver); + DrawSpriteIcon(IconType.Audio, new Rect(x0, rect.y, 20, 20)); + } + else + { + senderType = typeof(VideoTrackSource); + receiverType = typeof(VideoReceiver); + DrawSpriteIcon(IconType.Video, new Rect(x0, rect.y, 20, 20)); + } + + rect.x += (kIconSpacing + 10); + rect.width -= (kIconSpacing + 10); + + float fieldWidth = rect.width; + bool hasSender = false; + bool hasReceiver = false; + bool sourceIsExpanded = false; + { + var p = element.FindPropertyRelative("_source"); + Object obj = p.objectReferenceValue; + sourceIsExpanded = EditorGUI.Foldout(new Rect(rect.x, y0, 0, EditorGUIUtility.singleLineHeight), p.isExpanded, new GUIContent()); + p.isExpanded = sourceIsExpanded; + obj = EditorGUI.ObjectField( + new Rect(rect.x, y0, fieldWidth, EditorGUIUtility.singleLineHeight), + obj, senderType, true); + hasSender = (obj != null); + p.objectReferenceValue = obj; + y0 += kLineHeight; + } + if (sourceIsExpanded) + { + var p = element.FindPropertyRelative("_senderTrackName"); + // FIXME - SdpTokenDrawer.OnGUI() is called with h=16px instead of the total height, breaking the layout + //EditorGUI.PropertyField(new Rect(rect.x + 10, y0, fieldWidth - 8, EditorGUIUtility.singleLineHeight), p); + //y0 += EditorGUI.GetPropertyHeight(p) + 6; + string val = p.stringValue; + val = EditorGUI.TextField(new Rect(rect.x + 10, y0, fieldWidth - 8, EditorGUIUtility.singleLineHeight), "Track name", val); + p.stringValue = val; + y0 += kLineHeight; + } + { + var p = element.FindPropertyRelative("_receiver"); + Object obj = p.objectReferenceValue; + obj = EditorGUI.ObjectField( + new Rect(rect.x, y0, fieldWidth, EditorGUIUtility.singleLineHeight), + obj, receiverType, true); + 
hasReceiver = (obj != null); + p.objectReferenceValue = obj; + } + + IconType iconType = IconType.Inactive; + if (hasSender) + { + if (hasReceiver) + { + iconType = IconType.SendRecv; + } + else + { + iconType = IconType.SendOnly; + } + } + else if (hasReceiver) + { + iconType = IconType.RecvOnly; + } + DrawSpriteIcon(iconType, new Rect(x0, y1, 16, 16)); + }; + transceiverList_.drawNoneElementCallback = (Rect rect) => + { + GUIStyle style = new GUIStyle(EditorStyles.label); + style.alignment = TextAnchor.MiddleCenter; + EditorGUI.LabelField(rect, "(empty)", style); + }; + } + + public override void OnInspectorGUI() + { + serializedObject.Update(); + +#if UNITY_WSA + if (!PlayerSettings.WSA.GetCapability(PlayerSettings.WSACapability.Microphone)) + { + EditorGUILayout.HelpBox("The UWP player is missing the Microphone capability. Currently on UWP the native WebRTC implementation always tries to" + + " open the microphone while initializing the audio subsystem at startup. Not granting access will fail initialization, and generally crash the app." + + " Add the Microphone capability in Project Settings > Player > UWP > Publishing Settings > Capabilities.", MessageType.Error); + if (GUILayout.Button("Open Player Settings")) + { + SettingsService.OpenProjectSettings("Project/Player"); + } + if (GUILayout.Button("Add Microphone Capability")) + { + PlayerSettings.WSA.SetCapability(PlayerSettings.WSACapability.Microphone, true); + } + } +#endif + + EditorGUILayout.Space(); + + EditorGUILayout.PropertyField(autoLogErrors_, new GUIContent("Log errors to the Unity console", + "Log the WebRTC errors to the Unity console.")); + + EditorGUILayout.Space(); + + EditorGUILayout.LabelField("Signaling", EditorStyles.boldLabel); + EditorGUILayout.PropertyField(iceServers_, true); + EditorGUILayout.PropertyField(iceUsername_); + EditorGUILayout.PropertyField(iceCredential_); + + EditorGUILayout.Space(); + + EditorGUILayout.LabelField("Media", EditorStyles.boldLabel); + EditorGUILayout.PropertyField(autoCreateOffer_); + transceiverList_.DoLayoutList(); + using (var _ = new EditorGUILayout.HorizontalScope()) + { + if (GUILayout.Button("+ Audio", EditorStyles.miniButton)) + { + ((PeerConnection)serializedObject.targetObject).AddMediaLine(MediaKind.Audio); + } + if (GUILayout.Button("+ Video", EditorStyles.miniButton)) + { + ((PeerConnection)serializedObject.targetObject).AddMediaLine(MediaKind.Video); + } + } + + EditorGUILayout.Space(); + + EditorGUILayout.LabelField("Events", EditorStyles.boldLabel); + EditorGUILayout.PropertyField(onInitialized_); + EditorGUILayout.PropertyField(onShutdown_); + EditorGUILayout.PropertyField(onError_); + + serializedObject.ApplyModifiedProperties(); + } + } + +} diff --git a/com.microsoft.mixedreality.webrtc/Editor/PeerConnectionEditor.cs.meta b/com.microsoft.mixedreality.webrtc/Editor/PeerConnectionEditor.cs.meta new file mode 100644 index 0000000..8d93630 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/PeerConnectionEditor.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: defa9eea11e09dc4b951b5f414f839e9 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Editor/SdpTokenDrawer.cs b/com.microsoft.mixedreality.webrtc/Editor/SdpTokenDrawer.cs new file mode 100644 index 0000000..f86dd90 --- /dev/null +++ 
b/com.microsoft.mixedreality.webrtc/Editor/SdpTokenDrawer.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using UnityEngine; +using UnityEditor; + +namespace Microsoft.MixedReality.WebRTC.Unity.Editor +{ + /// + /// Property drawer for <see cref="SdpTokenAttribute"/>, to validate the associated string + /// property content and display an error message box if invalid characters are found. + /// + [CustomPropertyDrawer(typeof(SdpTokenAttribute))] + public class SdpTokenDrawer : PropertyDrawer + { + private const int c_errorMessageHeight = 35; + + public override void OnGUI(Rect position, SerializedProperty property, GUIContent label) + { + try + { + var sdpTokenAttr = attribute as SdpTokenAttribute; + SdpTokenAttribute.Validate(property.stringValue, sdpTokenAttr.AllowEmpty); + } + catch (ArgumentException) + { + // Display error message below the property + var totalHeight = position.height; + position.yMin = position.yMax - c_errorMessageHeight; + EditorGUI.HelpBox(position, "Invalid characters in property. SDP tokens cannot contain some characters like space or quote. See SdpTokenAttribute.Validate() for details.", MessageType.Error); + + // Adjust rect for the property itself + position.yMin = position.yMax - totalHeight; + position.yMax -= c_errorMessageHeight; + } + + EditorGUI.PropertyField(position, property, label); + } + + public override float GetPropertyHeight(SerializedProperty property, GUIContent label) + { + float height = base.GetPropertyHeight(property, label); + try + { + var sdpTokenAttr = attribute as SdpTokenAttribute; + SdpTokenAttribute.Validate(property.stringValue, sdpTokenAttr.AllowEmpty); + } + catch (ArgumentException) + { + // Add extra space for the error message + height += c_errorMessageHeight; + } + return height; + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Editor/SdpTokenDrawer.cs.meta b/com.microsoft.mixedreality.webrtc/Editor/SdpTokenDrawer.cs.meta new file mode 100644 index 0000000..f5d5d26 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/SdpTokenDrawer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 51d1ddf7db40c6948a42f912402d20a7 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Editor/ToggleLeftDrawer.cs b/com.microsoft.mixedreality.webrtc/Editor/ToggleLeftDrawer.cs new file mode 100644 index 0000000..410b84f --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/ToggleLeftDrawer.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using UnityEngine; +using UnityEditor; + +namespace Microsoft.MixedReality.WebRTC.Unity.Editor +{ + /// + /// Property drawer for <see cref="ToggleLeftAttribute"/>.
+ /// + [CustomPropertyDrawer(typeof(ToggleLeftAttribute))] + public class ToggleLeftDrawer : PropertyDrawer + { + public override void OnGUI(Rect position, SerializedProperty property, GUIContent label) + { + using (new EditorGUI.PropertyScope(position, label, property)) + { + property.boolValue = EditorGUI.ToggleLeft(position, label, property.boolValue); + } + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Editor/ToggleLeftDrawer.cs.meta b/com.microsoft.mixedreality.webrtc/Editor/ToggleLeftDrawer.cs.meta new file mode 100644 index 0000000..9de26a2 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/ToggleLeftDrawer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 317c7bfb57fe2824cbe37db424aaabe8 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Editor/VideoRendererEditor.cs b/com.microsoft.mixedreality.webrtc/Editor/VideoRendererEditor.cs new file mode 100644 index 0000000..2a644c1 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/VideoRendererEditor.cs @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using UnityEditor; +using UnityEngine; + +namespace Microsoft.MixedReality.WebRTC.Unity.Editor +{ + /// + /// Inspector editor for <see cref="VideoRenderer"/>. + /// + [CustomEditor(typeof(VideoRenderer))] + [CanEditMultipleObjects] + public class VideoRendererEditor : UnityEditor.Editor + { + SerializedProperty _maxFramerate; + SerializedProperty _enableStatistics; + SerializedProperty _frameLoadStatHolder; + SerializedProperty _framePresentStatHolder; + SerializedProperty _frameSkipStatHolder; + + void OnEnable() + { + _maxFramerate = serializedObject.FindProperty("MaxFramerate"); + _enableStatistics = serializedObject.FindProperty("EnableStatistics"); + _frameLoadStatHolder = serializedObject.FindProperty("FrameLoadStatHolder"); + _framePresentStatHolder = serializedObject.FindProperty("FramePresentStatHolder"); + _frameSkipStatHolder = serializedObject.FindProperty("FrameSkipStatHolder"); + } + + /// + /// Override implementation of Editor.OnInspectorGUI + /// to draw the inspector GUI for the currently selected <see cref="VideoRenderer"/>.
+ /// + public override void OnInspectorGUI() + { + serializedObject.Update(); + + GUILayout.Space(10); + + EditorGUILayout.LabelField("Video", EditorStyles.boldLabel); + EditorGUILayout.PropertyField(_maxFramerate); + + GUILayout.Space(10); + + EditorGUILayout.PropertyField(_enableStatistics); + if (_enableStatistics.boolValue) + { + using (new EditorGUI.IndentLevelScope()) + { + EditorGUILayout.PropertyField(_frameLoadStatHolder); + EditorGUILayout.PropertyField(_framePresentStatHolder); + EditorGUILayout.PropertyField(_frameSkipStatHolder); + } + } + + serializedObject.ApplyModifiedProperties(); + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Editor/VideoRendererEditor.cs.meta b/com.microsoft.mixedreality.webrtc/Editor/VideoRendererEditor.cs.meta new file mode 100644 index 0000000..d358f5d --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/VideoRendererEditor.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: af70ba26c91da4f48bb7da544db3fc9e +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Editor/WebcamSourceEditor.cs b/com.microsoft.mixedreality.webrtc/Editor/WebcamSourceEditor.cs new file mode 100644 index 0000000..4630a4a --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/WebcamSourceEditor.cs @@ -0,0 +1,497 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using UnityEditor; +using UnityEngine; + +namespace Microsoft.MixedReality.WebRTC.Unity.Editor +{ + /// + /// Inspector editor for <see cref="WebcamSource"/>. + /// + [CustomEditor(typeof(WebcamSource))] + [CanEditMultipleObjects] + public class WebcamSourceEditor : UnityEditor.Editor + { + SerializedProperty _enableMixedRealityCapture; + SerializedProperty _enableMrcRecordingIndicator; + SerializedProperty _formatMode; + SerializedProperty _videoProfileId; + SerializedProperty _videoProfileKind; + SerializedProperty _constraints; + SerializedProperty _width; + SerializedProperty _height; + SerializedProperty _framerate; + SerializedProperty _videoStreamStarted; + SerializedProperty _videoStreamStopped; + + GUIContent _anyContent; + float _anyWidth; + float _unitWidth; + + int _prevWidth = 640; + int _prevHeight = 480; + double _prevFramerate = 30.0; + VideoProfileKind _prevVideoProfileKind = VideoProfileKind.VideoConferencing; + string _prevVideoProfileId = ""; + + /// + /// Helper enumeration for commonly used video codecs. + /// The enum names must match exactly the standard SDP naming. + /// See https://en.wikipedia.org/wiki/RTP_audio_video_profile for reference. + /// + enum SdpVideoCodecs + { + /// + /// Do not force any codec, let WebRTC decide. + /// + None, + + /// + /// Try to use H.264 if available. + /// + H264, + + /// + /// Try to use VP8 if available. + /// + VP8, + + /// + /// Try to use VP9 if available. + /// + VP9, + + /// + /// Try to use the given codec if available.
+ /// + Custom + } + + void OnEnable() + { + _enableMixedRealityCapture = serializedObject.FindProperty("EnableMixedRealityCapture"); + _enableMrcRecordingIndicator = serializedObject.FindProperty("EnableMRCRecordingIndicator"); + _formatMode = serializedObject.FindProperty("FormatMode"); + _videoProfileId = serializedObject.FindProperty("VideoProfileId"); + _videoProfileKind = serializedObject.FindProperty("VideoProfileKind"); + _constraints = serializedObject.FindProperty("Constraints"); + _width = _constraints.FindPropertyRelative("width"); + _height = _constraints.FindPropertyRelative("height"); + _framerate = _constraints.FindPropertyRelative("framerate"); + _videoStreamStarted = serializedObject.FindProperty("VideoStreamStarted"); + _videoStreamStopped = serializedObject.FindProperty("VideoStreamStopped"); + + _anyContent = new GUIContent("(any)"); + _anyWidth = -1f; // initialized later + _unitWidth = -1f; // initialized later + } + + /// + /// Override implementation of Editor.OnInspectorGUI + /// to draw the inspector GUI for the currently selected <see cref="WebcamSource"/>. + /// + public override void OnInspectorGUI() + { + // CalcSize() can only be called inside a GUI method + if (_anyWidth < 0) + _anyWidth = GUI.skin.label.CalcSize(_anyContent).x; + if (_unitWidth < 0) + _unitWidth = GUI.skin.label.CalcSize(new GUIContent("fps")).x; + + serializedObject.Update(); + + if (!PlayerSettings.WSA.GetCapability(PlayerSettings.WSACapability.WebCam)) + { + EditorGUILayout.HelpBox("The UWP player is missing the WebCam capability. The WebcamSource component will not function correctly." + + " Add the WebCam capability in Project Settings > Player > UWP > Publishing Settings > Capabilities.", MessageType.Error); + if (GUILayout.Button("Open Player Settings")) + { + SettingsService.OpenProjectSettings("Project/Player"); + } + if (GUILayout.Button("Add WebCam Capability")) + { + PlayerSettings.WSA.SetCapability(PlayerSettings.WSACapability.WebCam, true); + } + } + + GUILayout.Space(10); + + EditorGUILayout.LabelField("Video capture", EditorStyles.boldLabel); + EditorGUILayout.PropertyField(_formatMode, new GUIContent("Capture format", + "Decide how to obtain the constraints used to select the best capture format.")); + if ((LocalVideoSourceFormatMode)_formatMode.intValue == LocalVideoSourceFormatMode.Manual) + { + using (new EditorGUI.IndentLevelScope()) + { + EditorGUILayout.LabelField("General constraints (all platforms)"); + using (new EditorGUI.IndentLevelScope()) + { + OptionalIntField(_width, ref _prevWidth, + new GUIContent("Width", "Only consider capture formats with the specified width."), + new GUIContent("px", "Pixels")); + OptionalIntField(_height, ref _prevHeight, + new GUIContent("Height", "Only consider capture formats with the specified height."), + new GUIContent("px", "Pixels")); + OptionalDoubleField(_framerate, ref _prevFramerate, + new GUIContent("Framerate", "Only consider capture formats with the specified framerate."), + new GUIContent("fps", "Frames per second")); + } + + EditorGUILayout.LabelField("UWP constraints"); + using (new EditorGUI.IndentLevelScope()) + { + OptionalEnumField(_videoProfileKind, VideoProfileKind.Unspecified, ref _prevVideoProfileKind, + new GUIContent("Video profile kind", "Only consider capture formats associated with the specified video profile kind.")); + OptionalTextField(_videoProfileId, ref _prevVideoProfileId, + new GUIContent("Video profile ID", "Only consider capture formats associated with the specified video profile.")); + if ((_videoProfileKind.intValue
!= (int)VideoProfileKind.Unspecified) && (_videoProfileId.stringValue.Length > 0)) + { + EditorGUILayout.HelpBox("A video profile ID is already unique. Also specifying a video profile kind over-constrains the selection algorithm and can decrease the chances of finding a matching video profile. It is recommended to select either a video profile kind or a video profile ID, but not both.", MessageType.Warning); + } + } + } + } + _enableMixedRealityCapture.boolValue = EditorGUILayout.ToggleLeft("Enable Mixed Reality Capture (MRC)", _enableMixedRealityCapture.boolValue); + if (_enableMixedRealityCapture.boolValue) + { + using (var scope = new EditorGUI.IndentLevelScope()) + { + _enableMrcRecordingIndicator.boolValue = EditorGUILayout.ToggleLeft("Show recording indicator in device", _enableMrcRecordingIndicator.boolValue); + if (!PlayerSettings.virtualRealitySupported) + { + EditorGUILayout.HelpBox("Mixed Reality Capture can only work in exclusive-mode apps. XR support must be enabled in Project Settings > Player > XR Settings > Virtual Reality Supported, and the project then saved to disk.", MessageType.Error); + if (GUILayout.Button("Enable XR support")) + { + PlayerSettings.virtualRealitySupported = true; + } + } + } + } + + GUILayout.Space(10); + + EditorGUILayout.PropertyField(_videoStreamStarted); + EditorGUILayout.PropertyField(_videoStreamStopped); + + serializedObject.ApplyModifiedProperties(); + } + + /// + /// ToggleLeft control associated with a given SerializedProperty, to enable automatic GUI + /// handling like the Prefab revert menu. + /// + /// The boolean property associated with the control. + /// The label to display next to the toggle control. + private void ToggleLeft(SerializedProperty property, GUIContent label) + { + var rect = EditorGUILayout.GetControlRect(); + using (new EditorGUI.PropertyScope(rect, label, property)) + { + property.boolValue = EditorGUI.ToggleLeft(rect, label, property.boolValue); + } + } + + /// + /// IntField with optional toggle associated with a given SerializedProperty, to enable + /// automatic GUI handling like the Prefab revert menu. + /// + /// Valid integer values are any non-zero positive integer. Any negative or zero value + /// is considered invalid, and means that the value is considered as not set, which shows + /// up as an unchecked left toggle widget. + /// + /// To enforce a valid value when the toggle control is checked by the user, a default valid + /// value is provided in <paramref name="lastValidValue"/>. For UI consistency, the last selected + /// valid value is returned in <paramref name="lastValidValue"/>, to allow toggling the field + /// ON and OFF without losing the valid value it previously had. + /// + /// The integer property associated with the control. + /// + /// Default value if the property value is invalid (negative or zero). + /// Assigned the new value on return if valid. + /// + /// The label to display next to the toggle control. + /// The label indicating the unit of the value.
private void OptionalIntField(SerializedProperty intProperty, ref int lastValidValue, GUIContent label, GUIContent unitLabel) + { + if (lastValidValue <= 0) + { + throw new ArgumentOutOfRangeException(nameof(lastValidValue), "Default value cannot be invalid."); + } + + using (new EditorGUILayout.HorizontalScope()) + { + var rect = EditorGUILayout.GetControlRect(); + using (new EditorGUI.PropertyScope(rect, label, intProperty)) + { + bool hadValidValue = (intProperty.intValue > 0); + bool needsValidValue = EditorGUI.ToggleLeft(rect, label, hadValidValue); + int newValue = intProperty.intValue; + if (needsValidValue) + { + // Force a valid value, otherwise the edit field won't show up + if (newValue <= 0) + { + newValue = lastValidValue; + } + + // Make updating the value of the serialized property delayed to allow overriding the + // value the user will input before it's assigned to the property, for validation. + newValue = EditorGUILayout.DelayedIntField(newValue); + if (newValue < 0) + { + newValue = 0; + } + } + else + { + // Force invalid value for consistency, otherwise this breaks Prefab revert + newValue = 0; + } + intProperty.intValue = newValue; + if (newValue > 0) + { + GUILayout.Label(unitLabel, GUILayout.Width(_unitWidth)); + + // Save valid value as new default. This allows toggling the toggle widget ON and OFF + // without losing the value previously input. This works only while the inspector is + // alive, that is while the object is selected, but is better than nothing. + lastValidValue = newValue; + } + else + { + GUILayout.Label(_anyContent, GUILayout.Width(_anyWidth)); + } + } + } + } + + /// + /// DoubleField with optional toggle associated with a given SerializedProperty, to enable + /// automatic GUI handling like the Prefab revert menu. + /// + /// Valid double values are any non-zero positive double. Any negative or zero value + /// is considered invalid, and means that the value is considered as not set, which shows + /// up as an unchecked left toggle widget. + /// + /// To enforce a valid value when the toggle control is checked by the user, a default valid + /// value is provided in <paramref name="lastValidValue"/>. For UI consistency, the last selected + /// valid value is returned in <paramref name="lastValidValue"/>, to allow toggling the field + /// ON and OFF without losing the valid value it previously had. + /// + /// The double property associated with the control. + /// + /// Default value if the property value is invalid (negative or zero). + /// Assigned the new value on return if valid. + /// + /// The label to display next to the toggle control. + /// The label indicating the unit of the value. + private void OptionalDoubleField(SerializedProperty doubleProperty, ref double lastValidValue, GUIContent label, GUIContent unitLabel) + { + if (lastValidValue <= 0.0) + { + throw new ArgumentOutOfRangeException(nameof(lastValidValue), "Default value cannot be invalid."); + } + + using (new EditorGUILayout.HorizontalScope()) + { + var rect = EditorGUILayout.GetControlRect(); + using (new EditorGUI.PropertyScope(rect, label, doubleProperty)) + { + bool hadValidValue = (doubleProperty.doubleValue > 0.0); + bool needsValidValue = EditorGUI.ToggleLeft(rect, label, hadValidValue); + double newValue = doubleProperty.doubleValue; + if (needsValidValue) + { + // Force a valid value, otherwise the edit field won't show up + if (newValue <= 0.0) + { + newValue = lastValidValue; + } + + // Make updating the value of the serialized property delayed to allow overriding the + // value the user will input before it's assigned to the property, for validation.
+ newValue = EditorGUILayout.DelayedDoubleField(newValue); + if (newValue < 0.0) + { + newValue = 0.0; + } + } + else + { + // Force invalid value for consistency, otherwise this breaks Prefab revert + newValue = 0.0; + } + doubleProperty.doubleValue = newValue; + if (newValue > 0.0) + { + GUILayout.Label(unitLabel, GUILayout.Width(_unitWidth)); + + // Save valid value as new default. This allows toggling the toggle widget ON and OFF + // without losing the value previously input. This works only while the inspector is + // alive, that is while the object is selected, but is better than nothing. + lastValidValue = newValue; + } + else + { + GUILayout.Label(_anyContent, GUILayout.Width(_anyWidth)); + } + } + } + } + + /// + /// Helper to convert an enum to its integer value. + /// + /// The enum type. + /// The enum value. + /// The integer value associated with <paramref name="value"/>. + public static int EnumToInt<TValue>(TValue value) where TValue : Enum => (int)(object)value; + + /// + /// Helper to convert an integer to its enum value. + /// + /// The enum type. + /// The integer value. + /// The enum value whose integer value is <paramref name="value"/>. + public static TValue IntToEnum<TValue>(int value) where TValue : Enum => (TValue)(object)value; + + /// + /// EnumPopup with optional toggle associated with a given SerializedProperty, to enable + /// automatic GUI handling like the Prefab revert menu. + /// + /// Valid enum values are any value different from <paramref name="nilValue"/>. A value of + /// <paramref name="nilValue"/> is considered invalid, and means that the value is considered as + /// not set, which shows up as an unchecked left toggle widget. + /// + /// To enforce a valid value when the toggle control is checked by the user, a default valid value + /// is provided in <paramref name="lastValidValue"/>, which must be different from <paramref name="nilValue"/>. + /// For UI consistency, the last selected valid value is returned in <paramref name="lastValidValue"/>, + /// to allow toggling the field ON and OFF without losing the valid value it previously had. + /// + /// The enum property associated with the control. + /// Value considered to be "invalid", which deselects the toggle control. + /// + /// Default value if the property value is not <paramref name="nilValue"/>. + /// Assigned the new value on return if not <paramref name="nilValue"/>. + /// + /// The label to display next to the toggle control. + private void OptionalEnumField<T>(SerializedProperty enumProperty, T nilValue, ref T lastValidValue, GUIContent label) where T : Enum + { + if (nilValue.CompareTo(lastValidValue) == 0) + { + throw new ArgumentOutOfRangeException(nameof(lastValidValue), "Default value cannot be invalid."); + } + + using (new EditorGUILayout.HorizontalScope()) + { + var rect = EditorGUILayout.GetControlRect(); + using (new EditorGUI.PropertyScope(rect, label, enumProperty)) + { + bool hadValidValue = (enumProperty.intValue != EnumToInt(nilValue)); + bool needsValidValue = EditorGUI.ToggleLeft(rect, label, hadValidValue); + T newValue = IntToEnum<T>(enumProperty.intValue); + if (needsValidValue) + { + // Force a valid value, otherwise the popup control won't show up + if (newValue.CompareTo(nilValue) == 0) + { + newValue = lastValidValue; + } + + newValue = (T)EditorGUILayout.EnumPopup(newValue); + } + else + { + // Force invalid value for consistency, otherwise this breaks Prefab revert + newValue = nilValue; + } + enumProperty.intValue = EnumToInt(newValue); + if (newValue.CompareTo(nilValue) != 0) + { + // Save valid value as new default. This allows toggling the toggle widget ON and OFF + // without losing the value previously input. This works only while the inspector is + // alive, that is while the object is selected, but is better than nothing.
+ lastValidValue = newValue; + } + else + { + GUILayout.Label(_anyContent, GUILayout.Width(_anyWidth)); + } + } + } + } + + /// + /// TextField with optional toggle associated with a given SerializedProperty, to enable + /// automatic GUI handling like the Prefab revert menu. + /// + /// Valid string values are any non-empty non-space-only string. Any empty string or string + /// made up of only spaces is considered invalid, and means that the value is considered as + /// not set, which shows up as an unchecked left toggle widget. + /// + /// To enforce a valid value when the toggle control is checked by the user, a default valid + /// value is provided in <paramref name="lastValidValue"/>. For UI consistency, the last selected + /// valid value is returned in <paramref name="lastValidValue"/>, to allow toggling the field + /// ON and OFF without losing the valid value it previously had. + /// + /// The string property associated with the control. + /// + /// Default value if the property value is null or whitespace. + /// Assigned the new value on return if valid. + /// + /// The label to display next to the toggle control. + private void OptionalTextField(SerializedProperty stringProperty, ref string lastValidValue, GUIContent label) + { + if (string.IsNullOrWhiteSpace(lastValidValue)) + { + throw new ArgumentOutOfRangeException(nameof(lastValidValue), "Default value cannot be invalid."); + } + + using (new EditorGUILayout.HorizontalScope()) + { + var rect = EditorGUILayout.GetControlRect(); + using (new EditorGUI.PropertyScope(rect, label, stringProperty)) + { + bool hadValidValue = !string.IsNullOrWhiteSpace(stringProperty.stringValue); + bool needsValidValue = EditorGUI.ToggleLeft(rect, label, hadValidValue); + string newValue = stringProperty.stringValue; + if (needsValidValue) + { + // Force a valid value, otherwise the edit field won't show up + if (string.IsNullOrWhiteSpace(newValue)) + { + newValue = lastValidValue; + } + + // Make updating the value of the serialized property delayed to allow overriding the + // value the user will input before it's assigned to the property, for validation. + newValue = EditorGUILayout.DelayedTextField(newValue); + if (string.IsNullOrWhiteSpace(newValue)) + { + newValue = string.Empty; + } + } + else + { + // Force invalid value for consistency, otherwise this breaks Prefab revert + newValue = string.Empty; + } + stringProperty.stringValue = newValue; + if (!string.IsNullOrWhiteSpace(newValue)) + { + // Save valid value as new default. This allows toggling the toggle widget ON and OFF + // without losing the value previously input. This works only while the inspector is + // alive, that is while the object is selected, but is better than nothing.
+ lastValidValue = newValue; + } + else + { + GUILayout.Label(_anyContent, GUILayout.Width(_anyWidth)); + } + } + } + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Editor/WebcamSourceEditor.cs.meta b/com.microsoft.mixedreality.webrtc/Editor/WebcamSourceEditor.cs.meta new file mode 100644 index 0000000..a0d05b2 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Editor/WebcamSourceEditor.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 73af9a06137c0b949b7b86936464626a +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/LICENSE.md b/com.microsoft.mixedreality.webrtc/LICENSE.md new file mode 100644 index 0000000..5cf7c8d --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/LICENSE.md @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) Microsoft Corporation. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/com.microsoft.mixedreality.webrtc/LICENSE.md.meta b/com.microsoft.mixedreality.webrtc/LICENSE.md.meta
new file mode 100644
index 0000000..1c9ce28
--- /dev/null
+++ b/com.microsoft.mixedreality.webrtc/LICENSE.md.meta
@@ -0,0 +1,7 @@
+fileFormatVersion: 2
+guid: 441ac42ff1655da48934640cec15925c
+TextScriptImporter:
+  externalObjects: {}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 
diff --git a/com.microsoft.mixedreality.webrtc/README.md b/com.microsoft.mixedreality.webrtc/README.md
new file mode 100644
index 0000000..08bfbea
--- /dev/null
+++ b/com.microsoft.mixedreality.webrtc/README.md
@@ -0,0 +1,89 @@
+# MixedReality-WebRTC for Unity
+
+![Unity3D](https://img.shields.io/badge/Unity3D-2018.4%2B-ff4080)
+[![Licensed under the MIT License](https://img.shields.io/badge/License-MIT-blue.svg)](https://github.com/microsoft/MixedReality-WebRTC-Unity/blob/master/LICENSE)
+[![Holodevelopers channel on Slack](https://img.shields.io/badge/slack-@holodevelopers-%23972299.svg?logo=slack)](https://holodevelopers.slack.com/messages/CN1A7JB3R)
+[![Under active development](https://img.shields.io/badge/status-active-green.svg)](https://github.com/microsoft/MixedReality-WebRTC-Unity/commits/master)
+[![Build Status](https://dev.azure.com/aipmr/MixedReality-WebRTC-CI/_apis/build/status/mr-webrtc-unity-ci?branchName=master)](https://dev.azure.com/aipmr/MixedReality-WebRTC-CI/_build/latest?definitionId=TODO&branchName=master)
+
+MixedReality-WebRTC for Unity (`com.microsoft.mixedreality.webrtc`) is a package for [Unity](https://unity.com/) to help mixed reality app developers integrate peer-to-peer real-time audio, video, and data communication into their application, and improve their collaborative experience.
+
+## Features
+
+- Enables **multi-track real-time audio / video / data communication** with a remote peer. Multiple connections can be used in parallel to communicate with multiple peers.
+- Provides drop-in components for:
+  - Peer connection and automated media track management
+  - Local audio and video capture from **webcam** and **microphone**
+  - Remote audio output via an AudioSource component, for **2D and spatial audio**
+  - Remote video rendering through a Unity texture, for use on any mesh
+  - **Scene video streaming** from any Unity Camera component (stream-what-you-see)
+- Supports in-editor use (Play mode)
+- Supports UWP devices, including Microsoft **HoloLens** (x86) and Microsoft **HoloLens 2** (ARM)
+- Allows easy use of **[Mixed Reality Capture (MRC)](https://docs.microsoft.com/en-us/windows/mixed-reality/mixed-reality-capture)** to stream the viewpoint of the user for multi-device experiences
+
+MixedReality-WebRTC for Unity is part of the open-source [MixedReality-WebRTC](https://github.com/microsoft/MixedReality-WebRTC/) project hosted on GitHub, and leverages the C# library and native implementation of that project.
+
+## Install
+
+This package can be imported directly into an existing Unity project as a custom package:
+
+- Open Unity and load the Unity project you want to add the MixedReality-WebRTC library to.
+
+- Download the latest package from [the GitHub Releases page](https://github.com/microsoft/MixedReality-WebRTC/releases) and unzip it somewhere on your local hard disk.
+
+- Go to the **Package Manager** window (_Window_ > _Package Manager_)
+
+- Expand the "+" menu and select "_Add package from disk..._"
+
+  ![Select Add package from disk...](Documentation~/install1.png)
+
+  _Note: In Unity 2018.4, the "+" menu is located at the bottom left of the Package Manager window._
+
+- Select the `package.json` file from the place where the package was unzipped.
+
+This installs the package and adds a reference to it in the currently open Unity project.
+
+![The MixedReality-WebRTC package is added to the current project](Documentation~/install2.png)
+
+See Unity's documentation on [Installing a local package](https://docs.unity3d.com/Manual/upm-ui-local.html) for more details.
+
+## Documentation
+
+The official documentation of the MixedReality-WebRTC project is hosted at [https://microsoft.github.io/MixedReality-WebRTC/manual/unity-integration.html](https://microsoft.github.io/MixedReality-WebRTC/manual/unity-integration.html).
+
+An API reference is also available at [https://microsoft.github.io/MixedReality-WebRTC/api/Microsoft.MixedReality.WebRTC.Unity.html](https://microsoft.github.io/MixedReality-WebRTC/api/Microsoft.MixedReality.WebRTC.Unity.html).
+
+## Samples
+
+Official samples are hosted in a separate package (`com.microsoft.mixedreality.webrtc.samples`) also available from [the GitHub Releases page](https://github.com/microsoft/MixedReality-WebRTC/releases).
+
+## Special considerations for HoloLens 2
+
+- Mixed Reality Capture (MRC) has some inherent limitations:
+  - **MRC only works up to 1080p** (see the [Mixed reality capture for developers](https://docs.microsoft.com/en-us/windows/mixed-reality/mixed-reality-capture-for-developers) documentation), but the default resolution of the webcam on HoloLens 2 is 2272 x 1278 (see the [Locatable Camera](https://docs.microsoft.com/en-us/windows/mixed-reality/locatable-camera) documentation). In order to access different resolutions, one needs to use a different video profile, like the `VideoRecording` or `VideoConferencing` ones. This is handled automatically in the Unity integration layer (see [here](https://github.com/microsoft/MixedReality-WebRTC/blob/9a81c94cf01786398495f8046b645b7b28d987de/libs/Microsoft.MixedReality.WebRTC.Unity/Assets/Microsoft.MixedReality.WebRTC.Unity/Scripts/Media/LocalVideoSource.cs#L210-L237)) if `LocalVideoSource.Mode = Automatic` (default), but must be handled manually if using the C# library directly; a sketch of the manual approach is included at the end of this README.
+  - **MRC requires special permission** to record the content of the screen:
+    - For shared apps (2D slates), this corresponds to the `screenDuplication` [restricted capability](https://docs.microsoft.com/en-us/windows/uwp/packaging/app-capability-declarations#restricted-capabilities), which **cannot be obtained by third-party applications**.
+    - For exclusive-mode apps (fullscreen), there is no particular UWP capability, but the recorded content is limited to the application's own content.
+- Be sure to use `PreferredVideoCodec = "H264"` to take advantage of the hardware encoder present on the device; software encoding with _e.g._ VP8 or VP9 codecs is very CPU intensive and strongly discouraged. A configuration sketch is included at the end of this README.
+
+## Known Issues
+
+The current version is under active development, and contains known issues inherited from the underlying C# library of the MixedReality-WebRTC sister project:
+
+- HoloLens 2 exhibits a small performance penalty due to the [missing support (#157)](https://github.com/webrtc-uwp/webrtc-uwp-sdk/issues/157) for SIMD-accelerated YUV conversion in WebRTC UWP SDK on ARM.
+- H.264 hardware video encoding (UWP only) exhibits some quality degradation (blockiness). See [#74](https://github.com/microsoft/MixedReality-WebRTC/issues/74) and [#101](https://github.com/microsoft/MixedReality-WebRTC/issues/101) for details.
+- H.264 is not currently available on Desktop; only the VP8 and VP9 software codecs are available there.
+
+## Contributing
+
+This project welcomes contributions and suggestions. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit [https://cla.microsoft.com](https://cla.microsoft.com).
+
+When you submit a pull request, a CLA-bot will automatically determine whether you need to provide a CLA and decorate the PR appropriately (_e.g._, label, comment). Simply follow the instructions provided by the bot. You will only need to do this once across all repos using our CLA.
+
+This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
+
+## Reporting security issues and bugs
+
+MixedReality-WebRTC-Unity builds upon the WebRTC implementation provided by Google. Security issues and bugs related to this implementation should be reported to Google.
+
+Security issues and bugs related to MixedReality-WebRTC and MixedReality-WebRTC-Unity themselves, or to WebRTC UWP SDK, should be reported privately, via email, to the Microsoft Security Response Center (MSRC) at secure@microsoft.com. You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Further information, including the MSRC PGP key, can be found in the [Security TechCenter](https://technet.microsoft.com/en-us/security/ff852094.aspx).
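+
+## Example configuration sketches
+
+The following sketch illustrates the manual video profile selection mentioned in the HoloLens 2 notes above, when using the C# library directly. It is a sketch only: the `DeviceVideoTrackSource.CreateAsync` factory, the `LocalVideoDeviceInitConfig` field names and the `VideoProfileKind.VideoConferencing` enum value are assumptions based on the MixedReality-WebRTC C# API, and may differ in the library version you use.
+
+```csharp
+using System.Threading.Tasks;
+using Microsoft.MixedReality.WebRTC;
+
+public static class HoloLensVideoProfileExample
+{
+    // Open the HoloLens 2 webcam with the VideoConferencing profile so that a
+    // 1080p-or-lower resolution compatible with MRC can be selected, instead
+    // of the default 2272 x 1278 mode of the default profile.
+    public static Task<DeviceVideoTrackSource> CreateMrcCompatibleSourceAsync()
+    {
+        var config = new LocalVideoDeviceInitConfig
+        {
+            videoProfileKind = VideoProfileKind.VideoConferencing, // assumed enum value
+            width = 1280,
+            height = 720,
+        };
+        return DeviceVideoTrackSource.CreateAsync(config);
+    }
+}
+```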
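+
+Similarly, here is a minimal sketch for the H.264 recommendation above. It assumes the `PeerConnection` Unity component exposes the underlying native peer through a `Peer` property once its `OnInitialized` event fires, and that the native peer exposes the `PreferredVideoCodec` SDP filter mentioned above; treat these member names as illustrative rather than authoritative.
+
+```csharp
+using Microsoft.MixedReality.WebRTC.Unity;
+using UnityEngine;
+
+[RequireComponent(typeof(PeerConnection))]
+public class PreferH264 : MonoBehaviour
+{
+    private void Awake()
+    {
+        // The native peer connection only exists once the component has
+        // finished its asynchronous initialization.
+        GetComponent<PeerConnection>().OnInitialized.AddListener(OnPeerInitialized);
+    }
+
+    private void OnPeerInitialized()
+    {
+#if UNITY_WSA && !UNITY_EDITOR
+        // H.264 uses the hardware encoder on HoloLens 2; VP8/VP9 are software
+        // codecs there and very CPU intensive (see the notes above).
+        GetComponent<PeerConnection>().Peer.PreferredVideoCodec = "H264";
+#endif
+    }
+}
+```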
diff --git a/com.microsoft.mixedreality.webrtc/README.md.meta b/com.microsoft.mixedreality.webrtc/README.md.meta new file mode 100644 index 0000000..45ac3de --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/README.md.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: a39d36a50babebf428f22acf02fa12b5 +TextScriptImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime.meta b/com.microsoft.mixedreality.webrtc/Runtime.meta new file mode 100644 index 0000000..16dcba2 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: b12d4c42e6df3304d8c9d14014cb63ce +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Materials.meta b/com.microsoft.mixedreality.webrtc/Runtime/Materials.meta new file mode 100644 index 0000000..796ee85 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Materials.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 2ac3ec7467bfa7e42a9e59201173e535 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Materials/ARGBFeedMaterial.mat b/com.microsoft.mixedreality.webrtc/Runtime/Materials/ARGBFeedMaterial.mat new file mode 100644 index 0000000..ca26628 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Materials/ARGBFeedMaterial.mat @@ -0,0 +1,78 @@ +%YAML 1.1 +%TAG !u! tag:unity3d.com,2011: +--- !u!21 &2100000 +Material: + serializedVersion: 6 + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: ARGBFeedMaterial + m_Shader: {fileID: 4800000, guid: 774ee07e70a065847b90a93e9c377d33, type: 3} + m_ShaderKeywords: + m_LightmapFlags: 4 + m_EnableInstancingVariants: 1 + m_DoubleSidedGI: 0 + m_CustomRenderQueue: -1 + stringTagMap: {} + disabledShaderPasses: [] + m_SavedProperties: + serializedVersion: 3 + m_TexEnvs: + - _BumpMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _DetailAlbedoMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _DetailMask: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _DetailNormalMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _EmissionMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _MainTex: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _MetallicGlossMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _OcclusionMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _ParallaxMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + m_Floats: + - _BumpScale: 1 + - _Cutoff: 0.5 + - _DetailNormalMapScale: 1 + - _DstBlend: 0 + - _GlossMapScale: 1 + - _Glossiness: 0.5 + - _GlossyReflections: 1 + - _Metallic: 0 + - _Mirror: 0 + - _Mode: 0 + - _OcclusionStrength: 1 + - _Parallax: 0.02 + - _SmoothnessTextureChannel: 0 + - _SpecularHighlights: 1 + - _SrcBlend: 1 + - _UVSec: 0 + - _ZWrite: 1 + m_Colors: + - _Color: {r: 1, g: 1, b: 1, a: 1} + - _EmissionColor: {r: 0, g: 0, b: 0, a: 1} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Materials/ARGBFeedMaterial.mat.meta 
b/com.microsoft.mixedreality.webrtc/Runtime/Materials/ARGBFeedMaterial.mat.meta new file mode 100644 index 0000000..cd88243 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Materials/ARGBFeedMaterial.mat.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 4d29421bacacd4b4fb2963ac2f71afe5 +NativeFormatImporter: + externalObjects: {} + mainObjectFileID: 2100000 + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Materials/YUVFeedMaterial.mat b/com.microsoft.mixedreality.webrtc/Runtime/Materials/YUVFeedMaterial.mat new file mode 100644 index 0000000..bec605d --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Materials/YUVFeedMaterial.mat @@ -0,0 +1,90 @@ +%YAML 1.1 +%TAG !u! tag:unity3d.com,2011: +--- !u!21 &2100000 +Material: + serializedVersion: 6 + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_Name: YUVFeedMaterial + m_Shader: {fileID: 4800000, guid: f5704ed07eb02e5438d0b6f485061362, type: 3} + m_ShaderKeywords: + m_LightmapFlags: 4 + m_EnableInstancingVariants: 1 + m_DoubleSidedGI: 0 + m_CustomRenderQueue: -1 + stringTagMap: {} + disabledShaderPasses: [] + m_SavedProperties: + serializedVersion: 3 + m_TexEnvs: + - _BumpMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _DetailAlbedoMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _DetailMask: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _DetailNormalMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _EmissionMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _MainTex: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _MetallicGlossMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _OcclusionMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _ParallaxMap: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _UPlane: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _VPlane: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + - _YPlane: + m_Texture: {fileID: 0} + m_Scale: {x: 1, y: 1} + m_Offset: {x: 0, y: 0} + m_Floats: + - _BumpScale: 1 + - _Cutoff: 0.5 + - _DetailNormalMapScale: 1 + - _DstBlend: 0 + - _GlossMapScale: 1 + - _Glossiness: 0.5 + - _GlossyReflections: 1 + - _Metallic: 0 + - _Mirror: 0 + - _Mode: 0 + - _OcclusionStrength: 1 + - _Parallax: 0.02 + - _SmoothnessTextureChannel: 0 + - _SpecularHighlights: 1 + - _SrcBlend: 1 + - _UVSec: 0 + - _ZWrite: 1 + m_Colors: + - _Color: {r: 1, g: 1, b: 1, a: 1} + - _EmissionColor: {r: 0, g: 0, b: 0, a: 1} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Materials/YUVFeedMaterial.mat.meta b/com.microsoft.mixedreality.webrtc/Runtime/Materials/YUVFeedMaterial.mat.meta new file mode 100644 index 0000000..223cd4d --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Materials/YUVFeedMaterial.mat.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 82241a8371f7b1c4db3b05da484e9648 +NativeFormatImporter: + externalObjects: {} + mainObjectFileID: 0 + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Microsoft.MixedReality.WebRTC.Unity.asmdef b/com.microsoft.mixedreality.webrtc/Runtime/Microsoft.MixedReality.WebRTC.Unity.asmdef new file mode 100644 
index 0000000..f54a45c --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Microsoft.MixedReality.WebRTC.Unity.asmdef @@ -0,0 +1,12 @@ +{ + "name": "Microsoft.MixedReality.WebRTC.Unity", + "references": [], + "optionalUnityReferences": [], + "includePlatforms": [], + "excludePlatforms": [], + "allowUnsafeCode": true, + "overrideReferences": false, + "precompiledReferences": [], + "autoReferenced": true, + "defineConstraints": [] +} \ No newline at end of file diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Microsoft.MixedReality.WebRTC.Unity.asmdef.meta b/com.microsoft.mixedreality.webrtc/Runtime/Microsoft.MixedReality.WebRTC.Unity.asmdef.meta new file mode 100644 index 0000000..4442999 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Microsoft.MixedReality.WebRTC.Unity.asmdef.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: 73d456c551e58394a94a92ad9cd538db +AssemblyDefinitionImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Plugins.meta b/com.microsoft.mixedreality.webrtc/Runtime/Plugins.meta new file mode 100644 index 0000000..f34af2a --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Plugins.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 891c987abf0c3284299c7a567d6495cb +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA.meta b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA.meta new file mode 100644 index 0000000..7a7f935 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: a67ae1ff85560ec449930959330fe56b +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/ARM.meta b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/ARM.meta new file mode 100644 index 0000000..a2826d8 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/ARM.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: cd91048fd24d72a4db51fbabfe6a4221 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/ARM/mrwebrtc.dll b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/ARM/mrwebrtc.dll new file mode 100644 index 0000000..4b2d7a3 Binary files /dev/null and b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/ARM/mrwebrtc.dll differ diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/ARM/mrwebrtc.dll.meta b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/ARM/mrwebrtc.dll.meta new file mode 100644 index 0000000..65c0976 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/ARM/mrwebrtc.dll.meta @@ -0,0 +1,94 @@ +fileFormatVersion: 2 +guid: 69f9b3e0684c398499cc494d5b384891 +PluginImporter: + externalObjects: {} + serializedVersion: 2 + iconMap: {} + executionOrder: {} + defineConstraints: [] + isPreloaded: 0 + isOverridable: 0 + isExplicitlyReferenced: 0 + validateReferences: 1 + platformData: + - first: + '': Any + second: + enabled: 0 + settings: + Exclude Editor: 1 + Exclude Linux: 1 + Exclude Linux64: 1 + Exclude LinuxUniversal: 1 + Exclude OSXUniversal: 1 + Exclude Win: 1 + Exclude Win64: 1 + Exclude WindowsStoreApps: 0 + - first: + Any: + second: + enabled: 0 + settings: {} + - 
first: + Editor: Editor + second: + enabled: 0 + settings: + CPU: AnyCPU + DefaultValueInitialized: true + OS: AnyOS + - first: + Facebook: Win + second: + enabled: 0 + settings: + CPU: AnyCPU + - first: + Facebook: Win64 + second: + enabled: 0 + settings: + CPU: AnyCPU + - first: + Standalone: Linux + second: + enabled: 0 + settings: + CPU: x86 + - first: + Standalone: Linux64 + second: + enabled: 0 + settings: + CPU: x86_64 + - first: + Standalone: OSXUniversal + second: + enabled: 0 + settings: + CPU: AnyCPU + - first: + Standalone: Win + second: + enabled: 0 + settings: + CPU: AnyCPU + - first: + Standalone: Win64 + second: + enabled: 0 + settings: + CPU: AnyCPU + - first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 1 + settings: + CPU: ARM + DontProcess: false + PlaceholderPath: + SDK: UWP + ScriptingBackend: AnyScriptingBackend + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/x86.meta b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/x86.meta new file mode 100644 index 0000000..3923ceb --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/x86.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 87283c81240e3c845ac10f528ea8d70e +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/x86/mrwebrtc.dll b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/x86/mrwebrtc.dll new file mode 100644 index 0000000..c9e49d2 Binary files /dev/null and b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/x86/mrwebrtc.dll differ diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/x86/mrwebrtc.dll.meta b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/x86/mrwebrtc.dll.meta new file mode 100644 index 0000000..7b1a536 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/x86/mrwebrtc.dll.meta @@ -0,0 +1,94 @@ +fileFormatVersion: 2 +guid: 253af5590ef9c674c96e0ba020b8ca2a +PluginImporter: + externalObjects: {} + serializedVersion: 2 + iconMap: {} + executionOrder: {} + defineConstraints: [] + isPreloaded: 0 + isOverridable: 0 + isExplicitlyReferenced: 0 + validateReferences: 1 + platformData: + - first: + '': Any + second: + enabled: 0 + settings: + Exclude Editor: 1 + Exclude Linux: 1 + Exclude Linux64: 1 + Exclude LinuxUniversal: 1 + Exclude OSXUniversal: 1 + Exclude Win: 1 + Exclude Win64: 1 + Exclude WindowsStoreApps: 0 + - first: + Any: + second: + enabled: 0 + settings: {} + - first: + Editor: Editor + second: + enabled: 0 + settings: + CPU: AnyCPU + DefaultValueInitialized: true + OS: AnyOS + - first: + Facebook: Win + second: + enabled: 0 + settings: + CPU: AnyCPU + - first: + Facebook: Win64 + second: + enabled: 0 + settings: + CPU: AnyCPU + - first: + Standalone: Linux + second: + enabled: 0 + settings: + CPU: x86 + - first: + Standalone: Linux64 + second: + enabled: 0 + settings: + CPU: x86_64 + - first: + Standalone: OSXUniversal + second: + enabled: 0 + settings: + CPU: AnyCPU + - first: + Standalone: Win + second: + enabled: 0 + settings: + CPU: AnyCPU + - first: + Standalone: Win64 + second: + enabled: 0 + settings: + CPU: AnyCPU + - first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 1 + settings: + CPU: X86 + DontProcess: false + PlaceholderPath: + SDK: UWP + ScriptingBackend: AnyScriptingBackend + userData: + assetBundleName: + assetBundleVariant: diff --git 
a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/x86_64.meta b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/x86_64.meta new file mode 100644 index 0000000..8fd916e --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/x86_64.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 86c4a2b485ca86c49bc51121c18352b7 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/x86_64/mrwebrtc.dll b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/x86_64/mrwebrtc.dll new file mode 100644 index 0000000..85c8b74 Binary files /dev/null and b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/x86_64/mrwebrtc.dll differ diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/x86_64/mrwebrtc.dll.meta b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/x86_64/mrwebrtc.dll.meta new file mode 100644 index 0000000..3126e56 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/WSA/x86_64/mrwebrtc.dll.meta @@ -0,0 +1,94 @@ +fileFormatVersion: 2 +guid: 25357693012347b4ba514e0974b57edb +PluginImporter: + externalObjects: {} + serializedVersion: 2 + iconMap: {} + executionOrder: {} + defineConstraints: [] + isPreloaded: 0 + isOverridable: 0 + isExplicitlyReferenced: 0 + validateReferences: 1 + platformData: + - first: + '': Any + second: + enabled: 0 + settings: + Exclude Editor: 1 + Exclude Linux: 1 + Exclude Linux64: 1 + Exclude LinuxUniversal: 1 + Exclude OSXUniversal: 1 + Exclude Win: 1 + Exclude Win64: 1 + Exclude WindowsStoreApps: 0 + - first: + Any: + second: + enabled: 0 + settings: {} + - first: + Editor: Editor + second: + enabled: 0 + settings: + CPU: AnyCPU + DefaultValueInitialized: true + OS: AnyOS + - first: + Facebook: Win + second: + enabled: 0 + settings: + CPU: AnyCPU + - first: + Facebook: Win64 + second: + enabled: 0 + settings: + CPU: AnyCPU + - first: + Standalone: Linux + second: + enabled: 0 + settings: + CPU: x86 + - first: + Standalone: Linux64 + second: + enabled: 0 + settings: + CPU: x86_64 + - first: + Standalone: OSXUniversal + second: + enabled: 0 + settings: + CPU: AnyCPU + - first: + Standalone: Win + second: + enabled: 0 + settings: + CPU: AnyCPU + - first: + Standalone: Win64 + second: + enabled: 0 + settings: + CPU: AnyCPU + - first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 1 + settings: + CPU: X64 + DontProcess: false + PlaceholderPath: + SDK: UWP + ScriptingBackend: AnyScriptingBackend + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32.meta b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32.meta new file mode 100644 index 0000000..db16a14 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 9b7dc87675412e3498abe31bb3f21ea9 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86.meta b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86.meta new file mode 100644 index 0000000..c14019a --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 6a2e77d486b78dc4fb1db627622c466f +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git 
a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86/mrwebrtc.dll b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86/mrwebrtc.dll new file mode 100644 index 0000000..006ff6a Binary files /dev/null and b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86/mrwebrtc.dll differ diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86/mrwebrtc.dll.meta b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86/mrwebrtc.dll.meta new file mode 100644 index 0000000..25f7470 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86/mrwebrtc.dll.meta @@ -0,0 +1,107 @@ +fileFormatVersion: 2 +guid: 0d44fdee241b5c249a8ea819f5ef9e9a +PluginImporter: + externalObjects: {} + serializedVersion: 2 + iconMap: {} + executionOrder: {} + defineConstraints: [] + isPreloaded: 0 + isOverridable: 0 + isExplicitlyReferenced: 0 + validateReferences: 1 + platformData: + - first: + '': Any + second: + enabled: 0 + settings: + Exclude Android: 1 + Exclude Editor: 0 + Exclude Linux: 1 + Exclude Linux64: 1 + Exclude LinuxUniversal: 1 + Exclude OSXUniversal: 1 + Exclude Win: 0 + Exclude Win64: 1 + Exclude WindowsStoreApps: 1 + - first: + Android: Android + second: + enabled: 0 + settings: + CPU: ARMv7 + - first: + Any: + second: + enabled: 0 + settings: {} + - first: + Editor: Editor + second: + enabled: 1 + settings: + CPU: x86 + DefaultValueInitialized: true + OS: Windows + - first: + Facebook: Win + second: + enabled: 0 + settings: + CPU: AnyCPU + - first: + Facebook: Win64 + second: + enabled: 0 + settings: + CPU: None + - first: + Standalone: Linux + second: + enabled: 0 + settings: + CPU: x86 + - first: + Standalone: Linux64 + second: + enabled: 0 + settings: + CPU: x86_64 + - first: + Standalone: LinuxUniversal + second: + enabled: 0 + settings: + CPU: AnyCPU + - first: + Standalone: OSXUniversal + second: + enabled: 0 + settings: + CPU: AnyCPU + - first: + Standalone: Win + second: + enabled: 1 + settings: + CPU: AnyCPU + - first: + Standalone: Win64 + second: + enabled: 0 + settings: + CPU: None + - first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 0 + settings: + CPU: X86 + DontProcess: false + PlaceholderPath: + SDK: AnySDK + ScriptingBackend: AnyScriptingBackend + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86_64.meta b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86_64.meta new file mode 100644 index 0000000..529f25c --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86_64.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 3628030434909ed449e803bbea25bbda +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86_64/Microsoft.MixedReality.WebRTC.dll b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86_64/Microsoft.MixedReality.WebRTC.dll new file mode 100644 index 0000000..63ea115 Binary files /dev/null and b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86_64/Microsoft.MixedReality.WebRTC.dll differ diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86_64/Microsoft.MixedReality.WebRTC.dll.meta b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86_64/Microsoft.MixedReality.WebRTC.dll.meta new file mode 100644 index 0000000..72a4921 --- /dev/null +++ 
b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86_64/Microsoft.MixedReality.WebRTC.dll.meta @@ -0,0 +1,33 @@ +fileFormatVersion: 2 +guid: 9d631da17d93bc94ca472a4d7547e4f2 +PluginImporter: + externalObjects: {} + serializedVersion: 2 + iconMap: {} + executionOrder: {} + defineConstraints: [] + isPreloaded: 0 + isOverridable: 0 + isExplicitlyReferenced: 0 + validateReferences: 1 + platformData: + - first: + Any: + second: + enabled: 1 + settings: {} + - first: + Editor: Editor + second: + enabled: 1 + settings: + DefaultValueInitialized: true + - first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 1 + settings: + CPU: AnyCPU + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86_64/mrwebrtc.dll b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86_64/mrwebrtc.dll new file mode 100644 index 0000000..46458a3 Binary files /dev/null and b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86_64/mrwebrtc.dll differ diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86_64/mrwebrtc.dll.meta b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86_64/mrwebrtc.dll.meta new file mode 100644 index 0000000..1d8328c --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/Win32/x86_64/mrwebrtc.dll.meta @@ -0,0 +1,107 @@ +fileFormatVersion: 2 +guid: 39b57d3e3a74fe944826f061f56eb4ee +PluginImporter: + externalObjects: {} + serializedVersion: 2 + iconMap: {} + executionOrder: {} + defineConstraints: [] + isPreloaded: 0 + isOverridable: 0 + isExplicitlyReferenced: 0 + validateReferences: 1 + platformData: + - first: + '': Any + second: + enabled: 0 + settings: + Exclude Android: 1 + Exclude Editor: 0 + Exclude Linux: 1 + Exclude Linux64: 1 + Exclude LinuxUniversal: 1 + Exclude OSXUniversal: 1 + Exclude Win: 1 + Exclude Win64: 0 + Exclude WindowsStoreApps: 1 + - first: + Android: Android + second: + enabled: 0 + settings: + CPU: ARMv7 + - first: + Any: + second: + enabled: 0 + settings: {} + - first: + Editor: Editor + second: + enabled: 1 + settings: + CPU: x86_64 + DefaultValueInitialized: true + OS: Windows + - first: + Facebook: Win + second: + enabled: 0 + settings: + CPU: None + - first: + Facebook: Win64 + second: + enabled: 0 + settings: + CPU: AnyCPU + - first: + Standalone: Linux + second: + enabled: 0 + settings: + CPU: x86 + - first: + Standalone: Linux64 + second: + enabled: 0 + settings: + CPU: x86_64 + - first: + Standalone: LinuxUniversal + second: + enabled: 0 + settings: + CPU: AnyCPU + - first: + Standalone: OSXUniversal + second: + enabled: 0 + settings: + CPU: AnyCPU + - first: + Standalone: Win + second: + enabled: 0 + settings: + CPU: None + - first: + Standalone: Win64 + second: + enabled: 1 + settings: + CPU: AnyCPU + - first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 0 + settings: + CPU: AnyCPU + DontProcess: false + PlaceholderPath: + SDK: AnySDK + ScriptingBackend: AnyScriptingBackend + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/arm64-v8a.meta b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/arm64-v8a.meta new file mode 100644 index 0000000..9572c99 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/arm64-v8a.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 702ba65cbaecaec4db83b42719f4c0b1 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git 
a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/arm64-v8a/mrwebrtc.aar b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/arm64-v8a/mrwebrtc.aar new file mode 100644 index 0000000..986d1f2 Binary files /dev/null and b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/arm64-v8a/mrwebrtc.aar differ diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Plugins/arm64-v8a/mrwebrtc.aar.meta b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/arm64-v8a/mrwebrtc.aar.meta new file mode 100644 index 0000000..2e31603 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Plugins/arm64-v8a/mrwebrtc.aar.meta @@ -0,0 +1,32 @@ +fileFormatVersion: 2 +guid: f92671604373d0b4b8946b94842d6e91 +PluginImporter: + externalObjects: {} + serializedVersion: 2 + iconMap: {} + executionOrder: {} + defineConstraints: [] + isPreloaded: 0 + isOverridable: 0 + isExplicitlyReferenced: 0 + validateReferences: 1 + platformData: + - first: + Android: Android + second: + enabled: 1 + settings: {} + - first: + Any: + second: + enabled: 0 + settings: {} + - first: + Editor: Editor + second: + enabled: 0 + settings: + DefaultValueInitialized: true + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts.meta new file mode 100644 index 0000000..d359461 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 8351cde9ca1c3c64684120f3b3c45cc5 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Android.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Android.cs new file mode 100644 index 0000000..7a9701f --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Android.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using UnityEngine; + +namespace Microsoft.MixedReality.WebRTC.Unity +{ + public static class Android + { + /// + /// Check if the Android interop layer for Android is already initialized. + /// + public static bool IsInitialized { get; private set; } = false; + + /// + /// Initialize the MixedReality-WebRTC library interop layer for Android. + /// + /// This is automatically called by the various library API functions, and + /// can be safely called multiple times (no-op after first call). 
+ /// + public static void Initialize() + { +#if !UNITY_EDITOR && UNITY_ANDROID + if (IsInitialized) + { + return; + } + + // See webrtc/examples/unityplugin/ANDROID_INSTRUCTION + // Below is equivalent of this java code: + // PeerConnectionFactory.InitializationOptions.Builder builder = PeerConnectionFactory.InitializationOptions.Builder(UnityPlayer.currentActivity); + // builder.setNativeLibraryName("mrwebrtc"); + // PeerConnectionFactory.InitializationOptions options = builder.createInitializationOptions(); + // PeerConnectionFactory.initialize(options); + AndroidJavaClass playerClass = new AndroidJavaClass("com.unity3d.player.UnityPlayer"); + Debug.Assert(playerClass != null); + AndroidJavaObject activity = playerClass.GetStatic("currentActivity"); + Debug.Assert(activity != null); + Debug.Log("Found Unity Java activity."); + AndroidJavaClass webrtcClass = new AndroidJavaClass("org.webrtc.PeerConnectionFactory"); + Debug.Assert(webrtcClass != null); + AndroidJavaClass initOptionsClass = new AndroidJavaClass("org.webrtc.PeerConnectionFactory$InitializationOptions"); + Debug.Assert(initOptionsClass != null); + AndroidJavaObject builder = initOptionsClass.CallStatic("builder", new object[1] { activity }); + Debug.Assert(builder != null); + builder.Call("setNativeLibraryName", new object[1] { "mrwebrtc" }); + AndroidJavaObject options = builder.Call("createInitializationOptions"); + webrtcClass.CallStatic("initialize", new object[1] { options }); + IsInitialized = true; + Debug.Log("Initialized MixedReality-WebRTC Java binding for Android."); +#endif + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Android.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Android.cs.meta new file mode 100644 index 0000000..0641c52 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Android.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 71cf2449ddf984f45a44a0cefb983a31 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Attributes.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Attributes.meta new file mode 100644 index 0000000..3ac568b --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Attributes.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 3a07576d8352096448e6eeccfa4e2024 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Attributes/CaptureCameraAttribute.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Attributes/CaptureCameraAttribute.cs new file mode 100644 index 0000000..69268ae --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Attributes/CaptureCameraAttribute.cs @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using UnityEngine; +using UnityEngine.XR; + +namespace Microsoft.MixedReality.WebRTC.Unity +{ + /// + /// Attribute for a property used by + /// to capture the content of a framebuffer, and for which some constraints on stereoscopic rendering + /// options need to be enforced (and errors can be reported in the Editor if they are not followed). 
+ /// + /// + public class CaptureCameraAttribute : PropertyAttribute + { + /// + /// Validate that a given instance can be used for framebuffer + /// capture by based on the XR settings currently in effect. + /// + /// The camera instance to test the settings of. + /// + /// The camera has settings not compatible with its use with . + /// + /// + public static void Validate(Camera camera) + { + if (camera != null) + { + if (XRSettings.stereoRenderingMode == XRSettings.StereoRenderingMode.MultiPass) + { + // Ensure camera is not rendering to both eyes in multi-pass stereo, otherwise the command buffer + // is executed twice (once per eye) and will produce twice as many frames, which leads to stuttering + // when playing back the video stream resulting from combining those frames. + if (camera.stereoTargetEye == StereoTargetEyeMask.Both) + { + throw new NotSupportedException("Capture camera renders both eyes in multi-pass stereoscopic rendering. This is not" + + " supported by the capture mechanism which cannot discriminate them. Set Camera.stereoTargetEye to either Left or" + + " Right, or use a different XRSettings.stereoRenderingMode."); + } + } +#if !UNITY_2019_1_OR_NEWER + else if ((XRSettings.stereoRenderingMode == XRSettings.StereoRenderingMode.SinglePassInstanced) + || (XRSettings.stereoRenderingMode == XRSettings.StereoRenderingMode.SinglePassMultiview)) // same as instanced (OpenGL) + { + throw new NotSupportedException("Capture camera does not support single-pass instanced stereoscopic rendering before Unity 2019.1."); + } +#endif + } + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Attributes/CaptureCameraAttribute.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Attributes/CaptureCameraAttribute.cs.meta new file mode 100644 index 0000000..8591772 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Attributes/CaptureCameraAttribute.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: fb1356a40780d844785dd91b4ae7e9bf +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Attributes/EditorAttributes.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Attributes/EditorAttributes.cs new file mode 100644 index 0000000..efe7b26 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Attributes/EditorAttributes.cs @@ -0,0 +1,20 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using UnityEngine; + +// This file contains a collection of attributes only used for editing purpose, +// generally to customize the Inspector window. They need to be located in the +// runtime assembly to be attached to runtime object fields, but do not influence +// their runtime behavior. + +namespace Microsoft.MixedReality.WebRTC.Unity.Editor +{ + /// + /// Attribute to display a boolean field with a toggle on its left, prefixing + /// the actual text of the field. 
+ /// + public class ToggleLeftAttribute : PropertyAttribute + { + } +} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Attributes/EditorAttributes.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Attributes/EditorAttributes.cs.meta new file mode 100644 index 0000000..f1211f1 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Attributes/EditorAttributes.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 9233eb3e4d94ea547abfc93030c3dfcc +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Attributes/SdpTokenAttribute.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Attributes/SdpTokenAttribute.cs new file mode 100644 index 0000000..bf22bf4 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Attributes/SdpTokenAttribute.cs @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using System.Text.RegularExpressions; +using UnityEngine; + +namespace Microsoft.MixedReality.WebRTC.Unity +{ + /// + /// Attribute for string properties representing an SDP token, which has constraints + /// on the allowed characters it can contain, as defined in the SDP RFC. + /// + /// See https://tools.ietf.org/html/rfc4566#page-43 for details. + /// + public class SdpTokenAttribute : PropertyAttribute + { + /// + /// Allow empty tokens, that is a string property which is null or an empty string. + /// This is not valid in the RFC, but can be allowed as a property value to represent a default + /// value generated at runtime by the implementation instead of being provided by the user. + /// + /// This is typically used as an argument to . + /// + /// true to allow the property to be null or empty. + public bool AllowEmpty { get; } + + /// Value of . + public SdpTokenAttribute(bool allowEmpty = true) + { + AllowEmpty = allowEmpty; + } + + /// + /// Validate an SDP token name against the list of allowed characters: + /// - Symbols [!#$%'*+-.^_`{|}~&] + /// - Alphanumerical characters [A-Za-z0-9] + /// + /// If the validation fails, the method throws an exception. + /// + /// + /// See https://tools.ietf.org/html/rfc4566#page-43 for 'token' reference. + /// + /// The token name to validate. + /// + /// true to allow the property to be null or empty without raising an exception. + /// + /// + /// is null or empty, and is false. + /// + /// + /// contains invalid characters not allowed for a SDP 'token' item. 
+        /// 
+        public static void Validate(string name, bool allowEmpty = true)
+        {
+            if (string.IsNullOrEmpty(name))
+            {
+                if (allowEmpty)
+                {
+                    return;
+                }
+                throw new ArgumentNullException(nameof(name), "Invalid null SDP token.");
+            }
+
+            // Note: the hyphen is placed last inside the character class so it is
+            // matched literally instead of forming a character range.
+            var regex = new Regex("^[A-Za-z0-9!#$%&'*+.^_`{|}~-]+$");
+            if (regex.IsMatch(name))
+            {
+                return;
+            }
+
+            throw new ArgumentException($"SDP token '{name}' contains invalid characters.");
+        }
+    }
+}
diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Attributes/SdpTokenAttribute.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Attributes/SdpTokenAttribute.cs.meta
new file mode 100644
index 0000000..d0b980f
--- /dev/null
+++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Attributes/SdpTokenAttribute.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: aaf6d8dd252cb2145adc737313c39834
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 
diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media.meta
new file mode 100644
index 0000000..95bab77
--- /dev/null
+++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media.meta
@@ -0,0 +1,8 @@
+fileFormatVersion: 2
+guid: 0140a508c0b94144780f23ed0b892271
+folderAsset: yes
+DefaultImporter:
+  externalObjects: {}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 
diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/AudioReceiver.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/AudioReceiver.cs
new file mode 100644
index 0000000..be60d5c
--- /dev/null
+++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/AudioReceiver.cs
@@ -0,0 +1,93 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+using System;
+using UnityEngine;
+using UnityEngine.Events;
+
+namespace Microsoft.MixedReality.WebRTC.Unity
+{
+    /// 
+    /// Unity event corresponding to a new audio stream being started.
+    /// 
+    [Serializable]
+    public class AudioStreamStartedEvent : UnityEvent<RemoteAudioTrack>
+    { };
+
+    /// 
+    /// Unity event corresponding to an on-going audio stream being stopped.
+    /// 
+    [Serializable]
+    public class AudioStreamStoppedEvent : UnityEvent<RemoteAudioTrack>
+    { };
+
+    /// 
+    /// Endpoint for a WebRTC remote audio track.
+    /// 
+    /// 
+    /// Setting this on an audio MediaLine will enable the corresponding transceiver to receive.
+    /// A remote track will be exposed through AudioTrack once a connection is established.
+    /// The audio track can optionally be played locally with an AudioRenderer.
+    /// 
+    [AddComponentMenu("MixedReality-WebRTC/Audio Receiver")]
+    public class AudioReceiver : MediaReceiver
+    {
+        /// 
+        /// Remote audio track receiving data from the remote peer.
+        /// 
+        /// 
+        /// This is null until:
+        /// 
+        /// MediaLine.Receiver is set to a non-null value, and
+        /// the remote peer starts sending data to the paired transceiver after a session negotiation.
+        /// 
+        /// 
+        public RemoteAudioTrack AudioTrack { get; private set; }
+
+        /// 
+        /// Event raised when the audio stream started.
+        ///
+        /// When this event is raised, the following are true:
+        /// - The AudioTrack property is a valid remote audio track.
+        /// - The IsLive property is true.
+        /// 
+        /// 
+        /// This event is raised from the main Unity thread to allow Unity object access.
+        /// 
+        public AudioStreamStartedEvent AudioStreamStarted = new AudioStreamStartedEvent();
+
+        /// 
+        /// Event raised when the audio stream stopped.
+        ///
+        /// When this event is raised, the following are true:
+        /// - The IsLive property is false.
+        /// 
+        /// 
+        /// This event is raised from the main Unity thread to allow Unity object access.
+        /// 
+        public AudioStreamStoppedEvent AudioStreamStopped = new AudioStreamStoppedEvent();
+
+        /// 
+        public override MediaKind MediaKind => MediaKind.Audio;
+        /// 
+        public override MediaTrack Track => AudioTrack;
+
+        /// 
+        protected internal override void OnPaired(MediaTrack track)
+        {
+            var remoteAudioTrack = (RemoteAudioTrack)track;
+
+            Debug.Assert(Track == null);
+            AudioTrack = remoteAudioTrack;
+            AudioStreamStarted.Invoke(remoteAudioTrack);
+        }
+
+        /// 
+        protected internal override void OnUnpaired(MediaTrack track)
+        {
+            Debug.Assert(Track == track);
+            AudioTrack = null;
+            AudioStreamStopped.Invoke((RemoteAudioTrack)track);
+        }
+    }
+}
diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/AudioReceiver.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/AudioReceiver.cs.meta
new file mode 100644
index 0000000..4722c22
--- /dev/null
+++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/AudioReceiver.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: ac54f2f6b8185d444b4a85e048778344
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 
diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/AudioRenderer.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/AudioRenderer.cs
new file mode 100644
index 0000000..59d75a7
--- /dev/null
+++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/AudioRenderer.cs
@@ -0,0 +1,171 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+using System.Collections.Generic;
+using UnityEngine;
+
+namespace Microsoft.MixedReality.WebRTC.Unity
+{
+    /// 
+    /// Utility component used to play audio frames obtained from a WebRTC audio source.
+    /// 
+    /// 
+    /// Calling StartRendering and StopRendering
+    /// will start/stop playing the passed audio source through an AudioSource
+    /// component on the same object, if there is one.
+    /// 
+    /// The component will play only while enabled.
+    /// 
+    /// 
+    [AddComponentMenu("MixedReality-WebRTC/Audio Renderer")]
+    [RequireComponent(typeof(UnityEngine.AudioSource))]
+    public class AudioRenderer : MonoBehaviour
+    {
+        /// 
+        /// If true, pad buffer underruns with a sine wave. This will cause artifacts on underruns.
+        /// Use for debugging.
+        /// 
+        public bool PadWithSine = false;
+
+        // Local storage of audio data to be fed to the output
+        private AudioTrackReadBuffer _readBuffer = null;
+
+        // _readBuffer can be accessed concurrently by audio thread (OnAudioFilterRead)
+        // and main thread (StartRendering, StopRendering).
+        private readonly object _readBufferLock = new object();
+
+        // Cached sample rate since we can't access this in OnAudioFilterRead.
+        private int _audioSampleRate = 0;
+
+        // Source that this renderer is currently subscribed to.
+ private IAudioSource _source; + + protected void Awake() + { + AudioSettings.OnAudioConfigurationChanged += OnAudioConfigurationChanged; + OnAudioConfigurationChanged(deviceWasChanged: true); + } + + protected void OnDestroy() + { + AudioSettings.OnAudioConfigurationChanged -= OnAudioConfigurationChanged; + } + + protected void OnEnable() + { + if (_source != null) + { + StartReadBuffer(); + } + } + + protected void OnDisable() + { + if (_source != null) + { + StopReadBuffer(); + } + } + + /// + /// Start rendering the passed source. + /// + /// + /// Can be used to handle . + /// + public void StartRendering(IAudioSource source) + { + Debug.Assert(_source == null); + _source = source; + + if (isActiveAndEnabled) + { + StartReadBuffer(); + } + } + + /// + /// Stop rendering the passed source. Must be called with the same source passed to + /// + /// + /// Can be used to handle . + /// + public void StopRendering(IAudioSource source) + { + Debug.Assert(_source == source); + if (isActiveAndEnabled) + { + StopReadBuffer(); + } + _source = null; + } + + protected void OnAudioFilterRead(float[] data, int channels) + { + var behavior = PadWithSine ? + AudioTrackReadBuffer.PadBehavior.PadWithSine : + AudioTrackReadBuffer.PadBehavior.PadWithZero; + bool hasRead = false; + bool hasOverrun = false; + bool hasUnderrun = false; + + lock (_readBufferLock) + { + // Read and use buffer under lock to prevent disposal while in use. + if (_readBuffer != null) + { + _readBuffer.Read(_audioSampleRate, channels, data, + out int numSamplesRead, out hasOverrun, behavior); + hasRead = true; + hasUnderrun = numSamplesRead < data.Length; + } + } + + if (hasRead) + { + // Uncomment for debugging. + //if (hasOverrun) + //{ + // Debug.LogWarning($"Overrun in track {Track.Name}"); + //} + //if (hasUnderrun) + //{ + // Debug.LogWarning($"Underrun in track {Track.Name}"); + //} + + return; + } + + // If there is no track/buffer, fill array with 0s. + for (int i = 0; i < data.Length; ++i) + { + data[i] = 0.0f; + } + } + + private void OnAudioConfigurationChanged(bool deviceWasChanged) + { + _audioSampleRate = AudioSettings.outputSampleRate; + } + + private void StartReadBuffer() + { + Debug.Assert(_readBuffer == null); + + // OnAudioFilterRead reads the variable concurrently, but the update is atomic + // so we don't need a lock. + _readBuffer = _source.CreateReadBuffer(); + } + + private void StopReadBuffer() + { + lock (_readBufferLock) + { + // Under lock so OnAudioFilterRead won't use the buffer while/after it is disposed. 
+ _readBuffer.Dispose(); + _readBuffer = null; + } + } +} +} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/AudioRenderer.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/AudioRenderer.cs.meta new file mode 100644 index 0000000..3fd2a2b --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/AudioRenderer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: ecc506ef6d3213043bc0529b3f591d39 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/AudioTrackSource.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/AudioTrackSource.cs new file mode 100644 index 0000000..146c67c --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/AudioTrackSource.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using UnityEngine; + +namespace Microsoft.MixedReality.WebRTC.Unity +{ + /// + /// This component represents an audio track source generating audio frames for one or more + /// audio tracks. + /// + /// + public abstract class AudioTrackSource : MediaTrackSource + { + /// + /// Audio track source object from the underlying C# library that this component encapsulates. + /// + /// The object is owned by this component, which will create it and dispose of it automatically. + /// + public WebRTC.AudioTrackSource Source { get; private set; } = null; + + /// + public override MediaKind MediaKind => MediaKind.Audio; + + /// + public override bool IsLive => Source != null; + + protected void AttachSource(WebRTC.AudioTrackSource source) + { + Source = source; + AttachToMediaLines(); + } + + protected void DisposeSource() + { + if (Source != null) + { + DetachFromMediaLines(); + + // Audio track sources are disposable objects owned by the user (this component) + Source.Dispose(); + Source = null; + } + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/AudioTrackSource.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/AudioTrackSource.cs.meta new file mode 100644 index 0000000..22e9d49 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/AudioTrackSource.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: fc15459d5ebc59246a46627053e01b30 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/CustomVideoSource.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/CustomVideoSource.cs new file mode 100644 index 0000000..475bbf4 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/CustomVideoSource.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using System.Diagnostics; + +namespace Microsoft.MixedReality.WebRTC.Unity +{ + /// + /// Abstract base component for a custom video source delivering raw video frames + /// directly to the WebRTC implementation. 
+ ///
+ public abstract class CustomVideoSource : VideoTrackSource where T : IVideoFrameStorage
+ {
+ protected virtual void OnEnable()
+ {
+ Debug.Assert(Source == null);
+
+ // Create the external source
+ //< TODO - Better abstraction
+ if (typeof(T) == typeof(I420AVideoFrameStorage))
+ {
+ AttachSource(ExternalVideoTrackSource.CreateFromI420ACallback(OnFrameRequested));
+ }
+ else if (typeof(T) == typeof(Argb32VideoFrameStorage))
+ {
+ AttachSource(ExternalVideoTrackSource.CreateFromArgb32Callback(OnFrameRequested));
+ }
+ else
+ {
+ throw new NotSupportedException("This frame storage is not supported. Use I420AVideoFrameStorage or Argb32VideoFrameStorage.");
+ }
+ }
+
+ protected virtual void OnDisable()
+ {
+ DisposeSource();
+ }
+
+ protected abstract void OnFrameRequested(in FrameRequest request);
+ }
+}
diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/CustomVideoSource.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/CustomVideoSource.cs.meta
new file mode 100644
index 0000000..233c782
--- /dev/null
+++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/CustomVideoSource.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 26676644c28327f42945c1e5b78fb210
+MonoImporter:
+ externalObjects: {}
+ serializedVersion: 2
+ defaultReferences: []
+ executionOrder: 0
+ icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MediaLine.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MediaLine.cs
new file mode 100644
index 0000000..bb0505f
--- /dev/null
+++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MediaLine.cs
@@ -0,0 +1,460 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+using System;
+using UnityEngine;
+
+namespace Microsoft.MixedReality.WebRTC.Unity
+{
+ ///
+ /// Media line abstraction for a peer connection.
+ ///
+ /// This container binds together a source component () and/or a receiver
+ /// component () on one side, with a transceiver on the other side. The media line
+ /// is a declarative representation of this association, which is then turned into a binding by the implementation
+ /// during an SDP negotiation. This forms the core of the algorithm allowing automatic transceiver pairing
+ /// between the two peers based on the declaration of intent of the user.
+ ///
+ /// Assigning Unity components to the and properties serves
+ /// as an indication of the user intent to send and/or receive media through the transceiver, and is
+ /// used during the SDP exchange to derive the to negotiate.
+ /// After the SDP negotiation is completed, the property refers to the transceiver
+ /// associated with this media line, and which the sender and receiver will use.
+ ///
+ /// Users typically interact with this class through the peer connection transceiver collection in the Unity
+ /// inspector window, though direct manipulation via code is also possible.
+ ///
+ [Serializable]
+ public class MediaLine
+ {
+ ///
+ /// Kind of media of the media line and its attached transceiver.
+ ///
+ /// This is assigned when the media line is created with
+ /// and is immutable for the lifetime of the peer connection.
+ ///
+ public MediaKind MediaKind => _mediaKind;
+
+ ///
+ /// Media source producing the media to send through the transceiver attached to this media line.
+ ///
+ ///
+ /// This must be an instance of a class derived from or
+ /// depending on whether is
+ /// or , respectively.
+ ///
+ /// Internally the peer connection will automatically create and manage a media track to bridge the
+ /// media source with the transceiver.
+ ///
+ /// If this is non-null then the peer connection will negotiate sending some media, otherwise
+ /// it will signal the remote peer that it does not wish to send (receive-only or inactive).
+ ///
+ /// If is valid, that is a first session negotiation has already been completed,
+ /// then changing this value raises a event on the
+ /// peer connection of .
+ ///
+ /// Must be changed on the main Unity app thread.
+ ///
+ public MediaTrackSource Source
+ {
+ get { return _source; }
+ set
+ {
+ if (_source == value)
+ {
+ return;
+ }
+ if (value != null && value.MediaKind != MediaKind)
+ {
+ throw new ArgumentException("Wrong media kind", nameof(Source));
+ }
+
+ var oldTrack = LocalTrack;
+ if (_source != null && _peer.IsAwake)
+ {
+ _source.OnRemovedFromMediaLine(this);
+ }
+ _source = value;
+ if (_source != null && _peer.IsAwake)
+ {
+ _source.OnAddedToMediaLine(this);
+ CreateLocalTrackIfNeeded();
+ }
+ // Dispose the old track *after* replacing it with the new one
+ // so that there is no gap in sending.
+ oldTrack?.Dispose();
+
+ // Whatever the change, keep the direction consistent.
+ UpdateTransceiverDesiredDirection();
+ }
+ }
+
+ ///
+ /// Name of the local media track this component will create when calling .
+ /// If left empty, the implementation will generate a unique name for the track (generally a GUID).
+ ///
+ ///
+ /// This value must comply with the 'msid' attribute rules as defined in
+ /// https://tools.ietf.org/html/draft-ietf-mmusic-msid-05#section-2, which in
+ /// particular constrains the set of allowed characters to those allowed for a
+ /// 'token' element as specified in https://tools.ietf.org/html/rfc4566#page-43:
+ /// - Symbols [!#$%'*+-.^_`{|}~] and ampersand &
+ /// - Alphanumerical characters [A-Za-z0-9]
+ ///
+ /// Users can manually test if a string is a valid SDP token with the utility method
+ /// . The property setter will
+ /// use this and throw an if the token is not a valid
+ /// SDP token.
+ ///
+ /// The sender track name is taken into account each time the track is created. If this
+ /// property is assigned after the track was created (already negotiated), the value will
+ /// be used only for the next negotiation, and the current sender track will keep its
+ /// current track name (either a previous value or a generated one).
+ ///
+ ///
+ public string SenderTrackName
+ {
+ get { return _senderTrackName; }
+ set
+ {
+ SdpTokenAttribute.Validate(value);
+ _senderTrackName = value;
+ }
+ }
+
+ ///
+ /// Local track created from a local source.
+ ///
+ ///
+ /// This is non-null when a live source is attached to the , and the owning
+ /// is connected.
+ ///
+ public LocalMediaTrack LocalTrack => Transceiver?.LocalTrack;
+
+ ///
+ /// Media receiver consuming the media received through the transceiver attached to this media line.
+ ///
+ ///
+ /// This must be an instance of a class derived from or
+ /// depending on whether is
+ /// or , respectively.
+ ///
+ /// If this is non-null then the peer connection will negotiate receiving some media, otherwise
+ /// it will signal the remote peer that it does not wish to receive (send-only or inactive).
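+ ///
+ /// Together with Source, this drives the transceiver direction negotiated with the
+ /// remote peer; for example, a media line with a live Source and a null Receiver is
+ /// negotiated as send-only.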
+ /// + /// If is valid, that is a first session negotiation has already been conducted, + /// then changing this value raises a event on the + /// peer connection of . + /// + /// Must be changed on the main Unity app thread. + /// + public MediaReceiver Receiver + { + get { return _receiver; } + set + { + if (_receiver == value) + { + return; + } + if (value != null && value.MediaKind != MediaKind) + { + throw new ArgumentException("Wrong media kind", nameof(Receiver)); + } + + if (_receiver != null && _peer.IsAwake) + { + if (_remoteTrack != null) + { + _receiver.OnUnpaired(_remoteTrack); + } + _receiver.OnRemovedFromMediaLine(this); + } + _receiver = value; + if (_receiver != null && _peer.IsAwake) + { + _receiver.OnAddedToMediaLine(this); + if (_remoteTrack != null) + { + _receiver.OnPaired(_remoteTrack); + } + } + + // Whatever the change, keep the direction consistent. + UpdateTransceiverDesiredDirection(); + } + } + + /// + /// Transceiver attached with this media line. + /// + /// On the offering peer this changes during , while this is updated by + /// when receiving an offer on the answering peer. + /// + /// Because transceivers cannot be destroyed, once this property is assigned a non-null value it keeps that + /// value until the peer connection owning the media line is closed. + /// + public Transceiver Transceiver { get; private set; } + + /// + /// owning this . + /// + public PeerConnection Peer + { + get => _peer; + internal set + { + Debug.Assert(Peer == null || Peer == value); + _peer = value; + } + } + + #region Private fields + private PeerConnection _peer; + + /// + /// Backing field to serialize the property. + /// + /// + [SerializeField] + private MediaKind _mediaKind; + + /// + /// Backing field to serialize the property. + /// + /// + [SerializeField] + private MediaTrackSource _source; + + /// + /// Backing field to serialize the property. + /// + /// + [SerializeField] + private MediaReceiver _receiver; + + /// + /// Backing field to serialize the sender track's name. + /// + [SerializeField] + [Tooltip("SDP track name")] + [SdpToken(allowEmpty: true)] + private string _senderTrackName; + + // Cache for the remote track opened by the latest negotiation. + // Comparing it to Transceiver.RemoteTrack will tell if streaming has just started/stopped. + private MediaTrack _remoteTrack; + + #endregion + + + /// + /// Constructor called internally by . + /// + /// Immutable value assigned to the property on construction. + internal MediaLine(PeerConnection peer, MediaKind kind) + { + Peer = peer; + _mediaKind = kind; + } + + private void UpdateTransceiverDesiredDirection() + { + if (Transceiver != null) + { + // Avoid races on the desired direction by limiting changes to the main thread. + // Note that EnsureIsMainAppThread cannot be used if _peer is not awake, so only + // check when there is a transceiver (meaning _peer is enabled). + Peer.EnsureIsMainAppThread(); + + bool wantsSend = _source != null && _source.IsLive; + bool wantsRecv = (_receiver != null); + Transceiver.DesiredDirection = Transceiver.DirectionFromSendRecv(wantsSend, wantsRecv); + } + } + + // Initializes and attaches a local track if all the preconditions are satisfied. 
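+ // The preconditions are: a source is assigned and live, and a transceiver has been
+ // paired with this media line; all three conditions are checked below.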
+ private void CreateLocalTrackIfNeeded() + { + if (_source != null && _source.IsLive && Transceiver != null) + { + if (MediaKind == MediaKind.Audio) + { + var audioSource = (AudioTrackSource)_source; + + var initConfig = new LocalAudioTrackInitConfig + { + trackName = _senderTrackName + }; + var audioTrack = LocalAudioTrack.CreateFromSource(audioSource.Source, initConfig); + Transceiver.LocalAudioTrack = audioTrack; + } + else + { + Debug.Assert(MediaKind == MediaKind.Video); + var videoSource = (VideoTrackSource)_source; + + var initConfig = new LocalVideoTrackInitConfig + { + trackName = _senderTrackName + }; + var videoTrack = LocalVideoTrack.CreateFromSource(videoSource.Source, initConfig); + Transceiver.LocalVideoTrack = videoTrack; + } + } + } + + // Detaches and disposes the local track if there is one. + private void DestroyLocalTrackIfAny() + { + var localTrack = Transceiver?.LocalTrack; + if (localTrack != null) + { + if (MediaKind == MediaKind.Audio) + { + Transceiver.LocalAudioTrack = null; + } + else + { + Debug.Assert(MediaKind == MediaKind.Video); + Transceiver.LocalVideoTrack = null; + } + localTrack.Dispose(); + } + } + + internal void UpdateAfterSdpReceived() + { + Debug.Assert(Transceiver != null); + + // Callbacks must be called on the main Unity app thread. + Peer.EnsureIsMainAppThread(); + + var newRemoteTrack = Transceiver.RemoteTrack; + if (_receiver != null) + { + bool wasReceiving = _remoteTrack != null; + bool isReceiving = newRemoteTrack != null; + if (isReceiving && !wasReceiving) + { + // Transceiver started receiving, and user actually wants to receive + _receiver.OnPaired(newRemoteTrack); + } + else if (!isReceiving && wasReceiving) + { + // Transceiver stopped receiving (user intent does not matter here) + _receiver.OnUnpaired(_remoteTrack); + } + } + _remoteTrack = newRemoteTrack; + } + + /// + /// Pair the given transceiver with the current media line. + /// + /// The transceiver to pair with. + /// + /// The transceiver associated in the offer with the same media line index as the current media line + /// has a different media kind than the media line. This is generally a result of the two peers having + /// mismatching media line configurations. + /// + internal void PairTransceiver(Transceiver tr) + { + Peer.EnsureIsMainAppThread(); + + Debug.Assert(tr != null); + Debug.Assert(Transceiver == null); + + // Check consistency before assigning + if (tr.MediaKind != MediaKind) + { + throw new InvalidTransceiverMediaKindException(); + } + Transceiver = tr; + + // Initialize the transceiver direction in sync with Sender and Receiver. + UpdateTransceiverDesiredDirection(); + + // Start the local track if there is a live source. + CreateLocalTrackIfNeeded(); + } + + internal void UnpairTransceiver() + { + Peer.EnsureIsMainAppThread(); + + // Notify the receiver. + if (_remoteTrack != null && _receiver != null) + { + _receiver.OnUnpaired(_remoteTrack); + } + _remoteTrack = null; + + DestroyLocalTrackIfAny(); + + Transceiver = null; + } + + /// + /// Internal callback when the underlying source providing media frames to the sender track + /// is created, and therefore the local media track needs to be created too. + /// + /// + /// + internal void AttachSource() + { + Debug.Assert(Source.IsLive); + CreateLocalTrackIfNeeded(); + UpdateTransceiverDesiredDirection(); + } + + /// + /// Internal callback when the underlying source providing media frames to the sender track + /// is destroyed, and therefore the local media track needs to be destroyed too. 
+ /// + /// + /// + internal void DetachSource() + { + Debug.Assert(Source.IsLive); + DestroyLocalTrackIfAny(); + UpdateTransceiverDesiredDirection(); + } + + internal void OnReceiverDestroyed() + { + // Different from `Receiver = null`. Don't need to call Receiver.OnRemovedFromMediaLine + // or Receiver.OnUnpaired since the Receiver itself has called this. + _receiver = null; + UpdateTransceiverDesiredDirection(); + } + + // Called by PeerConnection.Awake. + internal void Awake() + { + if (_source) + { + // Fill the list of media lines for the source. + _source.OnAddedToMediaLine(this); + } + if (_receiver) + { + _receiver.OnAddedToMediaLine(this); + } + } + + // Called by PeerConnection.OnDestroy. + internal void OnDestroy() + { + if (_source) + { + // Fill the list of media lines for the source. + _source.OnRemovedFromMediaLine(this); + } + if (_receiver) + { + _receiver.OnRemovedFromMediaLine(this); + } + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MediaLine.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MediaLine.cs.meta new file mode 100644 index 0000000..9db04dc --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MediaLine.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 6e4097ad19fee6444b3ddd95e13e0bf2 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MediaReceiver.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MediaReceiver.cs new file mode 100644 index 0000000..1df6c8e --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MediaReceiver.cs @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using UnityEngine; + +namespace Microsoft.MixedReality.WebRTC.Unity +{ + /// + /// Base class for media producers generating frames by receiving them from a remote peer. + /// + public abstract class MediaReceiver : MonoBehaviour + { + /// + /// Media kind of the receiver. + /// + public abstract MediaKind MediaKind { get; } + + /// + /// Remote track associated with this receiver. + /// null if this object is not receiving at this time. + /// + /// + /// This is always a or a + /// + public abstract MediaTrack Track { get; } + + /// + /// Is the media source currently producing frames received from the remote peer? + /// This is true while the remote media track exists, which is notified by + /// events on the or . + /// + public bool IsLive => Track != null; + + /// + /// Transceiver this receiver is paired with, if any. + /// + /// This is null until a remote description is applied which pairs the media line + /// this receiver is associated with to a transceiver, or until the peer connection of this + /// receiver's media line creates the receiver right before creating an SDP offer. + /// + public Transceiver Transceiver => MediaLine?.Transceiver; + + /// + /// Media line this receiver is paired with, if any. + /// + /// + /// Note that this is set to the connected only if the owning + /// is awake. This will be automatically reset if the + /// owning the is destroyed. + /// + public MediaLine MediaLine { get; private set; } + + /// + /// Internal callback invoked when the media receiver is assigned to a media line. + /// + /// The new media line this receiver is assigned to. 
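+ /// Derived classes overriding this method should call the base implementation, which
+ /// records the media line this receiver is now assigned to.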
+ protected internal virtual void OnAddedToMediaLine(MediaLine mediaLine) + { + Debug.Assert(MediaLine == null); + MediaLine = mediaLine; + } + + /// + /// Internal callback invoked when the media receiver is de-assigned from a media line. + /// + /// The old media line this receiver was assigned to. + protected internal virtual void OnRemovedFromMediaLine(MediaLine mediaLine) + { + Debug.Assert(MediaLine == mediaLine); + MediaLine = null; + } + + /// + /// Internal callback invoked when the receiver is paired with a media track. + /// + /// + /// This will be called on the Unity update thread. + /// + /// The media track this receiver is paired with. + protected internal virtual void OnPaired(MediaTrack track) { } + + /// + /// Internal callback invoked when the receiver is unpaired from a media track. + /// + /// + /// This will be called on the Unity update thread. + /// + /// The media track this receiver was paired with. + protected internal virtual void OnUnpaired(MediaTrack track) { } + + protected void OnDestroy() + { + MediaLine?.OnReceiverDestroyed(); + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MediaReceiver.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MediaReceiver.cs.meta new file mode 100644 index 0000000..f8ed79b --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MediaReceiver.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 14c95be69b6ca7c4fa178cfa7c465745 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MediaTrackSource.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MediaTrackSource.cs new file mode 100644 index 0000000..c57ba2d --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MediaTrackSource.cs @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Collections.Generic; +using UnityEngine; + +namespace Microsoft.MixedReality.WebRTC.Unity +{ + /// + /// Base class for media track source components producing some media frames locally. + /// + /// + /// + public abstract class MediaTrackSource : MonoBehaviour + { + /// + /// Media kind of the track source. + /// + public abstract MediaKind MediaKind { get; } + + /// + /// Indicates if the source is currently producing frames. + /// + public abstract bool IsLive { get; } + + /// + /// List of audio media lines using this source. + /// + /// + /// Note that a connected will be added to this only if the owning + /// is awake. A will be automatically + /// removed if the owning is destroyed. 
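+ ///
+ /// A single source instance can appear in several media lines at once, possibly across
+ /// several peer connections, to share the same captured content with multiple remote peers.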
+ /// + public IReadOnlyList MediaLines => _mediaLines; + private readonly List _mediaLines = new List(); + + internal void OnAddedToMediaLine(MediaLine mediaLine) + { + Debug.Assert(!_mediaLines.Contains(mediaLine)); + _mediaLines.Add(mediaLine); + } + + internal void OnRemovedFromMediaLine(MediaLine mediaLine) + { + bool removed = _mediaLines.Remove(mediaLine); + Debug.Assert(removed); + } + + protected void AttachToMediaLines() + { + foreach (var ml in _mediaLines) + { + ml.AttachSource(); + } + } + + protected void DetachFromMediaLines() + { + foreach (var ml in _mediaLines) + { + ml.DetachSource(); + } + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MediaTrackSource.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MediaTrackSource.cs.meta new file mode 100644 index 0000000..6b162c5 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MediaTrackSource.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 909a2a3190166db4ea5ea51d02afa795 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MicrophoneSource.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MicrophoneSource.cs new file mode 100644 index 0000000..ca34809 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MicrophoneSource.cs @@ -0,0 +1,183 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using Microsoft.MixedReality.WebRTC.Unity.Editor; +using UnityEngine; + +#if !UNITY_EDITOR && UNITY_ANDROID +using UnityEngine.Android; +#endif + +#if UNITY_WSA && !UNITY_EDITOR +using System.Threading.Tasks; +using global::Windows.UI.Core; +using global::Windows.Foundation; +using global::Windows.Media.Core; +using global::Windows.Media.Capture; +using global::Windows.ApplicationModel.Core; +#endif + +namespace Microsoft.MixedReality.WebRTC.Unity +{ + /// + /// This component represents a local audio source generating audio frames from a local + /// audio capture device (microphone). The audio source can be used to create one or more + /// audio tracks sharing the same audio content. + /// + [AddComponentMenu("MixedReality-WebRTC/Microphone Source")] + public class MicrophoneSource : AudioTrackSource + { + public bool AutoGainControl => _autoGainControl; + + [SerializeField] + [Tooltip("Enable automated gain control")] + [ToggleLeft] + protected bool _autoGainControl = true; + +#if !UNITY_EDITOR && UNITY_ANDROID + protected bool _androidRecordAudioRequestPending = false; + protected float _androidRecordAudioRequestRetryUntilTime = 0f; +#endif + + protected async void OnEnable() + { + if (Source != null) + { + return; + } + +#if !UNITY_EDITOR && UNITY_ANDROID + // Ensure Android binding is initialized before accessing the native implementation + Android.Initialize(); + + // Check for permission to access the camera + if (!Permission.HasUserAuthorizedPermission(Permission.Microphone)) + { + if (!_androidRecordAudioRequestPending) + { + // Monitor the OnApplicationFocus(true) event during the next 5 minutes, + // and check for permission again each time (see below why). + _androidRecordAudioRequestPending = true; + _androidRecordAudioRequestRetryUntilTime = Time.time + 300; + + // Display dialog requesting user permission. 
This will return immediately, + // and unfortunately there's no good way to tell when this completes. As a rule + // of thumb, application should lose focus, so check when focus resumes should + // be sufficient without having to poll every frame. + Permission.RequestUserPermission(Permission.Microphone); + } + return; + } +#endif + +#if UNITY_WSA && !UNITY_EDITOR + // Request access to audio capture. The OS may show some popup dialog to the + // user to request permission. This will succeed only if the user approves it. + try + { + if (UnityEngine.WSA.Application.RunningOnUIThread()) + { + await RequestAccessAsync(); + } + else + { + UnityEngine.WSA.Application.InvokeOnUIThread(() => RequestAccessAsync(), waitUntilDone: true); + } + } + catch (Exception ex) + { + // Log an error and prevent activation + Debug.LogError($"Audio access failure: {ex.Message}."); + this.enabled = false; + return; + } +#endif + + var initConfig = new LocalAudioDeviceInitConfig + { + AutoGainControl = _autoGainControl, + }; + try + { + AttachSource(await DeviceAudioTrackSource.CreateAsync(initConfig)); + } + catch (Exception ex) + { + Debug.LogError($"Failed to create device track source for {nameof(MicrophoneSource)} component '{name}'."); + Debug.LogException(ex, this); + return; + } + } + +#if !UNITY_EDITOR && UNITY_ANDROID + protected void OnApplicationFocus(bool hasFocus) + { + if (!hasFocus) + { + return; + } + + // If focus is restored after a pending request, check the permission again + if (_androidRecordAudioRequestPending) + { + _androidRecordAudioRequestPending = false; + + if (Permission.HasUserAuthorizedPermission(Permission.Microphone)) + { + // If now authorized, start capture as if just enabled + Debug.Log("User granted authorization to access microphone, starting MicrophoneSource now..."); + OnEnable(); + } + else if (Time.time <= _androidRecordAudioRequestRetryUntilTime) + { + // OnApplicationFocus(true) may be called for unrelated reason(s) so do not disable on first call, + // but instead retry during a given period after the request was made, until we're reasonably + // confident that the user dialog was actually answered (that is, that OnApplicationFocus(true) was + // called because of that dialog, and not because of another reason). + // This may lead to false positives (checking permission after the user denied it), but the user + // dialog will not popup again, so this is all in the background and essentially harmless. + _androidRecordAudioRequestPending = true; + } + else + { + // Some reasonable time passed since we made the permission request, and we still get a denied + // answer, so assume the user actually denied it and stop retrying. + _androidRecordAudioRequestRetryUntilTime = 0f; + Debug.LogError("User denied RecordAudio (microphone) permission; cannot use MicrophoneSource. Forcing enabled=false."); + enabled = false; + } + } + } +#endif + + protected void OnDisable() + { + DisposeSource(); + } + +#if UNITY_WSA && !UNITY_EDITOR + /// + /// Internal UWP helper to ensure device access. + /// + /// + /// This must be called from the main UWP UI thread (not the main Unity app thread). + /// + private Task RequestAccessAsync() + { + // On UWP the app must have the "microphone" capability, and the user must allow microphone + // access. So check that access before trying to initialize the WebRTC library, as this + // may result in a popup window being displayed the first time, which needs to be accepted + // before the microphone can be accessed by WebRTC. 
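+ // Note that this requests audio-only streaming capture (StreamingCaptureMode.Audio),
+ // so the video capture device is not reserved by this initialization.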
+ var mediaAccessRequester = new MediaCapture(); + var mediaSettings = new MediaCaptureInitializationSettings(); + mediaSettings.AudioDeviceId = ""; + mediaSettings.VideoDeviceId = ""; + mediaSettings.StreamingCaptureMode = StreamingCaptureMode.Audio; + mediaSettings.PhotoCaptureSource = PhotoCaptureSource.VideoPreview; + mediaSettings.SharingMode = MediaCaptureSharingMode.SharedReadOnly; // for MRC and lower res camera + return mediaAccessRequester.InitializeAsync(mediaSettings).AsTask(); + } +#endif + } +} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MicrophoneSource.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MicrophoneSource.cs.meta new file mode 100644 index 0000000..7c3f36c --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/MicrophoneSource.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: dafbc4e8e99e46e41823d17cdabbd651 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/SceneVideoSource.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/SceneVideoSource.cs new file mode 100644 index 0000000..212d69e --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/SceneVideoSource.cs @@ -0,0 +1,283 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using System.Threading.Tasks; +using Unity.Collections; +using Unity.Collections.LowLevel.Unsafe; +using UnityEngine; +using UnityEngine.Rendering; +using UnityEngine.XR; + +namespace Microsoft.MixedReality.WebRTC.Unity +{ + /// + /// Custom video source capturing the Unity scene content as rendered by a given camera, + /// and sending it as a video track through the selected peer connection. + /// + public class SceneVideoSource : CustomVideoSource + { + /// + /// Camera used to capture the scene content, whose rendering is used as + /// video content for the track. + /// + /// + /// If the project uses Multi-Pass stereoscopic rendering, then this camera needs to + /// render to a single eye to produce a single video frame. Generally this means that + /// this needs to be a separate Unity camera from the one used for XR rendering, which + /// is generally rendering to both eyes. + /// + /// If the project uses Single-Pass Instanced stereoscopic rendering, then Unity 2019.1+ + /// is required to make this component work, due to the fact earlier versions of Unity + /// are missing some command buffer API calls to be able to efficiently access the camera + /// backbuffer in this mode. For Unity 2018.3 users who cannot upgrade, use Single-Pass + /// (non-instanced) instead. + /// + [Header("Camera")] + [Tooltip("Camera used to capture the scene content sent by the track.")] + [CaptureCamera] + public Camera SourceCamera; + + /// + /// Camera event indicating the point in time during the Unity frame rendering + /// when the camera rendering is to be captured. + /// + /// This defaults to , which is a reasonable + /// default to capture the entire scene rendering, but can be customized to achieve + /// other effects like capturing only a part of the scene. 
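+ /// For example, CameraEvent.BeforeImageEffects would instead capture the scene content
+ /// before any post-processing is applied to it.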
+ ///
+ [Tooltip("Camera event at which to insert the scene capture")]
+ public CameraEvent CameraEvent = CameraEvent.AfterEverything;
+
+ ///
+ /// Command buffer attached to the camera to capture its rendered content from the GPU
+ /// and transfer it to the CPU for dispatching to WebRTC.
+ ///
+ private CommandBuffer _commandBuffer;
+
+ ///
+ /// Read-back texture where the content of the camera backbuffer is copied before being
+ /// transferred from GPU to CPU. The size of the texture is .
+ ///
+ private RenderTexture _readBackTex;
+
+ ///
+ /// Cached width, in pixels, of the readback texture and video frame produced.
+ ///
+ private int _readBackWidth;
+
+ ///
+ /// Cached height, in pixels, of the readback texture and video frame produced.
+ ///
+ private int _readBackHeight;
+
+ ///
+ /// Temporary storage for frames generated by GPU readback until consumed by WebRTC.
+ ///
+ private VideoFrameQueue _frameQueue = new VideoFrameQueue(3);
+
+ protected override void OnEnable()
+ {
+ if (!SystemInfo.supportsAsyncGPUReadback)
+ {
+ Debug.LogError("This platform does not support async GPU readback. Cannot use the SceneVideoSource component.");
+ enabled = false;
+ return;
+ }
+
+ // If no camera provided, attempt to fall back to the main camera
+ if (SourceCamera == null)
+ {
+ var mainCameraGameObject = GameObject.FindGameObjectWithTag("MainCamera");
+ if (mainCameraGameObject != null)
+ {
+ SourceCamera = mainCameraGameObject.GetComponent();
+ }
+ }
+ if (SourceCamera == null)
+ {
+ throw new NullReferenceException("Empty source camera for SceneVideoSource, and could not find MainCamera as fallback.");
+ }
+
+ CreateCommandBuffer();
+ SourceCamera.AddCommandBuffer(CameraEvent, _commandBuffer);
+
+ // Create the track source
+ base.OnEnable();
+ }
+
+ protected override void OnDisable()
+ {
+ base.OnDisable();
+
+ if (_commandBuffer != null)
+ {
+ // The camera sometimes goes away before this component.
+ if (SourceCamera != null)
+ {
+ SourceCamera.RemoveCommandBuffer(CameraEvent, _commandBuffer);
+ }
+
+ _commandBuffer.Dispose();
+ _commandBuffer = null;
+ }
+ }
+
+ ///
+ /// Create the command buffer reading the scene content from the source camera back into CPU memory
+ /// and delivering it via the callback to
+ /// the underlying WebRTC track.
+ ///
+ private void CreateCommandBuffer()
+ {
+ if (_commandBuffer != null)
+ {
+ throw new InvalidOperationException("Command buffer already initialized.");
+ }
+
+ // By default, use the camera's render target texture size
+ _readBackWidth = SourceCamera.scaledPixelWidth;
+ _readBackHeight = SourceCamera.scaledPixelHeight;
+
+ // Offset and scale into source render target.
+ Vector2 srcScale = Vector2.one;
+ Vector2 srcOffset = Vector2.zero;
+ RenderTextureFormat srcFormat = RenderTextureFormat.ARGB32;
+
+ // Handle stereoscopic rendering for VR/AR.
+ // See https://unity3d.com/how-to/XR-graphics-development-tips for details.
+ if (SourceCamera.stereoEnabled)
+ {
+ // Readback size is the size of the texture for a single eye.
+ // The readback will occur on the left eye (chosen arbitrarily).
+ _readBackWidth = XRSettings.eyeTextureWidth;
+ _readBackHeight = XRSettings.eyeTextureHeight;
+ srcFormat = XRSettings.eyeTextureDesc.colorFormat;
+
+ if (XRSettings.stereoRenderingMode == XRSettings.StereoRenderingMode.MultiPass)
+ {
+ // Multi-pass is similar to non-stereo, nothing to do.
+ + // Ensure camera is not rendering to both eyes in multi-pass stereo, otherwise the command buffer + // is executed twice (once per eye) and will produce twice as many frames, which leads to stuttering + // when playing back the video stream resulting from combining those frames. + if (SourceCamera.stereoTargetEye == StereoTargetEyeMask.Both) + { + throw new InvalidOperationException("SourceCamera has stereoscopic rendering enabled to both eyes" + + " with multi-pass rendering (XRSettings.stereoRenderingMode = MultiPass). This is not supported" + + " with SceneVideoSource, as this would produce one image per eye. Either set XRSettings." + + "stereoRenderingMode to single-pass (instanced or not), or use multi-pass with a camera rendering" + + " to a single eye (Camera.stereoTargetEye != Both)."); + } + } + else if (XRSettings.stereoRenderingMode == XRSettings.StereoRenderingMode.SinglePass) + { + // Single-pass (non-instanced) stereo use "wide-buffer" packing. + // Left eye corresponds to the left half of the buffer. + srcScale.x = 0.5f; + } + else if ((XRSettings.stereoRenderingMode == XRSettings.StereoRenderingMode.SinglePassInstanced) + || (XRSettings.stereoRenderingMode == XRSettings.StereoRenderingMode.SinglePassMultiview)) // same as instanced (OpenGL) + { + // Single-pass instanced stereo use texture array packing. + // Left eye corresponds to the first array slice. +#if !UNITY_2019_1_OR_NEWER + // https://unity3d.com/unity/alpha/2019.1.0a13 + // "Graphics: Graphics.Blit and CommandBuffer.Blit methods now support blitting to and from texture arrays." + throw new NotSupportedException("Capturing scene content in single-pass instanced stereo rendering requires" + + " blitting from the Texture2DArray render target of the camera, which is not supported before Unity 2019.1." + + " To use this feature, either upgrade your project to Unity 2019.1+ or use single-pass non-instanced stereo" + + " rendering (XRSettings.stereoRenderingMode = SinglePass)."); +#endif + } + } + + _readBackTex = new RenderTexture(_readBackWidth, _readBackHeight, 0, srcFormat, RenderTextureReadWrite.Linear); + + _commandBuffer = new CommandBuffer(); + _commandBuffer.name = "SceneVideoSource"; + + // Explicitly set the render target to instruct the GPU to discard previous content. + // https://docs.unity3d.com/ScriptReference/Rendering.CommandBuffer.Blit.html recommends this. 
+ //< TODO - This doesn't work + //_commandBuffer.SetRenderTarget(_readBackTex, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store); + + // Copy camera target to readback texture + _commandBuffer.BeginSample("Blit"); +#if UNITY_2019_1_OR_NEWER + int srcSliceIndex = 0; // left eye + int dstSliceIndex = 0; + _commandBuffer.Blit(BuiltinRenderTextureType.CameraTarget, /*BuiltinRenderTextureType.CurrentActive*/_readBackTex, + srcScale, srcOffset, srcSliceIndex, dstSliceIndex); +#else + _commandBuffer.Blit(BuiltinRenderTextureType.CameraTarget, /*BuiltinRenderTextureType.CurrentActive*/_readBackTex, srcScale, srcOffset); +#endif + _commandBuffer.EndSample("Blit"); + + // Copy readback texture to RAM asynchronously, invoking the given callback once done + _commandBuffer.BeginSample("Readback"); + _commandBuffer.RequestAsyncReadback(_readBackTex, 0, TextureFormat.BGRA32, OnSceneFrameReady); + _commandBuffer.EndSample("Readback"); + } + + protected override void OnFrameRequested(in FrameRequest request) + { + // Try to dequeue a frame from the internal frame queue + if (_frameQueue.TryDequeue(out Argb32VideoFrameStorage storage)) + { + var frame = new Argb32VideoFrame + { + width = storage.Width, + height = storage.Height, + stride = (int)storage.Width * 4 + }; + unsafe + { + fixed (void* ptr = storage.Buffer) + { + // Complete the request with a view over the frame buffer (no allocation) + // while the buffer is pinned into memory. The native implementation will + // make a copy into a native memory buffer if necessary before returning. + frame.data = new IntPtr(ptr); + request.CompleteRequest(frame); + } + } + + // Put the allocated buffer back in the pool for reuse + _frameQueue.RecycleStorage(storage); + } + } + + /// + /// Callback invoked by the command buffer when the scene frame GPU readback has completed + /// and the frame is available in CPU memory. + /// + /// The completed and possibly failed GPU readback request. + private void OnSceneFrameReady(AsyncGPUReadbackRequest request) + { + // Read back the data from GPU, if available + if (request.hasError) + { + return; + } + NativeArray rawData = request.GetData(); + Debug.Assert(rawData.Length >= _readBackWidth * _readBackHeight * 4); + unsafe + { + byte* ptr = (byte*)NativeArrayUnsafeUtility.GetUnsafePtr(rawData); + + // Enqueue a frame in the internal frame queue. This will make a copy + // of the frame into a pooled buffer owned by the frame queue. 
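+ // Note: the native buffer returned by AsyncGPUReadbackRequest.GetData() is
+ // transient and only valid during this callback, so the copy must happen
+ // before returning.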
+ var frame = new Argb32VideoFrame + { + data = (IntPtr)ptr, + stride = _readBackWidth * 4, + width = (uint)_readBackWidth, + height = (uint)_readBackHeight + }; + _frameQueue.Enqueue(frame); + } + } + } +} + diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/SceneVideoSource.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/SceneVideoSource.cs.meta new file mode 100644 index 0000000..8506163 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/SceneVideoSource.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 0ec319cd4e697a04191b92210be8c03b +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/UniformColorVideoSource.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/UniformColorVideoSource.cs new file mode 100644 index 0000000..b2c7e7f --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/UniformColorVideoSource.cs @@ -0,0 +1,109 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using Microsoft.MixedReality.WebRTC; +using Microsoft.MixedReality.WebRTC.Unity; +using System; +using System.Collections.Generic; +using UnityEngine; + +namespace Microsoft.MixedReality.WebRTC.Unity +{ + /// + /// A video source producing some colored frames generated programmatically. + /// + public class UniformColorVideoSource : CustomVideoSource + { + /// + /// List of colors to cycle through. + /// + [Tooltip("List of colors to cycle through")] + public List Colors = new List(); + + /// + /// Color cycling speed, in change per second. + /// + [Tooltip("Color cycling speed, in change per second")] + public float Speed = 1f; + + /// + /// Frame width, in pixels. + /// + private const int FrameWidth = 16; + + /// + /// Frame height, in pixels. + /// + private const int FrameHeight = 16; + + /// + /// Row stride, in bytes. + /// + private const int FrameStride = FrameWidth * 4; + + /// + /// Frame buffer size, in pixels. 
+ /// + private const int FrameSize = FrameWidth * FrameHeight; + + private uint[] _data = new uint[FrameSize]; + private int _index = -2; + + protected void Start() + { + // Update buffer on start in case OnFrameRequested() is called before Update() + UpdateBuffer(); + } + + protected void Update() + { + UpdateBuffer(); + } + + protected void UpdateBuffer() + { + if (Colors.Count > 0) + { + int index = Mathf.FloorToInt(Time.time * Speed) % Colors.Count; + if (index != _index) + { + _index = index; + var col32 = Colors[index]; + uint color = col32.b | (uint)col32.g << 8 | (uint)col32.r << 16 | (uint)col32.a << 24; + for (int k = 0; k < FrameSize; ++k) + { + _data[k] = color; + } + } + } + else if (_index != -1) + { + // Fallback to bright purple + _index = -1; + uint color = 0xFFFF00FFu; + for (int k = 0; k < FrameSize; ++k) + { + _data[k] = color; + } + } + } + + protected override void OnFrameRequested(in FrameRequest request) + { + var frame = new Argb32VideoFrame + { + width = FrameWidth, + height = FrameHeight, + stride = FrameStride + }; + unsafe + { + fixed (void* ptr = _data) + { + frame.data = (IntPtr)ptr; + request.CompleteRequest(frame); + } + } + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/UniformColorVideoSource.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/UniformColorVideoSource.cs.meta new file mode 100644 index 0000000..2622ea5 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/UniformColorVideoSource.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 29b5ec2fdf160bb4ebd2d5159450fd2f +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/VideoReceiver.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/VideoReceiver.cs new file mode 100644 index 0000000..2a9263d --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/VideoReceiver.cs @@ -0,0 +1,92 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using UnityEngine; +using UnityEngine.Events; + +namespace Microsoft.MixedReality.WebRTC.Unity +{ + /// + /// Unity event corresponding to a new video stream being started. + /// + [Serializable] + public class VideoStreamStartedEvent : UnityEvent + { }; + + /// + /// Unity event corresponding to an on-going video stream being stopped. + /// + [Serializable] + public class VideoStreamStoppedEvent : UnityEvent + { }; + + /// + /// Endpoint for a WebRTC remote video track. + /// + /// + /// Setting this on a video will enable the corresponding transceiver to receive. + /// A remote track will be exposed through once a connection is established. + /// The video track can optionally be displayed locally with a . + /// + [AddComponentMenu("MixedReality-WebRTC/Video Receiver")] + public class VideoReceiver : MediaReceiver + { + /// + /// Remote video track receiving data from the remote peer. + /// + /// This is null until is set to a non-null value + /// and a remote track is added to that transceiver. + /// + public RemoteVideoTrack VideoTrack { get; private set; } + + /// + /// Event raised when the video stream started. + /// + /// When this event is raised, the followings are true: + /// - The property is a valid remote video track. + /// - The property is true. 
+ /// + /// + /// This event is raised from the main Unity thread to allow Unity object access. + /// + public VideoStreamStartedEvent VideoStreamStarted = new VideoStreamStartedEvent(); + + /// + /// Event raised when the video stream stopped. + /// + /// When this event is raised, the followings are true: + /// - The property is null. + /// - The property is false. + /// + /// + /// This event is raised from the main Unity thread to allow Unity object access. + /// + public VideoStreamStoppedEvent VideoStreamStopped = new VideoStreamStoppedEvent(); + + + /// + public override MediaKind MediaKind => MediaKind.Video; + + /// + public override MediaTrack Track => VideoTrack; + + /// + protected internal override void OnPaired(MediaTrack track) + { + var remoteVideoTrack = (RemoteVideoTrack)track; + Debug.Assert(VideoTrack == null); + VideoTrack = remoteVideoTrack; + VideoStreamStarted.Invoke(VideoTrack); + } + + /// + protected internal override void OnUnpaired(MediaTrack track) + { + Debug.Assert(track is RemoteVideoTrack); + Debug.Assert(VideoTrack == track); + VideoTrack = null; + VideoStreamStopped.Invoke(VideoTrack); + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/VideoReceiver.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/VideoReceiver.cs.meta new file mode 100644 index 0000000..585287c --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/VideoReceiver.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: b5c8b8e916d2cbf40a9dd142c021c4d6 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/VideoRenderer.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/VideoRenderer.cs new file mode 100644 index 0000000..825a880 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/VideoRenderer.cs @@ -0,0 +1,348 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using UnityEngine; +using Unity.Profiling; +using System; +using Microsoft.MixedReality.WebRTC.Unity.Editor; + +namespace Microsoft.MixedReality.WebRTC.Unity +{ + /// + /// Utility component used to play video frames obtained from a WebRTC video track. This can indiscriminately + /// play video frames from a video track source on the local peer as well as video frames from a remote video + /// receiver obtaining its frame from a remote WebRTC peer. + /// + /// + /// This component writes to the attached Material, + /// via the attached Renderer. 
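+ ///
+ /// Typical usage (a sketch; the wiring is usually done in the Unity inspector instead) is
+ /// to connect the VideoStreamStarted and VideoStreamStopped events of a VideoReceiver, or
+ /// of a VideoTrackSource for a local preview, to the StartRendering() and StopRendering()
+ /// methods of this component:
+ ///
+ /// videoReceiver.VideoStreamStarted.AddListener(videoRenderer.StartRendering);
+ /// videoReceiver.VideoStreamStopped.AddListener(videoRenderer.StopRendering);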
+ [RequireComponent(typeof(Renderer))]
+ [AddComponentMenu("MixedReality-WebRTC/Video Renderer")]
+ public class VideoRenderer : MonoBehaviour
+ {
+ [Tooltip("Max playback framerate, in frames per second")]
+ [Range(0.001f, 120f)]
+ public float MaxFramerate = 30f;
+
+ [Header("Statistics")]
+ [ToggleLeft]
+ public bool EnableStatistics = true;
+
+ ///
+ /// A textmesh onto which frame load stat data will be written
+ ///
+ ///
+ /// This is how fast the frames are given from the underlying implementation
+ ///
+ [Tooltip("A textmesh onto which frame load stat data will be written")]
+ public TextMesh FrameLoadStatHolder;
+
+ ///
+ /// A textmesh onto which frame present stat data will be written
+ ///
+ ///
+ /// This is how fast we render frames to the display
+ ///
+ [Tooltip("A textmesh onto which frame present stat data will be written")]
+ public TextMesh FramePresentStatHolder;
+
+ ///
+ /// A textmesh onto which frame skip stat data will be written
+ ///
+ ///
+ /// This is how often we skip presenting an underlying frame
+ ///
+ [Tooltip("A textmesh onto which frame skip stat data will be written")]
+ public TextMesh FrameSkipStatHolder;
+
+ // Source that this renderer is currently subscribed to.
+ private IVideoSource _source;
+
+ ///
+ /// Internal reference to the attached texture
+ ///
+ private Texture2D _textureY = null; // also used for ARGB32
+ private Texture2D _textureU = null;
+ private Texture2D _textureV = null;
+
+ ///
+ /// Internal timing counter
+ ///
+ private float lastUpdateTime = 0.0f;
+
+ private Material videoMaterial;
+ private float _minUpdateDelay;
+
+ private VideoFrameQueue _i420aFrameQueue = null;
+ private VideoFrameQueue _argb32FrameQueue = null;
+
+ private ProfilerMarker displayStatsMarker = new ProfilerMarker("DisplayStats");
+ private ProfilerMarker loadTextureDataMarker = new ProfilerMarker("LoadTextureData");
+ private ProfilerMarker uploadTextureToGpuMarker = new ProfilerMarker("UploadTextureToGPU");
+
+ private void Start()
+ {
+ CreateEmptyVideoTextures();
+
+ // Leave 3ms of margin, otherwise it misses 1 frame and drops to ~20 FPS
+ // when Unity is running at 60 FPS.
+ _minUpdateDelay = Mathf.Max(0f, 1f / Mathf.Max(0.001f, MaxFramerate) - 0.003f);
+ }
+
+ ///
+ /// Start rendering the passed source.
+ ///
+ ///
+ /// Can be used to handle or .
+ ///
+ public void StartRendering(IVideoSource source)
+ {
+ // Remember the source this renderer is subscribed to.
+ _source = source;
+
+ bool isRemote = (source is RemoteVideoTrack);
+ int frameQueueSize = (isRemote ? 5 : 3);
+
+ switch (source.FrameEncoding)
+ {
+ case VideoEncoding.I420A:
+ _i420aFrameQueue = new VideoFrameQueue(frameQueueSize);
+ source.I420AVideoFrameReady += I420AVideoFrameReady;
+ break;
+
+ case VideoEncoding.Argb32:
+ _argb32FrameQueue = new VideoFrameQueue(frameQueueSize);
+ source.Argb32VideoFrameReady += Argb32VideoFrameReady;
+ break;
+ }
+ }
+
+ ///
+ /// Stop rendering the passed source. Must be called with the same source passed to
+ ///
+ ///
+ /// Can be used to handle or .
+ ///
+ public void StopRendering(IVideoSource source)
+ {
+ // Unsubscribe from the frame events of the source; removing a handler which was
+ // never added is a no-op, so both events can be unsubscribed unconditionally.
+ source.I420AVideoFrameReady -= I420AVideoFrameReady;
+ source.Argb32VideoFrameReady -= Argb32VideoFrameReady;
+ _source = null;
+
+ // Clear the video display to not confuse the user who could otherwise
+ // think that the video is still playing but is lagging/frozen.
+ CreateEmptyVideoTextures();
+ }
+
+ protected void OnDisable()
+ {
+ // Clear the video display to not confuse the user who could otherwise
+ // think that the video is still playing but is lagging/frozen.
+ CreateEmptyVideoTextures();
+ }
+
+ protected void I420AVideoFrameReady(I420AVideoFrame frame)
+ {
+ // This callback is generally from a non-UI thread, but Unity object access is only allowed
+ // on the main UI thread, so defer to that point.
+ _i420aFrameQueue.Enqueue(frame);
+ }
+
+ protected void Argb32VideoFrameReady(Argb32VideoFrame frame)
+ {
+ // This callback is generally from a non-UI thread, but Unity object access is only allowed
+ // on the main UI thread, so defer to that point.
+ _argb32FrameQueue.Enqueue(frame);
+ }
+
+ private void CreateEmptyVideoTextures()
+ {
+ // Create a default checkerboard texture which visually indicates
+ // that no data is available. This is useful for debugging and
+ // for the user to know about the state of the video.
+ _textureY = new Texture2D(2, 2);
+ _textureY.SetPixel(0, 0, Color.blue);
+ _textureY.SetPixel(1, 1, Color.blue);
+ _textureY.Apply();
+ _textureU = new Texture2D(2, 2);
+ _textureU.SetPixel(0, 0, Color.blue);
+ _textureU.SetPixel(1, 1, Color.blue);
+ _textureU.Apply();
+ _textureV = new Texture2D(2, 2);
+ _textureV.SetPixel(0, 0, Color.blue);
+ _textureV.SetPixel(1, 1, Color.blue);
+ _textureV.Apply();
+
+ // Assign that texture to the video player's Renderer component
+ videoMaterial = GetComponent().material;
+ if (_i420aFrameQueue != null)
+ {
+ videoMaterial.SetTexture("_YPlane", _textureY);
+ videoMaterial.SetTexture("_UPlane", _textureU);
+ videoMaterial.SetTexture("_VPlane", _textureV);
+ }
+ else if (_argb32FrameQueue != null)
+ {
+ videoMaterial.SetTexture("_MainTex", _textureY);
+ }
+ }
+
+ ///
+ /// Unity Engine Update() hook
+ ///
+ ///
+ /// https://docs.unity3d.com/ScriptReference/MonoBehaviour.Update.html
+ ///
+ private void Update()
+ {
+ if ((_i420aFrameQueue != null) || (_argb32FrameQueue != null))
+ {
+#if UNITY_EDITOR
+ // Inside the Editor, constantly update _minUpdateDelay to
+ // react to user changes to MaxFramerate.
+
+ // Leave 3ms of margin, otherwise it misses 1 frame and drops to ~20 FPS
+ // when Unity is running at 60 FPS.
+ _minUpdateDelay = Mathf.Max(0f, 1f / Mathf.Max(0.001f, MaxFramerate) - 0.003f);
+#endif
+ // FIXME - This will overflow/underflow the queue if not set at the same rate
+ // as the one at which frames are enqueued!
+ var curTime = Time.time;
+ if (curTime - lastUpdateTime >= _minUpdateDelay)
+ {
+ if (_i420aFrameQueue != null)
+ {
+ TryProcessI420AFrame();
+ }
+ else if (_argb32FrameQueue != null)
+ {
+ TryProcessArgb32Frame();
+ }
+ lastUpdateTime = curTime;
+ }
+
+ if (EnableStatistics)
+ {
+ // Share our stats values, if possible.
+ using (var profileScope = displayStatsMarker.Auto())
+ {
+ IVideoFrameQueue stats = (_i420aFrameQueue != null ?
(IVideoFrameQueue)_i420aFrameQueue : _argb32FrameQueue); + if (FrameLoadStatHolder != null) + { + FrameLoadStatHolder.text = stats.QueuedFramesPerSecond.ToString("F2"); + } + if (FramePresentStatHolder != null) + { + FramePresentStatHolder.text = stats.DequeuedFramesPerSecond.ToString("F2"); + } + if (FrameSkipStatHolder != null) + { + FrameSkipStatHolder.text = stats.DroppedFramesPerSecond.ToString("F2"); + } + } + } + } + } + + /// + /// Internal helper that attempts to process frame data in the frame queue + /// + private void TryProcessI420AFrame() + { + if (_i420aFrameQueue.TryDequeue(out I420AVideoFrameStorage frame)) + { + int lumaWidth = (int)frame.Width; + int lumaHeight = (int)frame.Height; + if (_textureY == null || (_textureY.width != lumaWidth || _textureY.height != lumaHeight)) + { + _textureY = new Texture2D(lumaWidth, lumaHeight, TextureFormat.R8, mipChain: false); + videoMaterial.SetTexture("_YPlane", _textureY); + } + int chromaWidth = lumaWidth / 2; + int chromaHeight = lumaHeight / 2; + if (_textureU == null || (_textureU.width != chromaWidth || _textureU.height != chromaHeight)) + { + _textureU = new Texture2D(chromaWidth, chromaHeight, TextureFormat.R8, mipChain: false); + videoMaterial.SetTexture("_UPlane", _textureU); + } + if (_textureV == null || (_textureV.width != chromaWidth || _textureV.height != chromaHeight)) + { + _textureV = new Texture2D(chromaWidth, chromaHeight, TextureFormat.R8, mipChain: false); + videoMaterial.SetTexture("_VPlane", _textureV); + } + + // Copy data from C# buffer into system memory managed by Unity. + // Note: This only "looks right" in Unity because we apply the + // "YUVFeedShader(Unlit)" to the texture (converting YUV planar to RGB). + // Note: Texture2D.LoadRawTextureData() expects some bottom-up texture data but + // the WebRTC video frame is top-down, so the image is uploaded vertically flipped, + // and needs to be flipped by in the shader used to sample it. See #388. + using (var profileScope = loadTextureDataMarker.Auto()) + { + unsafe + { + fixed (void* buffer = frame.Buffer) + { + var src = new IntPtr(buffer); + int lumaSize = lumaWidth * lumaHeight; + _textureY.LoadRawTextureData(src, lumaSize); + src += lumaSize; + int chromaSize = chromaWidth * chromaHeight; + _textureU.LoadRawTextureData(src, chromaSize); + src += chromaSize; + _textureV.LoadRawTextureData(src, chromaSize); + } + } + } + + // Upload from system memory to GPU + using (var profileScope = uploadTextureToGpuMarker.Auto()) + { + _textureY.Apply(); + _textureU.Apply(); + _textureV.Apply(); + } + + // Recycle the video frame packet for a later frame + _i420aFrameQueue.RecycleStorage(frame); + } + } + + /// + /// Internal helper that attempts to process frame data in the frame queue + /// + private void TryProcessArgb32Frame() + { + if (_argb32FrameQueue.TryDequeue(out Argb32VideoFrameStorage frame)) + { + int width = (int)frame.Width; + int height = (int)frame.Height; + if (_textureY == null || (_textureY.width != width || _textureY.height != height)) + { + _textureY = new Texture2D(width, height, TextureFormat.BGRA32, mipChain: false); + videoMaterial.SetTexture("_MainTex", _textureY); + } + + // Copy data from C# buffer into system memory managed by Unity. + // Note: Texture2D.LoadRawTextureData() expects some bottom-up texture data but + // the WebRTC video frame is top-down, so the image is uploaded vertically flipped, + // and needs to be flipped by in the shader used to sample it. See #388. 
+ using (var profileScope = loadTextureDataMarker.Auto()) + { + unsafe + { + fixed (void* buffer = frame.Buffer) + { + var src = new IntPtr(buffer); + int size = width * height * 4; + _textureY.LoadRawTextureData(src, size); + } + } + } + + // Upload from system memory to GPU + using (var profileScope = uploadTextureToGpuMarker.Auto()) + { + _textureY.Apply(); + } + + // Recycle the video frame packet for a later frame + _argb32FrameQueue.RecycleStorage(frame); + } + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/VideoRenderer.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/VideoRenderer.cs.meta new file mode 100644 index 0000000..c52c6eb --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/VideoRenderer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: cb6b02c5bb7ab6b4ea6996c956ebdd21 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/VideoTrackSource.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/VideoTrackSource.cs new file mode 100644 index 0000000..193c81b --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/VideoTrackSource.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Collections.Generic; +using UnityEngine; + +namespace Microsoft.MixedReality.WebRTC.Unity +{ + /// + /// This component represents a video track source, an entity which produces raw video + /// frames for one or more tracks. The source can be added on a peer connection media + /// line to be sent through that peer connection. It is a standalone object, independent + /// of any peer connection, and can be shared with multiple of them. + /// + /// + /// + /// + public abstract class VideoTrackSource : MediaTrackSource + { + /// + /// Video track source object from the underlying C# library that this component encapsulates. + /// + /// The object is owned by this component, which will create it and dispose of it automatically. + /// + public WebRTC.VideoTrackSource Source { get; private set; } = null; + + /// + /// Event raised when the video stream started. + /// + /// When this event is raised, the followings are true: + /// - The property is a valid local video track. + /// - The will become true just after the event + /// is raised, by design. + /// + /// + /// This event is raised from the main Unity thread to allow Unity object access. + /// + public VideoStreamStartedEvent VideoStreamStarted = new VideoStreamStartedEvent(); + + /// + /// Event raised when the video stream stopped. + /// + /// When this event is raised, the followings are true: + /// - The property is null. + /// - The has just become false right before the event + /// was raised, by design. + /// + /// + /// This event is raised from the main Unity thread to allow Unity object access. 
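+ ///
+ /// For a local preview of the captured content, these events can be wired to the
+ /// StartRendering() / StopRendering() methods of a VideoRenderer, exactly as on the
+ /// receiving side.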
+ /// + public VideoStreamStoppedEvent VideoStreamStopped = new VideoStreamStoppedEvent(); + + /// + public override bool IsLive => Source != null; + + /// + public override MediaKind MediaKind => MediaKind.Video; + + protected void AttachSource(WebRTC.VideoTrackSource source) + { + Source = source; + AttachToMediaLines(); + VideoStreamStarted.Invoke(Source); + } + + protected void DisposeSource() + { + if (Source != null) + { + VideoStreamStopped.Invoke(Source); + DetachFromMediaLines(); + + // Video track sources are disposable objects owned by the user (this component) + Source.Dispose(); + Source = null; + } + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/VideoTrackSource.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/VideoTrackSource.cs.meta new file mode 100644 index 0000000..7e028ae --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/VideoTrackSource.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 62f9e054f11129b4f9455289ba6e474b +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/WebcamSource.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/WebcamSource.cs new file mode 100644 index 0000000..d2fcc97 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/WebcamSource.cs @@ -0,0 +1,397 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using UnityEngine; + +#if ENABLE_WINMD_SUPPORT +using global::Windows.Graphics.Holographic; +#endif + +#if UNITY_WSA && !UNITY_EDITOR +using System.Threading.Tasks; +using global::Windows.UI.Core; +using global::Windows.Foundation; +using global::Windows.Media.Core; +using global::Windows.Media.Capture; +using global::Windows.ApplicationModel.Core; +#endif + +#if !UNITY_EDITOR && UNITY_ANDROID +using UnityEngine.Android; +#endif + +namespace Microsoft.MixedReality.WebRTC.Unity +{ + /// + /// Video capture format selection mode for a local video source. + /// + public enum LocalVideoSourceFormatMode + { + /// + /// Automatically select a good resolution and framerate based on the runtime detection + /// of the device the application is running on. + /// This currently overwrites the default WebRTC selection only on HoloLens devices. + /// + Automatic, + + /// + /// Manually specify a video profile unique ID and/or a kind of video profile to use, + /// and additional optional constraints on the resolution and framerate of that profile. + /// + Manual + } + + /// + /// Additional optional constraints applied to the resolution and framerate when selecting + /// a video capture format. + /// + [Serializable] + public struct VideoCaptureConstraints + { + /// + /// Desired resolution width, in pixels, or zero for unconstrained. + /// + public int width; + + /// + /// Desired resolution height, in pixels, or zero for unconstrained. + /// + public int height; + + /// + /// Desired framerate, in frame-per-second, or zero for unconstrained. + /// Note: the comparison is exact, and floating point imprecision may + /// prevent finding a matching format. Use with caution. 
+ /// + public double framerate; + } + + /// + /// This component represents a local video sender generating video frames from a local + /// video capture device (webcam). + /// + [AddComponentMenu("MixedReality-WebRTC/Webcam Source")] + public class WebcamSource : VideoTrackSource + { + /// + /// Optional identifier of the webcam to use. Setting this value forces using the given + /// webcam, and will fail opening any other webcam. + /// Valid values are obtained by calling . + /// + /// + /// This property is purposely not shown in the Unity inspector window, as there is very + /// little reason to hard-code a value for it, which would only work on a specific device + /// with a given immutable hardware. It is still serialized on the off-chance that there + /// is a valid use case for hard-coding it. + /// + /// + [HideInInspector] + public VideoCaptureDevice WebcamDevice = default; + + /// + /// Enable Mixed Reality Capture (MRC) if available on the local device. + /// This option has no effect on devices not supporting MRC, and is silently ignored. + /// + [Tooltip("Enable Mixed Reality Capture (MRC) if available on the local device")] + public bool EnableMixedRealityCapture = true; + + /// + /// Enable the on-screen recording indicator when Mixed Reality Capture (MRC) is + /// available and enabled. + /// This option has no effect on devices not supporting MRC, or if MRC is not enabled. + /// + [Tooltip("Enable the on-screen recording indicator when MRC is enabled")] + public bool EnableMRCRecordingIndicator = true; + + /// + /// Selection mode for the video capture format. + /// + public LocalVideoSourceFormatMode FormatMode = LocalVideoSourceFormatMode.Automatic; + + /// + /// For manual , unique identifier of the video profile to use, + /// or an empty string to leave unconstrained. + /// + public string VideoProfileId = string.Empty; + + /// + /// For manual , kind of video profile to use among a list of predefined + /// ones, or an empty string to leave unconstrained. + /// + public VideoProfileKind VideoProfileKind = VideoProfileKind.Unspecified; + + /// + /// For manual , optional constraints on the resolution and framerate of + /// the capture format. These constraints are additive, meaning a matching format must satisfy + /// all of them at once, in addition of being restricted to the formats supported by the selected + /// video profile or kind of profile. Any negative or zero value means no constraint. + /// + /// + /// Video capture formats for HoloLens 1 and HoloLens 2 are available here: + /// https://docs.microsoft.com/en-us/windows/mixed-reality/locatable-camera + /// + public VideoCaptureConstraints Constraints = new VideoCaptureConstraints() + { + width = 0, + height = 0, + framerate = 0.0 + }; + +#if !UNITY_EDITOR && UNITY_ANDROID + protected bool _androidCameraRequestPending = false; + protected float _androidCameraRequestRetryUntilTime = 0f; +#endif + + protected async void OnEnable() + { + if (Source != null) + { + return; + } + +#if !UNITY_EDITOR && UNITY_ANDROID + // Ensure Android binding is initialized before accessing the native implementation + Android.Initialize(); + + // Check for permission to access the camera + if (!Permission.HasUserAuthorizedPermission(Permission.Camera)) + { + if (!_androidCameraRequestPending) + { + // Monitor the OnApplicationFocus(true) event during the next 5 minutes, + // and check for permission again each time (see below why). 
+ _androidCameraRequestPending = true; + _androidCameraRequestRetryUntilTime = Time.time + 300; + + // Display dialog requesting user permission. This will return immediately, + // and unfortunately there's no good way to tell when this completes. As a rule + // of thumb, application should lose focus, so check when focus resumes should + // be sufficient without having to poll every frame. + Permission.RequestUserPermission(Permission.Camera); + } + return; + } +#elif UNITY_WSA && !UNITY_EDITOR + // Request UWP access to video capture. The OS may show some popup dialog to the + // user to request permission. This will succeed only if the user grants permission. + try + { + // Note that the UWP UI thread and the main Unity app thread are always different. + // https://docs.unity3d.com/Manual/windowsstore-appcallbacks.html + // We leave the code below as an example of generic handling in case this would be used in + // some other place, and in case a future version of Unity decided to change that assumption, + // but currently OnEnable() is always invoked from the main Unity app thread so here the first + // branch is never taken. + if (UnityEngine.WSA.Application.RunningOnUIThread()) + { + await RequestAccessAsync(); + } + else + { + UnityEngine.WSA.Application.InvokeOnUIThread(() => RequestAccessAsync(), waitUntilDone: true); + } + } + catch (Exception ex) + { + // Log an error and prevent activation + Debug.LogError($"Video access failure: {ex.Message}."); + this.enabled = false; + return; + } +#endif + + // Handle automatic capture format constraints + string videoProfileId = VideoProfileId; + var videoProfileKind = VideoProfileKind; + int width = Constraints.width; + int height = Constraints.height; + double framerate = Constraints.framerate; +#if ENABLE_WINMD_SUPPORT + if (FormatMode == LocalVideoSourceFormatMode.Automatic) + { + // Do not constrain resolution by default, unless the device calls for it (see below). + width = 0; // auto + height = 0; // auto + + // Avoid constraining the framerate; this is generally not necessary (formats are listed + // with higher framerates first) and is error-prone as some formats report 30.0 FPS while + // others report 29.97 FPS. + framerate = 0; // auto + + // For HoloLens, use video profile to reduce resolution and save power/CPU/bandwidth + if (global::Windows.Graphics.Holographic.HolographicSpace.IsAvailable) + { + if (!global::Windows.Graphics.Holographic.HolographicDisplay.GetDefault().IsOpaque) + { + if (global::Windows.ApplicationModel.Package.Current.Id.Architecture == global::Windows.System.ProcessorArchitecture.X86) + { + // Holographic AR (transparent) x86 platform - Assume HoloLens 1 + videoProfileKind = WebRTC.VideoProfileKind.VideoRecording; // No profile in VideoConferencing + width = 896; // Target 896 x 504 + } + else + { + // Holographic AR (transparent) non-x86 platform - Assume HoloLens 2 + videoProfileKind = WebRTC.VideoProfileKind.VideoConferencing; + width = 960; // Target 960 x 540 + } + } + } + } +#elif !UNITY_EDITOR && UNITY_ANDROID + if (FormatMode == LocalVideoSourceFormatMode.Automatic) + { + // Avoid constraining the framerate; this is generally not necessary (formats are listed + // with higher framerates first) and is error-prone as some formats report 30.0 FPS while + // others report 29.97 FPS. 
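+ // (For instance, an exact match against 30.0 would reject a camera that lists
+ // 29.97 FPS, so leaving the framerate unconstrained is the safer default.)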
+ framerate = 0; // auto + + string deviceId = WebcamDevice.id; + if (string.IsNullOrEmpty(deviceId)) + { + IReadOnlyList listedDevices = await PeerConnection.GetVideoCaptureDevicesAsync(); + if (listedDevices.Count > 0) + { + deviceId = listedDevices[0].id; + } + } + if (!string.IsNullOrEmpty(deviceId)) + { + // Find the closest format to 720x480, independent of framerate + IReadOnlyList formats = await DeviceVideoTrackSource.GetCaptureFormatsAsync(deviceId); + double smallestDiff = double.MaxValue; + bool hasFormat = false; + foreach (var fmt in formats) + { + double diff = Math.Abs(fmt.width - 720) + Math.Abs(fmt.height - 480); + if ((diff < smallestDiff) || !hasFormat) + { + hasFormat = true; + smallestDiff = diff; + width = (int)fmt.width; + height = (int)fmt.height; + } + } + if (hasFormat) + { + Debug.Log($"WebcamSource automated mode selected resolution {width}x{height} for Android video capture device #{deviceId}."); + } + } + } +#endif + + // TODO - Fix codec selection (was as below before change) + + // Force again PreferredVideoCodec right before starting the local capture, + // so that modifications to the property done after OnPeerInitialized() are + // accounted for. + //< FIXME + //PeerConnection.Peer.PreferredVideoCodec = PreferredVideoCodec; + + // Check H.264 requests on Desktop (not supported) + //#if !ENABLE_WINMD_SUPPORT + // if (PreferredVideoCodec == "H264") + // { + // Debug.LogError("H.264 encoding is not supported on Desktop platforms. Using VP8 instead."); + // PreferredVideoCodec = "VP8"; + // } + //#endif + + // Create the track + var deviceConfig = new LocalVideoDeviceInitConfig + { + videoDevice = WebcamDevice, + videoProfileId = videoProfileId, + videoProfileKind = videoProfileKind, + width = (width > 0 ? (uint?)width : null), + height = (height > 0 ? (uint?)height : null), + framerate = (framerate > 0 ? (double?)framerate : null), + enableMrc = EnableMixedRealityCapture, + enableMrcRecordingIndicator = EnableMRCRecordingIndicator + }; + try + { + var source = await DeviceVideoTrackSource.CreateAsync(deviceConfig); + AttachSource(source); + } + catch (Exception ex) + { + Debug.LogError($"Failed to create device track source for {nameof(WebcamSource)} component '{name}'."); + Debug.LogException(ex, this); + return; + } + } + +#if !UNITY_EDITOR && UNITY_ANDROID + protected void OnApplicationFocus(bool hasFocus) + { + if (!hasFocus) + { + return; + } + + // If focus is restored after a pending camera access request, check the permission again + if (_androidCameraRequestPending) + { + _androidCameraRequestPending = false; + + if (Permission.HasUserAuthorizedPermission(Permission.Camera)) + { + // If now authorized, start capture as if just enabled + Debug.Log("User granted authorization to access webcam, starting WebcamSource now..."); + OnEnable(); + } + else if (Time.time <= _androidCameraRequestRetryUntilTime) + { + // OnApplicationFocus(true) may be called for unrelated reason(s) so do not disable on first call, + // but instead retry during a given period after the request was made, until we're reasonably + // confident that the user dialog was actually answered (that is, that OnApplicationFocus(true) was + // called because of that dialog, and not because of another reason). + // This may lead to false positives (checking permission after the user denied it), but the user + // dialog will not popup again, so this is all in the background and essentially harmless. 
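+ // Keep the request flagged as pending so that the permission is checked again
+ // on the next focus gain.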
+ _androidCameraRequestPending = true; + } + else + { + // Some reasonable time passed since we made the permission request, and we still get a denied + // answer, so assume the user actually denied it and stop retrying. + _androidCameraRequestRetryUntilTime = 0f; + Debug.LogError("User denied Camera permission; cannot use WebcamSource. Forcing enabled=false."); + enabled = false; + } + } + } +#endif + + protected void OnDisable() + { + DisposeSource(); + } + +#if UNITY_WSA && !UNITY_EDITOR + /// + /// Internal UWP helper to ensure device access. + /// + /// + /// This must be called from the main UWP UI thread (not the main Unity app thread). + /// + private Task RequestAccessAsync() + { + // On UWP the app must have the "webcam" capability, and the user must allow webcam + // access. So check that access before trying to initialize the WebRTC library, as this + // may result in a popup window being displayed the first time, which needs to be accepted + // before the camera can be accessed by WebRTC. + var mediaAccessRequester = new MediaCapture(); + var mediaSettings = new MediaCaptureInitializationSettings(); + mediaSettings.AudioDeviceId = ""; + mediaSettings.VideoDeviceId = ""; + mediaSettings.StreamingCaptureMode = StreamingCaptureMode.Video; + mediaSettings.PhotoCaptureSource = PhotoCaptureSource.VideoPreview; + mediaSettings.SharingMode = MediaCaptureSharingMode.SharedReadOnly; // for MRC and lower res camera + return mediaAccessRequester.InitializeAsync(mediaSettings).AsTask(); + } +#endif + } +} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/WebcamSource.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/WebcamSource.cs.meta new file mode 100644 index 0000000..beea681 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/WebcamSource.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 8a3a632a87fbf1e4db435c4abae3ac33 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/WorkQueue.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/WorkQueue.cs new file mode 100644 index 0000000..1585c9b --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/WorkQueue.cs @@ -0,0 +1,99 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using System.Collections.Concurrent; +using System.Diagnostics; +using System.Threading; +using UnityEngine; + +namespace Microsoft.MixedReality.WebRTC.Unity +{ + /// + /// Base class providing some utility work queue to dispatch free-threaded actions + /// to the main Unity application thread, where the handler(s) can safely access + /// Unity objects. + /// + public class WorkQueue : MonoBehaviour + { + /// + /// Check if the current thread is the main Unity application thread where + /// it is safe to access Unity objects. + /// + /// + /// Should be only called once the object is awake. + /// + public bool IsMainAppThread + { + get + { + UnityEngine.Debug.Assert(_mainAppThread != null, "This method can only be called once the object is awake."); + return Thread.CurrentThread == _mainAppThread; + } + } + + /// + /// Ensure the current method is running on the main Unity application thread. + /// + /// + /// Should be only called once the object is awake. 
+ /// + [Conditional("UNITY_ASSERTIONS")] + public void EnsureIsMainAppThread() + { + UnityEngine.Debug.Assert(IsMainAppThread, "This method can only be called from the main Unity application thread."); + } + + /// + /// Invoke the specified action on the main Unity app thread. + /// + /// The action to execute. + /// + /// If this object is awake, and this method is called from the main Unity app thread, + /// will be executed synchronously. Otherwise, + /// will be called during the next call to this object's . + /// + public void InvokeOnAppThread(Action action) + { + if (_mainAppThread != null && IsMainAppThread) + { + action(); + } + else + { + _mainThreadWorkQueue.Enqueue(action); + } + } + + protected virtual void Awake() + { + // Awake() is always called from the main Unity app thread + _mainAppThread = Thread.CurrentThread; + } + + /// + /// Implementation of MonoBehaviour.Update + /// to execute from the main Unity app thread any background work enqueued from free-threaded callbacks. + /// + protected virtual void Update() + { + // Execute any pending work enqueued by background tasks + while (_mainThreadWorkQueue.TryDequeue(out Action workload)) + { + workload(); + } + } + + /// + /// Internal queue used to marshal work back to the main Unity app thread, which is the + /// only thread where access to Unity objects is allowed. This is used by free-threaded + /// callbacks to defer some of their work, generally a final user notification via an event. + /// + private readonly ConcurrentQueue _mainThreadWorkQueue = new ConcurrentQueue(); + + /// + /// Reference to the main Unity application thread where it is safe to access Unity objects. + /// + private Thread _mainAppThread = null; + } +} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/WorkQueue.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/WorkQueue.cs.meta new file mode 100644 index 0000000..4ffb98b --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Media/WorkQueue.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 96d33009488b64d4abb57a859dcd8966 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/PeerConnection.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/PeerConnection.cs new file mode 100644 index 0000000..9563bfa --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/PeerConnection.cs @@ -0,0 +1,861 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using UnityEngine; +using UnityEngine.Events; +using System.Collections.Concurrent; +using System.Text; +using System.Runtime.CompilerServices; + +#if UNITY_WSA && !UNITY_EDITOR +using global::Windows.UI.Core; +using global::Windows.Foundation; +using global::Windows.Media.Core; +using global::Windows.Media.Capture; +using global::Windows.ApplicationModel.Core; +#endif + +[assembly: InternalsVisibleTo("Microsoft.MixedReality.WebRTC.Unity.Tests.Runtime")] + +namespace Microsoft.MixedReality.WebRTC.Unity +{ + /// + /// Enumeration of the different types of ICE servers. 
+ /// + public enum IceType + { + /// + /// Indicates there is no ICE information + /// + /// + /// Under normal use, this should not be used + /// + None = 0, + + /// + /// Indicates ICE information is of type STUN + /// + /// + /// https://en.wikipedia.org/wiki/STUN + /// + Stun, + + /// + /// Indicates ICE information is of type TURN + /// + /// + /// https://en.wikipedia.org/wiki/Traversal_Using_Relays_around_NAT + /// + Turn + } + + /// + /// ICE server as a serializable data structure for the Unity inspector. + /// + [Serializable] + public struct ConfigurableIceServer + { + /// + /// The type of ICE server. + /// + [Tooltip("Type of ICE server")] + public IceType Type; + + /// + /// The unqualified URI of the server. + /// + /// + /// The URI must not have any stun: or turn: prefix. + /// + [Tooltip("ICE server URI, without any stun: or turn: prefix.")] + public string Uri; + + /// + /// Convert the server to the representation the underlying implementation use. + /// + /// The stringified server information. + public override string ToString() + { + return string.Format("{0}:{1}", Type.ToString().ToLowerInvariant(), Uri); + } + } + + /// + /// A UnityEvent that represents a WebRTC error event. + /// + [Serializable] + public class WebRTCErrorEvent : UnityEvent + { + } + + /// + /// Exception thrown when an invalid transceiver media kind was detected, generally when trying to pair a + /// transceiver of one media kind with a media line of a different media kind. + /// + public class InvalidTransceiverMediaKindException : Exception + { + /// + public InvalidTransceiverMediaKindException() + : base("Invalid transceiver kind.") + { + } + + /// + public InvalidTransceiverMediaKindException(string message) + : base(message) + { + } + + /// + public InvalidTransceiverMediaKindException(string message, Exception inner) + : base(message, inner) + { + } + } + + /// + /// High-level wrapper for Unity WebRTC functionalities. + /// This is the API entry point for establishing a connection with a remote peer. + /// + /// + /// The component initializes the underlying asynchronously + /// when enabled, and closes it when disabled. The event is called + /// when the connection object is ready to be used. Call + /// to create an offer for a remote peer. + /// + [AddComponentMenu("MixedReality-WebRTC/Peer Connection")] + public class PeerConnection : WorkQueue, ISerializationCallbackReceiver + { + /// + /// Retrieves the underlying peer connection object once initialized. + /// + /// + /// If has not fired, this will be null. + /// + public WebRTC.PeerConnection Peer { get; private set; } = null; + + + #region Behavior settings + + /// + /// Automatically create a new offer whenever a renegotiation needed event is received. + /// + /// + /// Note that the renegotiation needed event may be dispatched asynchronously, so it is + /// discourages to toggle this field ON and OFF. Instead, the user should choose an + /// approach (manual or automatic) and stick to it. + /// + /// In particular, temporarily setting this to false during a batch of changes and + /// setting it back to true right after the last change may or may not produce an + /// automatic offer, depending on whether the negotiated event was dispatched while the + /// property was still false or not. 
+ /// + [Tooltip("Automatically create a new offer when receiving a renegotiation needed event.")] + [Editor.ToggleLeft] + public bool AutoCreateOfferOnRenegotiationNeeded = true; + + /// + /// Flag to log all errors to the Unity console automatically. + /// + [Tooltip("Automatically log all errors to the Unity console.")] + [Editor.ToggleLeft] + public bool AutoLogErrorsToUnityConsole = true; + + #endregion + + + #region Interactive Connectivity Establishment (ICE) + + /// + /// Set of ICE servers the WebRTC library will use to try to establish a connection. + /// + [Tooltip("Optional set of ICE servers (STUN and/or TURN)")] + public List IceServers = new List() + { + new ConfigurableIceServer() + { + Type = IceType.Stun, + Uri = "stun.l.google.com:19302" + } + }; + + /// + /// Optional username for the ICE servers. + /// + [Tooltip("Optional username for the ICE servers")] + public string IceUsername; + + /// + /// Optional credential for the ICE servers. + /// + [Tooltip("Optional credential for the ICE servers")] + public string IceCredential; + + #endregion + + + #region Events + + /// + /// Event fired after the peer connection is initialized and ready for use. + /// + [Tooltip("Event fired after the peer connection is initialized and ready for use")] + public UnityEvent OnInitialized = new UnityEvent(); + + /// + /// Event fired after the peer connection is shut down and cannot be used anymore. + /// + [Tooltip("Event fired after the peer connection is shut down and cannot be used anymore")] + public UnityEvent OnShutdown = new UnityEvent(); + + /// + /// Event that occurs when a WebRTC error occurs + /// + [Tooltip("Event that occurs when a WebRTC error occurs")] + public WebRTCErrorEvent OnError = new WebRTCErrorEvent(); + + #endregion + + + #region Private variables + + /// + /// Underlying native peer connection wrapper. + /// + /// + /// Unlike the public property, this is never NULL, + /// but can be an uninitialized peer. + /// + private WebRTC.PeerConnection _nativePeer = null; + + /// + /// List of transceiver media lines and their associated media sender/receiver components. + /// + [SerializeField] + private List _mediaLines = new List(); + + // Indicates if Awake has been called. Used by media lines to figure out whether to + // invoke callbacks or not. + internal bool IsAwake { get; private set; } + + #endregion + + + #region Public methods + + /// + /// Enumerate the video capture devices available as a WebRTC local video feed source. + /// + /// The list of local video capture devices available to WebRTC. + public static Task> GetVideoCaptureDevicesAsync() + { + return DeviceVideoTrackSource.GetCaptureDevicesAsync(); + } + + /// + /// Initialize the underlying WebRTC peer connection. + /// + /// + /// This method must be called once before using the peer connection. If + /// is true then it is automatically called during MonoBehaviour.Start(). + /// + /// This method is asynchronous and completes its task when the initializing completed. + /// On successful completion, it also trigger the event. + /// Note however that this completion is free-threaded and complete immediately when the + /// underlying peer connection is initialized, whereas any + /// event handler is invoked when control returns to the main Unity app thread. The former + /// is faster, but does not allow accessing the underlying peer connection because it + /// returns before executed. Therefore it is generally + /// recommended to listen to the event, and ignore the returned + /// object. 
+ /// + /// If the peer connection is already initialized, this method returns immediately with + /// a object. The caller can check + /// that the property is non-null to confirm that the connection + /// is in fact initialized. + /// + private Task InitializeAsync(CancellationToken token = default(CancellationToken)) + { + CreateNativePeerConnection(); + + // Ensure Android binding is initialized before accessing the native implementation + Android.Initialize(); + +#if UNITY_WSA && !UNITY_EDITOR + if (UnityEngine.WSA.Application.RunningOnUIThread()) +#endif + { + return RequestAccessAndInitAsync(token); + } +#if UNITY_WSA && !UNITY_EDITOR + else + { + UnityEngine.WSA.Application.InvokeOnUIThread(() => RequestAccessAndInitAsync(token), waitUntilDone: true); + return Task.CompletedTask; + } +#endif + } + + /// + /// Add a new media line of the given kind. + /// + /// This method creates a media line, which expresses an intent from the user to get a transceiver. + /// The actual object creation is delayed until a session + /// negotiation is completed. + /// + /// Once the media line is created, the user can then assign its and + /// properties to express their intent to send and/or receive some media + /// through the transceiver that will be associated with that media line once a session is negotiated. + /// This information is used in subsequent negotiations to derive a + /// to negotiate. Therefore users + /// should avoid modifying the property manually when using + /// the Unity library, and instead modify the and + /// properties. + /// + /// The kind of media (audio or video) for the transceiver. + /// A newly created media line, which will be associated with a transceiver once the next session + /// is negotiated. + public MediaLine AddMediaLine(MediaKind kind) + { + var ml = new MediaLine(this, kind); + _mediaLines.Add(ml); + return ml; + } + + /// + /// Create a new connection offer, either for a first connection to the remote peer, or for + /// renegotiating some new or removed transceivers. + /// + /// This method submits an internal task to create an SDP offer message. Once the message is + /// created, the implementation raises the + /// event to allow the user to send the message via the chosen signaling solution to the remote + /// peer. + /// + ///
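+ ///
+ /// A minimal usage sketch: wire StartConnectionIgnoreError() to the OnInitialized event
+ /// in the inspector, or call this method from a script once that event has fired, e.g.
+ /// peerConnection.OnInitialized.AddListener(peerConnection.StartConnectionIgnoreError);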
+ ///
+ /// IMPORTANT: This method is very similar to the CreateOffer() method available in the underlying
+ /// C# library, and actually calls it. However it also performs additional work in order to pair the
+ /// transceivers of the local and remote peer. Therefore Unity applications must call this method
+ /// instead of the C# library one to ensure transceiver pairing works as intended.
+ /// + /// true if the offer creation task was submitted successfully, and false otherwise. + /// The offer SDP message is always created asynchronously. + /// + /// + /// This method can only be called from the main Unity application thread, where Unity objects can + /// be safely accessed. + /// + public bool StartConnection() + { + // MediaLine manipulates some MonoBehaviour objects when managing senders and receivers + EnsureIsMainAppThread(); + + if (Peer == null) + { + throw new InvalidOperationException("Cannot create an offer with an uninitialized peer."); + } + + // Batch all changes into a single offer + AutoCreateOfferOnRenegotiationNeeded = false; + + // Add all new transceivers for local tracks. Since transceivers are only paired by negotiated mid, + // we need to know which peer sends the offer before adding the transceivers on the offering side only, + // and then pair them on the receiving side. Otherwise they are duplicated, as the transceiver mid from + // locally-created transceivers is not negotiated yet, so ApplyRemoteDescriptionAsync() won't be able + // to find them and will re-create a new set of transceivers, leading to duplicates. + // So we wait until we know this peer is the offering side, and add transceivers to it right before + // creating an offer. The remote peer will then match the transceivers by index after it applied the offer, + // then add any missing one. + + // Update all transceivers, whether previously existing or just created above + var transceivers = _nativePeer.Transceivers; + int index = 0; + foreach (var mediaLine in _mediaLines) + { + // Ensure each media line has a transceiver + Transceiver tr = mediaLine.Transceiver; + if (tr != null) + { + // Media line already had a transceiver from a previous session negotiation + Debug.Assert(tr.MlineIndex >= 0); // associated + } + else + { + // Create new transceivers for a media line added since last session negotiation. + + // Compute the transceiver desired direction based on what the local peer expects, both in terms + // of sending and in terms of receiving. Note that this means the remote peer will not be able to + // send any data if the local peer did not add a remote source first. + // Tracks are not tested explicitly since the local track can be swapped on-the-fly without renegotiation, + // and the remote track is generally not added yet at the beginning of the negotiation, but only when + // the remote description is applied (so for the offering side, at the end of the exchange when the + // answer is received). + bool wantsSend = (mediaLine.Source != null); + bool wantsRecv = (mediaLine.Receiver != null); + var wantsDir = Transceiver.DirectionFromSendRecv(wantsSend, wantsRecv); + var settings = new TransceiverInitSettings + { + Name = $"mrsw#{index}", + InitialDesiredDirection = wantsDir + }; + tr = _nativePeer.AddTransceiver(mediaLine.MediaKind, settings); + try + { + mediaLine.PairTransceiver(tr); + } + catch (Exception ex) + { + LogErrorOnMediaLineException(ex, mediaLine, tr); + } + } + Debug.Assert(tr != null); + Debug.Assert(transceivers[index] == tr); + ++index; + } + + // Create the offer + AutoCreateOfferOnRenegotiationNeeded = true; + return _nativePeer.CreateOffer(); + } + + /// + /// Call and discard the result. Can be wired to a . + /// + public void StartConnectionIgnoreError() + { + _ = StartConnection(); + } + + /// + /// Pass the given SDP description received from the remote peer via signaling to the + /// underlying WebRTC implementation, which will parse and use it. 
+ /// + /// This must be called by the signaler when receiving a message. Once this operation + /// has completed, it is safe to call . + /// + ///
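+ ///
+ /// For example, a signaler that just received an offer would typically do (sketch):
+ /// await peerConnection.HandleConnectionMessageAsync(sdpOffer);
+ /// peerConnection.Peer.CreateAnswer();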
+ ///
+ /// IMPORTANT: This method is very similar to the SetRemoteDescriptionAsync() method available in the
+ /// underlying C# library, and actually calls it. However it also performs additional work in order
+ /// to pair the transceivers of the local and remote peer. Therefore Unity applications must call
+ /// this method instead of the C# library one to ensure transceiver pairing works as intended.
+ /// The SDP message to handle. + /// A task which completes once the remote description has been applied and transceivers + /// have been updated. + /// The peer connection is not intialized. + /// + /// This method can only be called from the main Unity application thread, where Unity objects can + /// be safely accessed. + /// + public async Task HandleConnectionMessageAsync(SdpMessage message) + { + // MediaLine manipulates some MonoBehaviour objects when managing senders and receivers + EnsureIsMainAppThread(); + + if (!isActiveAndEnabled) + { + Debug.LogWarning("Message received by disabled PeerConnection"); + return; + } + + // First apply the remote description + try + { + await Peer.SetRemoteDescriptionAsync(message); + } + catch (Exception ex) + { + Debug.LogError($"Cannot apply remote description: {ex.Message}"); + } + + // Sort associated transceiver by media line index. The media line index is not the index of + // the transceiver, but they are both monotonically increasing, so sorting by one or the other + // yields the same ordered collection, which allows pairing transceivers and media lines. + // TODO - Ensure PeerConnection.Transceivers is already sorted + var transceivers = new List(_nativePeer.AssociatedTransceivers); + transceivers.Sort((tr1, tr2) => (tr1.MlineIndex - tr2.MlineIndex)); + int numAssociatedTransceivers = transceivers.Count; + int numMatching = Math.Min(numAssociatedTransceivers, _mediaLines.Count); + + // Once applied, try to pair transceivers and remote tracks with the Unity receiver components + if (message.Type == SdpMessageType.Offer) + { + // Match transceivers with media line, in order + for (int i = 0; i < numMatching; ++i) + { + var tr = transceivers[i]; + var mediaLine = _mediaLines[i]; + if (mediaLine.Transceiver == null) + { + mediaLine.PairTransceiver(tr); + } + else + { + Debug.Assert(tr == mediaLine.Transceiver); + } + + // Associate the transceiver with the media line, if not already done, and associate + // the track components of the media line to the tracks of the transceiver. + try + { + mediaLine.UpdateAfterSdpReceived(); + } + catch (Exception ex) + { + LogErrorOnMediaLineException(ex, mediaLine, tr); + } + + // Check if the remote peer was planning to send something to this peer, but cannot. 
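+ // That is, the local media line has no receiver component, so any media the remote
+ // peer sends on this transceiver would be silently dropped; warn the user below.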
+ bool wantsRecv = (mediaLine.Receiver != null); + if (!wantsRecv) + { + var desDir = tr.DesiredDirection; + if (Transceiver.HasRecv(desDir)) + { + string peerName = name; + int idx = i; + InvokeOnAppThread(() => LogWarningOnMissingReceiver(peerName, idx)); + } + } + } + + // Ignore extra transceivers without a registered component to attach + if (numMatching < numAssociatedTransceivers) + { + string peerName = name; + InvokeOnAppThread(() => + { + for (int i = numMatching; i < numAssociatedTransceivers; ++i) + { + LogWarningOnIgnoredTransceiver(peerName, i); + } + }); + } + } + else if (message.Type == SdpMessageType.Answer) + { + // Associate registered media senders/receivers with existing transceivers + for (int i = 0; i < numMatching; ++i) + { + Transceiver tr = transceivers[i]; + var mediaLine = _mediaLines[i]; + Debug.Assert(mediaLine.Transceiver == transceivers[i]); + mediaLine.UpdateAfterSdpReceived(); + } + + // Ignore extra transceivers without a registered component to attach + if (numMatching < numAssociatedTransceivers) + { + string peerName = name; + InvokeOnAppThread(() => + { + for (int i = numMatching; i < numAssociatedTransceivers; ++i) + { + LogWarningOnIgnoredTransceiver(peerName, i); + } + }); + } + } + } + + /// + /// Uninitialize the underlying WebRTC library, effectively cleaning up the allocated peer connection. + /// + /// + /// will be null afterward. + /// + private void Uninitialize() + { + Debug.Assert(_nativePeer.Initialized); + // Fire signals before doing anything else to allow listeners to clean-up, + // including un-registering any callback from the connection. + OnShutdown.Invoke(); + + // Prevent publicly accessing the native peer after it has been deinitialized. + // This does not prevent systems caching a reference from accessing it, but it + // is their responsibility to check that the peer is initialized. + Peer = null; + + // Detach all transceivers. This prevents senders/receivers from trying to access + // them during their clean-up sequence, as transceivers are about to be destroyed + // by the native implementation. + foreach (var mediaLine in _mediaLines) + { + mediaLine.UnpairTransceiver(); + } + + // Close the connection and release native resources. + _nativePeer.Dispose(); + _nativePeer = null; + } + + #endregion + + + #region Unity MonoBehaviour methods + + protected override void Awake() + { + base.Awake(); + IsAwake = true; + foreach (var ml in _mediaLines) + { + ml.Awake(); + } + } + + /// + /// Unity Engine OnEnable() hook + /// + /// + /// See + /// + private void OnEnable() + { + if (AutoLogErrorsToUnityConsole) + { + OnError.AddListener(OnError_Listener); + } + InitializeAsync(); + } + + /// + /// Unity Engine OnDisable() hook + /// + /// + /// https://docs.unity3d.com/ScriptReference/MonoBehaviour.OnDisable.html + /// + private void OnDisable() + { + Uninitialize(); + OnError.RemoveListener(OnError_Listener); + } + + private void OnDestroy() + { + foreach (var ml in _mediaLines) + { + ml.OnDestroy(); + } + } + + #endregion + + + #region Private implementation + + public void OnBeforeSerialize() { } + + public void OnAfterDeserialize() + { + foreach (var ml in _mediaLines) + { + ml.Peer = this; + } + } + + /// + /// Create a new native peer connection and register event handlers to it. + /// This does not initialize the peer connection yet. 
+ /// + private void CreateNativePeerConnection() + { + // Create the peer connection managed wrapper and its native implementation + _nativePeer = new WebRTC.PeerConnection(); + + _nativePeer.AudioTrackAdded += + (RemoteAudioTrack track) => + { + // Tracks will be output by AudioReceivers, so avoid outputting them twice. + track.OutputToDevice(false); + }; + } + + /// + /// Internal helper to ensure device access and continue initialization. + /// + /// + /// On UWP this must be called from the main UI thread. + /// + private Task RequestAccessAndInitAsync(CancellationToken token) + { +#if UNITY_WSA && !UNITY_EDITOR + // FIXME - Use ADM2 instead, this /maybe/ avoids this. + // On UWP the app must have the "microphone" capability, and the user must allow microphone + // access. This is due to the audio module (ADM1) being initialized at startup, even if no audio + // track is used. Preventing access to audio crashes the ADM1 at startup and the entire application. + var mediaAccessRequester = new MediaCapture(); + var mediaSettings = new MediaCaptureInitializationSettings(); + mediaSettings.AudioDeviceId = ""; + mediaSettings.VideoDeviceId = ""; + mediaSettings.StreamingCaptureMode = StreamingCaptureMode.Audio; + mediaSettings.PhotoCaptureSource = PhotoCaptureSource.VideoPreview; + mediaSettings.SharingMode = MediaCaptureSharingMode.SharedReadOnly; // for MRC and lower res camera + var accessTask = mediaAccessRequester.InitializeAsync(mediaSettings).AsTask(token); + return accessTask.ContinueWith(prevTask => + { + token.ThrowIfCancellationRequested(); + + if (prevTask.Exception == null) + { + InitializePluginAsync(token); + } + else + { + var ex = prevTask.Exception; + InvokeOnAppThread(() => OnError.Invoke($"Audio access failure: {ex.Message}.")); + } + }, token); +#else + return InitializePluginAsync(token); +#endif + } + + /// + /// Internal handler to actually initialize the plugin. + /// + private Task InitializePluginAsync(CancellationToken token) + { + Debug.Log("Initializing WebRTC plugin..."); + var config = new PeerConnectionConfiguration(); + foreach (var server in IceServers) + { + config.IceServers.Add(new IceServer + { + Urls = { server.ToString() }, + TurnUserName = IceUsername, + TurnPassword = IceCredential + }); + } + return _nativePeer.InitializeAsync(config, token).ContinueWith((initTask) => + { + token.ThrowIfCancellationRequested(); + + Exception ex = initTask.Exception; + if (ex != null) + { + InvokeOnAppThread(() => + { + var errorMessage = new StringBuilder(); + errorMessage.Append("WebRTC plugin initializing failed. See full log for exception details.\n"); + while (ex is AggregateException ae) + { + errorMessage.Append($"AggregationException: {ae.Message}\n"); + ex = ae.InnerException; + } + errorMessage.Append($"Exception: {ex.Message}"); + OnError.Invoke(errorMessage.ToString()); + }); + throw initTask.Exception; + } + + InvokeOnAppThread(OnPostInitialize); + }, token); + } + + /// + /// Callback fired on the main Unity app thread once the WebRTC plugin was initialized successfully. + /// + private void OnPostInitialize() + { + Debug.Log("WebRTC plugin initialized successfully."); + + if (AutoCreateOfferOnRenegotiationNeeded) + { + _nativePeer.RenegotiationNeeded += Peer_RenegotiationNeeded; + } + + // Once the peer is initialized, it becomes publicly accessible. + // This prevent scripts from accessing it before it is initialized. 
+ Peer = _nativePeer; + + OnInitialized.Invoke(); + } + + private void Peer_RenegotiationNeeded() + { + // If already connected, update the connection on the fly. + // If not, wait for user action and don't automatically connect. + if (AutoCreateOfferOnRenegotiationNeeded && _nativePeer.IsConnected) + { + // Defer to the main app thread, because this implementation likely will + // again trigger the renegotiation needed event, which is not re-entrant. + // This also allows accessing Unity objects, and makes it safer in general + // for other objects. + InvokeOnAppThread(() => StartConnection()); + } + } + + /// + /// Internal handler for on-error, if is true + /// + /// The error message + private void OnError_Listener(string error) + { + Debug.LogError(error); + } + + /// + /// Log an error when receiving an exception related to a media line and transceiver pairing. + /// + /// The exception to log. + /// The media line associated with the exception. + /// The transceiver associated with the exception. + private void LogErrorOnMediaLineException(Exception ex, MediaLine mediaLine, Transceiver transceiver) + { + // Dispatch to main thread to access Unity objects to get their names + InvokeOnAppThread(() => + { + string msg; + if (ex is InvalidTransceiverMediaKindException) + { + msg = $"Peer connection \"{name}\" received {transceiver.MediaKind} transceiver #{transceiver.MlineIndex} \"{transceiver.Name}\", but local peer expected some {mediaLine.MediaKind} transceiver instead."; + if (mediaLine.Source != null) + { + msg += $" Sender \"{(mediaLine.Source as MonoBehaviour).name}\" will be ignored."; + } + if (mediaLine.Receiver != null) + { + msg += $" Receiver \"{(mediaLine.Receiver as MonoBehaviour).name}\" will be ignored."; + } + } + else + { + // Generic exception, log its message + msg = ex.Message; + } + Debug.LogError(msg); + }); + } + + private void LogWarningOnMissingReceiver(string peerName, int trIndex) + { + Debug.LogWarning($"The remote peer connected to the local peer connection '{peerName}' offered to send some media" + + $" through transceiver #{trIndex}, but the local peer connection '{peerName}' has no receiver component to" + + " process this media. The remote peer's media will be ignored. To be able to receive that media, ensure that" + + $" the local peer connection '{peerName}' has a receiver component associated with its transceiver #{trIndex}."); + } + + private void LogWarningOnIgnoredTransceiver(string peerName, int trIndex) + { + Debug.LogWarning($"The remote peer connected to the local peer connection '{peerName}' has transceiver #{trIndex}," + + " but the local peer connection doesn't have a local transceiver to pair with it. The remote peer's media for" + + " this transceiver will be ignored. 
To be able to receive that media, ensure that the local peer connection" + + $" '{peerName}' has transceiver #{trIndex} and a receiver component associated with it."); + } + + #endregion + } +} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/PeerConnection.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/PeerConnection.cs.meta new file mode 100644 index 0000000..2c32099 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/PeerConnection.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 64dff3d25ffa2ee4f907d396ed9c1ab0 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Signaling.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Signaling.meta new file mode 100644 index 0000000..15af21c --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Signaling.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 08fef9ca84cf327498c54552b85581f5 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Signaling/LocalOnlySignaler.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Signaling/LocalOnlySignaler.cs new file mode 100644 index 0000000..92fe0ca --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Signaling/LocalOnlySignaler.cs @@ -0,0 +1,161 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using UnityEngine; +using Microsoft.MixedReality.WebRTC.Unity; +using System.Threading; +using System.Collections; + +namespace Microsoft.MixedReality.WebRTC.Unity +{ + /// + /// Simple signaler using two peer connections in the same process, + /// and hard-coding their SDP message delivery to avoid the need for + /// any kind of networking to deliver SDP messages. + /// + /// This component is designed to be used in demos where both peers + /// are present in the same scene. + /// + public class LocalOnlySignaler : WorkQueue + { + /// + /// First peer to connect, which will generate an offer. + /// + public PeerConnection Peer1; + + /// + /// Second peer to connect, which will wait for an offer from the first peer. + /// + public PeerConnection Peer2; + + /// + /// Check if the last connection attempt successfully completed. This is reset to false each + /// time is called, and is updated after + /// returned to indicate if the connection succeeded. + /// + public bool IsConnected { get; private set; } = false; + + private ManualResetEventSlim _remoteApplied1 = new ManualResetEventSlim(); + private ManualResetEventSlim _remoteApplied2 = new ManualResetEventSlim(); + + /// + /// Initiate a connection by having send an offer to , + /// and wait until the SDP exchange completed. To wait for completion, use + /// then check the value of after that to determine if + /// terminated due to the connection being established or + /// if it timed out. + /// + /// true if the exchange started successfully, or false otherwise. + /// + public bool StartConnection() + { + EnsureIsMainAppThread(); + _remoteApplied1.Reset(); + _remoteApplied2.Reset(); + IsConnected = false; + return Peer1.StartConnection(); + } + + /// + /// Wait for the connection being established. + /// + /// Timeout in milliseconds to wait for the connection. 
+ /// An enumerator used to yield while waiting. + /// + /// Assert.IsTrue(signaler.StartConnection()); + /// yield return signaler.WaitForConnection(millisecondsTimeout: 10000); + /// Assert.IsTrue(signaler.IsConnected); + /// + public IEnumerator WaitForConnection(int millisecondsTimeout) + { + float timeoutTime = Time.time + (millisecondsTimeout / 1000f); + while (true) + { + if (_remoteApplied1.IsSet && _remoteApplied2.IsSet) + { + IsConnected = true; + break; + } + if (Time.time >= timeoutTime) + { + break; + } + yield return null; + } + } + + private void Start() + { + Peer1.OnInitialized.AddListener(OnInitialized1); + Peer2.OnInitialized.AddListener(OnInitialized2); + } + + private void OnInitialized1() + { + Peer1.Peer.LocalSdpReadytoSend += Peer1_LocalSdpReadytoSend; + Peer1.Peer.IceCandidateReadytoSend += Peer1_IceCandidateReadytoSend; + } + + private void OnInitialized2() + { + Peer2.Peer.LocalSdpReadytoSend += Peer2_LocalSdpReadytoSend; + Peer2.Peer.IceCandidateReadytoSend += Peer2_IceCandidateReadytoSend; + } + + private void Peer1_LocalSdpReadytoSend(Microsoft.MixedReality.WebRTC.SdpMessage message) + { + InvokeOnAppThread(async () => + { + if (Peer2.Peer == null) + { + Debug.Log("Discarding SDP message for peer #2 (disabled)"); + return; + } + await Peer2.HandleConnectionMessageAsync(message); + _remoteApplied2.Set(); + if (message.Type == Microsoft.MixedReality.WebRTC.SdpMessageType.Offer) + { + Peer2.Peer.CreateAnswer(); + } + }); + } + + private void Peer2_LocalSdpReadytoSend(Microsoft.MixedReality.WebRTC.SdpMessage message) + { + InvokeOnAppThread(async () => + { + if (Peer1.Peer == null) + { + Debug.Log("Discarding SDP message for peer #1 (disabled)"); + return; + } + await Peer1.HandleConnectionMessageAsync(message); + _remoteApplied1.Set(); + if (message.Type == Microsoft.MixedReality.WebRTC.SdpMessageType.Offer) + { + Peer1.Peer.CreateAnswer(); + } + }); + } + + private void Peer1_IceCandidateReadytoSend(Microsoft.MixedReality.WebRTC.IceCandidate candidate) + { + if (Peer2.Peer == null) + { + Debug.Log("Discarding ICE message for peer #2 (disabled)"); + return; + } + Peer2.Peer.AddIceCandidate(candidate); + } + + private void Peer2_IceCandidateReadytoSend(Microsoft.MixedReality.WebRTC.IceCandidate candidate) + { + if (Peer1.Peer == null) + { + Debug.Log("Discarding ICE message for peer #1 (disabled)"); + return; + } + Peer1.Peer.AddIceCandidate(candidate); + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Signaling/LocalOnlySignaler.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Signaling/LocalOnlySignaler.cs.meta new file mode 100644 index 0000000..122a240 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Signaling/LocalOnlySignaler.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: e91aed6b074203740b7e047c9433c503 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Signaling/NodeDssSignaler.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Signaling/NodeDssSignaler.cs new file mode 100644 index 0000000..a76b5f4 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Signaling/NodeDssSignaler.cs @@ -0,0 +1,391 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +using System; +using System.Collections; +using System.Threading.Tasks; +using UnityEngine; +using UnityEngine.Networking; + +namespace Microsoft.MixedReality.WebRTC.Unity +{ + /// + /// Simple signaler for debug and testing. + /// This is based on https://github.com/bengreenier/node-dss and SHOULD NOT BE USED FOR PRODUCTION. + /// + [AddComponentMenu("MixedReality-WebRTC/NodeDSS Signaler")] + public class NodeDssSignaler : Signaler + { + /// + /// Automatically log all errors to the Unity console. + /// + [Tooltip("Automatically log all errors to the Unity console")] + public bool AutoLogErrors = true; + + /// + /// Unique identifier of the local peer. + /// + [Tooltip("Unique identifier of the local peer")] + public string LocalPeerId; + + /// + /// Unique identifier of the remote peer. + /// + [Tooltip("Unique identifier of the remote peer")] + public string RemotePeerId; + + /// + /// The https://github.com/bengreenier/node-dss HTTP service address to connect to + /// + [Header("Server")] + [Tooltip("The node-dss server to connect to")] + public string HttpServerAddress = "http://127.0.0.1:3000/"; + + /// + /// The interval (in ms) that the server is polled at + /// + [Tooltip("The interval (in ms) that the server is polled at")] + public float PollTimeMs = 500f; + + /// + /// Message exchanged with a node-dss server, serialized as JSON. + /// + /// + /// The names of the fields is critical here for proper JSON serialization. + /// + [Serializable] + private class NodeDssMessage + { + /// + /// Separator for ICE messages. + /// + public const string IceSeparatorChar = "|"; + + /// + /// Possible message types as-serialized on the wire to node-dss. + /// + public enum Type + { + /// + /// An unrecognized message. + /// + Unknown = 0, + + /// + /// A SDP offer message. + /// + Offer, + + /// + /// A SDP answer message. + /// + Answer, + + /// + /// A trickle-ice or ice message. + /// + Ice + } + + /// + /// Convert a message type from to . + /// + /// The message type as . + /// The message type as a object. 
+ public static Type MessageTypeFromString(string stringType) + { + if (string.Equals(stringType, "offer", StringComparison.OrdinalIgnoreCase)) + { + return Type.Offer; + } + else if (string.Equals(stringType, "answer", StringComparison.OrdinalIgnoreCase)) + { + return Type.Answer; + } + throw new ArgumentException($"Unkown signaler message type '{stringType}'", "stringType"); + } + + public static Type MessageTypeFromSdpMessageType(SdpMessageType type) + { + switch (type) + { + case SdpMessageType.Offer: return Type.Offer; + case SdpMessageType.Answer: return Type.Answer; + default: return Type.Unknown; + } + } + + public IceCandidate ToIceCandidate() + { + if (MessageType != Type.Ice) + { + throw new InvalidOperationException("The node-dss message it not an ICE candidate message."); + } + var parts = Data.Split(new string[] { IceSeparatorChar }, StringSplitOptions.RemoveEmptyEntries); + // Note the inverted arguments; candidate is last in IceCandidate, but first in the node-dss wire message + return new IceCandidate + { + SdpMid = parts[2], + SdpMlineIndex = int.Parse(parts[1]), + Content = parts[0] + }; + } + + public NodeDssMessage(SdpMessage message) + { + MessageType = MessageTypeFromSdpMessageType(message.Type); + Data = message.Content; + IceDataSeparator = string.Empty; + } + + public NodeDssMessage(IceCandidate candidate) + { + MessageType = Type.Ice; + Data = string.Join(IceSeparatorChar, candidate.Content, candidate.SdpMlineIndex.ToString(), candidate.SdpMid); + IceDataSeparator = IceSeparatorChar; + } + + /// + /// The message type. + /// + public Type MessageType = Type.Unknown; + + /// + /// The primary message contents. + /// + public string Data; + + /// + /// The data separator needed for proper ICE serialization. + /// + public string IceDataSeparator; + } + + /// + /// Internal timing helper + /// + private float timeSincePollMs = 0f; + + /// + /// Internal last poll response status flag + /// + private bool lastGetComplete = true; + + + #region ISignaler interface + + /// + public override Task SendMessageAsync(SdpMessage message) + { + return SendMessageImplAsync(new NodeDssMessage(message)); + } + + /// + public override Task SendMessageAsync(IceCandidate candidate) + { + return SendMessageImplAsync(new NodeDssMessage(candidate)); + } + + #endregion + + private Task SendMessageImplAsync(NodeDssMessage message) + { + // This method needs to return a Task object which gets completed once the signaler message + // has been sent. Because the implementation uses a Unity coroutine, use a reset event to + // signal the task to complete from the coroutine after the message is sent. + // Note that the coroutine is a Unity object so needs to be started from the main Unity app thread. + // Also note that TaskCompletionSource is used as a no-result variant; there is no meaning + // to the bool value. 
+            // https://stackoverflow.com/questions/11969208/non-generic-taskcompletionsource-or-alternative
+            var tcs = new TaskCompletionSource<bool>();
+            _mainThreadWorkQueue.Enqueue(() => StartCoroutine(PostToServerAndWait(message, tcs)));
+            return tcs.Task;
+        }
+
+        ///
+        /// Unity Engine Start() hook
+        ///
+        ///
+        /// https://docs.unity3d.com/ScriptReference/MonoBehaviour.Start.html
+        ///
+        private void Start()
+        {
+            if (string.IsNullOrEmpty(HttpServerAddress))
+            {
+                throw new ArgumentNullException("HttpServerAddress");
+            }
+            if (!HttpServerAddress.EndsWith("/"))
+            {
+                HttpServerAddress += "/";
+            }
+
+            // If not explicitly set, default local ID to some unique ID generated by Unity
+            if (string.IsNullOrEmpty(LocalPeerId))
+            {
+                LocalPeerId = SystemInfo.deviceName;
+            }
+        }
+
+        ///
+        /// Internal helper for sending HTTP data to the node-dss server using POST.
+        ///
+        /// The message to send.
+        private IEnumerator PostToServer(NodeDssMessage msg)
+        {
+            if (RemotePeerId.Length == 0)
+            {
+                throw new InvalidOperationException("Cannot send SDP message to remote peer; invalid empty remote peer ID.");
+            }
+
+            var data = System.Text.Encoding.UTF8.GetBytes(JsonUtility.ToJson(msg));
+            var www = new UnityWebRequest($"{HttpServerAddress}data/{RemotePeerId}", UnityWebRequest.kHttpVerbPOST);
+            www.uploadHandler = new UploadHandlerRaw(data);
+
+            yield return www.SendWebRequest();
+
+            if (AutoLogErrors && (www.isNetworkError || www.isHttpError))
+            {
+                Debug.Log($"Failed to send message to remote peer {RemotePeerId}: {www.error}");
+            }
+        }
+
+        ///
+        /// Internal helper to wrap a coroutine into a synchronous call for use inside
+        /// a Task object.
+        ///
+        /// The message to send.
+        private IEnumerator PostToServerAndWait(NodeDssMessage message, TaskCompletionSource<bool> tcs)
+        {
+            yield return StartCoroutine(PostToServer(message));
+            const bool dummy = true; // unused
+            tcs.SetResult(dummy);
+        }
+
+        ///
+        /// Internal coroutine helper for receiving HTTP data from the DSS server using GET
+        /// and processing it as needed.
+        ///
+        private IEnumerator CO_GetAndProcessFromServer()
+        {
+            if (HttpServerAddress.Length == 0)
+            {
+                throw new InvalidOperationException("Cannot receive SDP messages from remote peer; invalid empty HTTP server address.");
+            }
+            if (LocalPeerId.Length == 0)
+            {
+                throw new InvalidOperationException("Cannot receive SDP messages from remote peer; invalid empty local peer ID.");
+            }
+
+            var www = UnityWebRequest.Get($"{HttpServerAddress}data/{LocalPeerId}");
+            yield return www.SendWebRequest();
+
+            if (!www.isNetworkError && !www.isHttpError)
+            {
+                var json = www.downloadHandler.text;
+
+                var msg = JsonUtility.FromJson<NodeDssMessage>(json);
+
+                // If the message deserialized correctly...
+                if (msg != null)
+                {
+                    // ...handle it depending on its type. This is the "glue" that allows
+                    // two peers to establish a connection.
+                    DebugLogLong($"Received SDP message: type={msg.MessageType} data={msg.Data}");
+                    switch (msg.MessageType)
+                    {
+                        case NodeDssMessage.Type.Offer:
+                            // Apply the offer coming from the remote peer to the local peer
+                            var sdpOffer = new WebRTC.SdpMessage { Type = SdpMessageType.Offer, Content = msg.Data };
+                            PeerConnection.HandleConnectionMessageAsync(sdpOffer).ContinueWith(_ =>
+                            {
+                                // If the remote description was successfully applied then immediately send
+                                // back an answer to the remote peer to accept the offer.
+ _nativePeer.CreateAnswer(); + }, TaskContinuationOptions.OnlyOnRanToCompletion | TaskContinuationOptions.RunContinuationsAsynchronously); + break; + + case NodeDssMessage.Type.Answer: + // No need to wait for completion; there is nothing interesting to do after it. + var sdpAnswer = new WebRTC.SdpMessage { Type = SdpMessageType.Answer, Content = msg.Data }; + _ = PeerConnection.HandleConnectionMessageAsync(sdpAnswer); + break; + + case NodeDssMessage.Type.Ice: + // this "parts" protocol is defined above, in OnIceCandidateReadyToSend listener + _nativePeer.AddIceCandidate(msg.ToIceCandidate()); + break; + + default: + Debug.Log("Unknown message: " + msg.MessageType + ": " + msg.Data); + break; + } + + timeSincePollMs = PollTimeMs + 1f; //fast forward next request + } + else if (AutoLogErrors) + { + Debug.LogError($"Failed to deserialize JSON message : {json}"); + } + } + else if (AutoLogErrors && www.isNetworkError) + { + Debug.LogError($"Network error trying to send data to {HttpServerAddress}: {www.error}"); + } + else + { + // This is very spammy because the node-dss protocol uses 404 as regular "no data yet" message, which is an HTTP error + //Debug.LogError($"HTTP error: {www.error}"); + } + + lastGetComplete = true; + } + + /// + protected override void Update() + { + // Do not forget to call the base class Update(), which processes events from background + // threads to fire the callbacks implemented in this class. + base.Update(); + + // If we have not reached our PollTimeMs value... + if (timeSincePollMs <= PollTimeMs) + { + // ...then we keep incrementing our local counter until we do. + timeSincePollMs += Time.deltaTime * 1000.0f; + return; + } + + // If we have a pending request still going, don't queue another yet. + if (!lastGetComplete) + { + return; + } + + // When we have reached our PollTimeMs value... + timeSincePollMs = 0f; + + // ...begin the poll and process. + lastGetComplete = false; + StartCoroutine(CO_GetAndProcessFromServer()); + } + + private void DebugLogLong(string str) + { +#if !UNITY_EDITOR && UNITY_ANDROID + // On Android, logcat truncates to ~1000 characters, so split manually instead. + const int maxLineSize = 1000; + int totalLength = str.Length; + int numLines = (totalLength + maxLineSize - 1) / maxLineSize; + for (int i = 0; i < numLines; ++i) + { + int start = i * maxLineSize; + int length = Math.Min(start + maxLineSize, totalLength) - start; + Debug.Log(str.Substring(start, length)); + } +#else + Debug.Log(str); +#endif + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Signaling/NodeDssSignaler.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Signaling/NodeDssSignaler.cs.meta new file mode 100644 index 0000000..1e35ea2 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Signaling/NodeDssSignaler.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 6f59b5937dc35994c87e058b9edfe08a +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Signaling/Signaler.cs b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Signaling/Signaler.cs new file mode 100644 index 0000000..4e0c6c5 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Signaling/Signaler.cs @@ -0,0 +1,170 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+
+using System;
+using System.Collections.Concurrent;
+using System.Threading.Tasks;
+using UnityEngine;
+
+namespace Microsoft.MixedReality.WebRTC.Unity
+{
+    ///
+    /// Abstract base class to simplify implementing a WebRTC signaling solution in Unity.
+    ///
+    /// There is no requirement to use this class as a base class for a custom implementation,
+    /// but it automatically registers the necessary event handlers and dispatches free-threaded
+    /// callbacks to the main Unity app thread for simplicity and safety. This leaves the
+    /// implementation with only the two sending methods, SendMessageAsync(SdpMessage) and
+    /// SendMessageAsync(IceCandidate), to implement, as well as handling received messages.
+    ///
+    public abstract class Signaler : MonoBehaviour
+    {
+        ///
+        /// The PeerConnection this signaler needs to work for.
+        ///
+        public PeerConnection PeerConnection;
+
+
+        #region Signaler interface
+
+        ///
+        /// Asynchronously send an SDP message to the remote peer.
+        ///
+        /// The SDP message to send to the remote peer.
+        ///
+        /// A Task object completed once the message has been sent,
+        /// but not necessarily delivered.
+        ///
+        public abstract Task SendMessageAsync(SdpMessage message);
+
+        ///
+        /// Asynchronously send an ICE candidate to the remote peer.
+        ///
+        /// The ICE candidate to send to the remote peer.
+        ///
+        /// A Task object completed once the message has been sent,
+        /// but not necessarily delivered.
+        ///
+        public abstract Task SendMessageAsync(IceCandidate candidate);
+
+        #endregion
+
+
+        ///
+        /// Native peer connection object from the underlying
+        /// WebRTC C# library, available once the peer has been initialized.
+        ///
+        protected WebRTC.PeerConnection _nativePeer = null;
+
+        ///
+        /// Task queue used to defer actions to the main Unity app thread, which is the only thread
+        /// with access to Unity objects.
+        ///
+        protected ConcurrentQueue<Action> _mainThreadWorkQueue = new ConcurrentQueue<Action>();
+
+        ///
+        /// Callback fired from the PeerConnection when it has finished
+        /// initializing, to subscribe to signaling-related events.
+        ///
+        public void OnPeerInitialized()
+        {
+            _nativePeer = PeerConnection.Peer;
+
+            // Register handlers for the SDP events
+            _nativePeer.IceCandidateReadytoSend += OnIceCandidateReadyToSend_Listener;
+            _nativePeer.LocalSdpReadytoSend += OnLocalSdpReadyToSend_Listener;
+        }
+
+        ///
+        /// Callback fired from the PeerConnection before it starts
+        /// uninitializing itself and disposing of the underlying implementation object.
+        ///
+        public void OnPeerUninitializing()
+        {
+            // Unregister handlers for the SDP events
+            //_nativePeer.IceCandidateReadytoSend -= OnIceCandidateReadyToSend_Listener;
+            //_nativePeer.LocalSdpReadytoSend -= OnLocalSdpReadyToSend_Listener;
+        }
+
+        private void OnIceCandidateReadyToSend_Listener(IceCandidate candidate)
+        {
+            _mainThreadWorkQueue.Enqueue(() => OnIceCandidateReadyToSend(candidate));
+        }
+
+        ///
+        /// Helper to split SDP offer and answer messages and dispatch to the appropriate handler.
+        ///
+        /// The SDP message ready to be sent to the remote peer.
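Since only the two `SendMessageAsync` overloads are abstract, a concrete signaler can be quite small. The sketch below is illustrative and not part of this package: `MyTransport` and its `SendAsync` method are hypothetical stand-ins for whatever message channel (WebSocket, REST endpoint, etc.) an application actually uses.

```csharp
using System.Threading.Tasks;
using Microsoft.MixedReality.WebRTC;
using Microsoft.MixedReality.WebRTC.Unity;

public class MyCustomSignaler : Signaler
{
    // Hypothetical transport object; replace with a real message channel.
    public MyTransport Transport;

    public override Task SendMessageAsync(SdpMessage message)
    {
        // An SDP message is fully described by its type and its content.
        return Transport.SendAsync($"sdp:{message.Type}", message.Content);
    }

    public override Task SendMessageAsync(IceCandidate candidate)
    {
        // An ICE candidate needs all three fields to be rebuilt on the remote side.
        return Transport.SendAsync("ice",
            $"{candidate.Content}|{candidate.SdpMlineIndex}|{candidate.SdpMid}");
    }
}
```

On the receiving side, a real implementation forwards inbound offers and answers to `PeerConnection.HandleConnectionMessageAsync()` and inbound candidates to the native peer's `AddIceCandidate()`, exactly as the `NodeDssSignaler` above does.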
+ private void OnLocalSdpReadyToSend_Listener(SdpMessage message) + { + if (message.Type == SdpMessageType.Offer) + { + _mainThreadWorkQueue.Enqueue(() => OnSdpOfferReadyToSend(message)); + } + else if (message.Type == SdpMessageType.Answer) + { + _mainThreadWorkQueue.Enqueue(() => OnSdpAnswerReadyToSend(message)); + } + } + + protected virtual void OnEnable() + { + PeerConnection.OnInitialized.AddListener(OnPeerInitialized); + PeerConnection.OnShutdown.AddListener(OnPeerUninitializing); + } + + /// + /// Unity Engine Update() hook + /// + /// + /// https://docs.unity3d.com/ScriptReference/MonoBehaviour.Update.html + /// + protected virtual void Update() + { + // Process workloads queued from background threads + while (_mainThreadWorkQueue.TryDequeue(out Action action)) + { + action(); + } + } + + protected virtual void OnDisable() + { + PeerConnection.OnInitialized.RemoveListener(OnPeerInitialized); + PeerConnection.OnShutdown.RemoveListener(OnPeerUninitializing); + } + + /// + /// Callback invoked when an ICE candidate message has been generated and is ready to + /// be sent to the remote peer by the signaling object. + /// + /// ICE candidate to send to the remote peer. + protected virtual void OnIceCandidateReadyToSend(IceCandidate candidate) + { + SendMessageAsync(candidate); + } + + /// + /// Callback invoked when a local SDP offer has been generated and is ready to + /// be sent to the remote peer by the signaling object. + /// + /// The SDP offer message to send. + protected virtual void OnSdpOfferReadyToSend(SdpMessage offer) + { + SendMessageAsync(offer); + } + + /// + /// Callback invoked when a local SDP answer has been generated and is ready to + /// be sent to the remote peer by the signaling object. + /// + /// The SDP answer message to send. + protected virtual void OnSdpAnswerReadyToSend(SdpMessage answer) + { + SendMessageAsync(answer); + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Signaling/Signaler.cs.meta b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Signaling/Signaler.cs.meta new file mode 100644 index 0000000..450ac86 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Scripts/Signaling/Signaler.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: d217f41795874b84581f68c25c190913 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Shaders.meta b/com.microsoft.mixedreality.webrtc/Runtime/Shaders.meta new file mode 100644 index 0000000..b461685 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Shaders.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 1cf8f18fef40e97428685e1d23f83335 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Shaders/ARGBFeedShader.shader b/com.microsoft.mixedreality.webrtc/Runtime/Shaders/ARGBFeedShader.shader new file mode 100644 index 0000000..fbe5862 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Shaders/ARGBFeedShader.shader @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +// Simple shader mapping an ARGB32 video feed using a Standard lit model. 
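+// Unlike the YUV shaders below, ARGB32 frames arrive already converted to RGB,
+// so the only per-pixel work left is the vertical flip (and optional mirroring).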
+Shader "Video/ARGBFeedShader (standard lit)" +{ + Properties + { + [Toggle(MIRROR)] _Mirror("Horizontal Mirror", Float) = 0 + [HideInEditor][NoScaleOffset] _MainTex("Main Tex", 2D) = "black" {} + } + SubShader + { + Tags { "RenderType" = "Opaque" } + + CGPROGRAM + + #pragma surface surf Lambert //alpha + #pragma multi_compile_instancing + #pragma multi_compile __ MIRROR + + struct Input { + float2 uv_MainTex; + }; + + // Texture containing the ARGB32 video frames + sampler2D _MainTex; + + void surf(Input IN, inout SurfaceOutput o) + { + // Flip texture coordinates vertically. + // Texture2D.LoadRawTextureData() always expects a bottom-up image, but the MediaPlayer + // upload code always get a top-down frame from WebRTC. The most efficient is to upload + // as is (inverted) and revert here. + IN.uv_MainTex.y = 1 - IN.uv_MainTex.y; + +#ifdef MIRROR + // Optional left-right mirroring (horizontal flipping) + IN.uv_MainTex.x = 1 - IN.uv_MainTex.x; +#endif + + o.Albedo = tex2D(_MainTex, IN.uv_MainTex).rgb; + o.Alpha = 1; + } + ENDCG + } + Fallback "Diffuse" +} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Shaders/ARGBFeedShader.shader.meta b/com.microsoft.mixedreality.webrtc/Runtime/Shaders/ARGBFeedShader.shader.meta new file mode 100644 index 0000000..116d88f --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Shaders/ARGBFeedShader.shader.meta @@ -0,0 +1,9 @@ +fileFormatVersion: 2 +guid: 425a357788323ae4285a04cf1bfa216a +ShaderImporter: + externalObjects: {} + defaultTextures: [] + nonModifiableTextures: [] + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Shaders/ARGBFeedShaderUnlit.shader b/com.microsoft.mixedreality.webrtc/Runtime/Shaders/ARGBFeedShaderUnlit.shader new file mode 100644 index 0000000..ca02621 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Shaders/ARGBFeedShaderUnlit.shader @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +// Simple shader mapping an ARGB32 video feed without any lighting model. +Shader "Video/ARGBFeedShader (unlit)" +{ + Properties + { + [Toggle(MIRROR)] _Mirror("Horizontal Mirror", Float) = 0 + [HideInEditor][NoScaleOffset] _MainTex("Main Tex", 2D) = "black" {} + } + SubShader + { + Pass + { + CGPROGRAM + #pragma vertex vert + #pragma fragment frag + #pragma multi_compile_instancing + #pragma multi_compile __ MIRROR + + #include "UnityCG.cginc" + + struct appdata + { + float4 vertex : POSITION; + float2 uv : TEXCOORD0; + UNITY_VERTEX_INPUT_INSTANCE_ID + }; + + struct v2f + { + float2 uv : TEXCOORD0; + float4 vertex : SV_POSITION; + UNITY_VERTEX_OUTPUT_STEREO + }; + + v2f vert (appdata v) + { + v2f o; + UNITY_SETUP_INSTANCE_ID(v); + UNITY_INITIALIZE_OUTPUT(v2f, o); + UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o); + o.vertex = UnityObjectToClipPos(v.vertex); + o.uv = v.uv; + + // Flip texture coordinates vertically. + // Texture2D.LoadRawTextureData() always expects a bottom-up image, but the MediaPlayer + // upload code always get a top-down frame from WebRTC. The most efficient is to upload + // as is (inverted) and revert here. 
+                o.uv.y = 1 - v.uv.y;
+
+#ifdef MIRROR
+                // Optional left-right mirroring (horizontal flipping)
+                o.uv.x = 1 - v.uv.x;
+#endif
+
+                return o;
+            }
+
+            sampler2D _MainTex;
+
+            fixed3 frag (v2f i) : SV_Target
+            {
+                return tex2D(_MainTex, i.uv).rgb;
+            }
+            ENDCG
+        }
+    }
+}
diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Shaders/ARGBFeedShaderUnlit.shader.meta b/com.microsoft.mixedreality.webrtc/Runtime/Shaders/ARGBFeedShaderUnlit.shader.meta
new file mode 100644
index 0000000..99c09f4
--- /dev/null
+++ b/com.microsoft.mixedreality.webrtc/Runtime/Shaders/ARGBFeedShaderUnlit.shader.meta
@@ -0,0 +1,9 @@
+fileFormatVersion: 2
+guid: 774ee07e70a065847b90a93e9c377d33
+ShaderImporter:
+  externalObjects: {}
+  defaultTextures: []
+  nonModifiableTextures: []
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 
diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Shaders/YUVFeedShader.shader b/com.microsoft.mixedreality.webrtc/Runtime/Shaders/YUVFeedShader.shader
new file mode 100644
index 0000000..1f942ff
--- /dev/null
+++ b/com.microsoft.mixedreality.webrtc/Runtime/Shaders/YUVFeedShader.shader
@@ -0,0 +1,70 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+// Simple shader mapping a YUV video feed using a Standard lit model.
+Shader "Video/YUVFeedShader (standard lit)"
+{
+    Properties
+    {
+        [Toggle(MIRROR)] _Mirror("Horizontal Mirror", Float) = 0
+        [HideInEditor][NoScaleOffset] _YPlane("Y plane", 2D) = "black" {}
+        [HideInEditor][NoScaleOffset] _UPlane("U plane", 2D) = "gray" {}
+        [HideInEditor][NoScaleOffset] _VPlane("V plane", 2D) = "gray" {}
+    }
+
+    SubShader
+    {
+        Tags { "RenderType" = "Opaque" }
+        CGPROGRAM
+
+        #pragma surface surf Lambert //alpha
+        #pragma multi_compile_instancing
+        #pragma multi_compile __ MIRROR
+
+        struct Input
+        {
+            float2 uv_YPlane;
+        };
+
+        sampler2D _YPlane;
+        sampler2D _UPlane;
+        sampler2D _VPlane;
+
+        half3 yuv2rgb(half3 yuv)
+        {
+            // The YUV to RGB conversion, please refer to: http://en.wikipedia.org/wiki/YUV
+            // Y'UV420p (I420) to RGB888 conversion section.
+            half y_value = yuv[0];
+            half u_value = yuv[1];
+            half v_value = yuv[2];
+            half r = y_value + 1.370705 * (v_value - 0.5);
+            half g = y_value - 0.698001 * (v_value - 0.5) - (0.337633 * (u_value - 0.5));
+            half b = y_value + 1.732446 * (u_value - 0.5);
+            return half3(r, g, b);
+        }
+
+        void surf(Input IN, inout SurfaceOutput o)
+        {
+            half3 yuv;
+
+            // Flip texture coordinates vertically.
+            // Texture2D.LoadRawTextureData() always expects a bottom-up image, but the MediaPlayer
+            // upload code always gets a top-down frame from WebRTC. The most efficient approach is
+            // to upload as-is (inverted) and revert here.
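+            // (The chroma planes are sampled below with the Y plane's coordinates;
+            // UVs are normalized, so the half-resolution U and V planes of I420
+            // need no separate coordinate set.)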
+            IN.uv_YPlane.y = 1 - IN.uv_YPlane.y;
+
+#ifdef MIRROR
+            // Optional left-right mirroring (horizontal flipping)
+            IN.uv_YPlane.x = 1 - IN.uv_YPlane.x;
+#endif
+            yuv.x = tex2D(_YPlane, IN.uv_YPlane).r;
+            yuv.y = tex2D(_UPlane, IN.uv_YPlane).r;
+            yuv.z = tex2D(_VPlane, IN.uv_YPlane).r;
+            o.Albedo = yuv2rgb(yuv);
+        }
+
+        ENDCG
+    }
+
+    Fallback "Diffuse"
+}
diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Shaders/YUVFeedShader.shader.meta b/com.microsoft.mixedreality.webrtc/Runtime/Shaders/YUVFeedShader.shader.meta
new file mode 100644
index 0000000..f65db6f
--- /dev/null
+++ b/com.microsoft.mixedreality.webrtc/Runtime/Shaders/YUVFeedShader.shader.meta
@@ -0,0 +1,9 @@
+fileFormatVersion: 2
+guid: ae0a8c931a1f88d46bba888dbae84562
+ShaderImporter:
+  externalObjects: {}
+  defaultTextures: []
+  nonModifiableTextures: []
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 
diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Shaders/YUVFeedShaderUnlit.shader b/com.microsoft.mixedreality.webrtc/Runtime/Shaders/YUVFeedShaderUnlit.shader
new file mode 100644
index 0000000..3582f35
--- /dev/null
+++ b/com.microsoft.mixedreality.webrtc/Runtime/Shaders/YUVFeedShaderUnlit.shader
@@ -0,0 +1,90 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+// Simple shader mapping a YUV video feed without any lighting model.
+Shader "Video/YUVFeedShader (unlit)"
+{
+    Properties
+    {
+        [Toggle(MIRROR)] _Mirror("Horizontal Mirror", Float) = 0
+        [HideInEditor][NoScaleOffset] _YPlane("Y plane", 2D) = "black" {}
+        [HideInEditor][NoScaleOffset] _UPlane("U plane", 2D) = "gray" {}
+        [HideInEditor][NoScaleOffset] _VPlane("V plane", 2D) = "gray" {}
+    }
+    SubShader
+    {
+        Pass
+        {
+            CGPROGRAM
+            #pragma vertex vert
+            #pragma fragment frag
+            #pragma multi_compile_instancing
+            #pragma multi_compile __ MIRROR
+
+            #include "UnityCG.cginc"
+
+            struct appdata
+            {
+                float4 vertex : POSITION;
+                float2 uv : TEXCOORD0;
+                UNITY_VERTEX_INPUT_INSTANCE_ID
+            };
+
+            struct v2f
+            {
+                float2 uv : TEXCOORD0;
+                float4 vertex : SV_POSITION;
+                UNITY_VERTEX_OUTPUT_STEREO
+            };
+
+            v2f vert (appdata v)
+            {
+                v2f o;
+                UNITY_SETUP_INSTANCE_ID(v);
+                UNITY_INITIALIZE_OUTPUT(v2f, o);
+                UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o);
+                o.vertex = UnityObjectToClipPos(v.vertex);
+                o.uv = v.uv;
+
+                // Flip texture coordinates vertically.
+                // Texture2D.LoadRawTextureData() always expects a bottom-up image, but the MediaPlayer
+                // upload code always gets a top-down frame from WebRTC. The most efficient approach is
+                // to upload as-is (inverted) and revert here.
+                o.uv.y = 1 - v.uv.y;
+
+#ifdef MIRROR
+                // Optional left-right mirroring (horizontal flipping)
+                o.uv.x = 1 - v.uv.x;
+#endif
+                return o;
+            }
+
+            sampler2D _YPlane;
+            sampler2D _UPlane;
+            sampler2D _VPlane;
+
+            half3 yuv2rgb(half3 yuv)
+            {
+                // The YUV to RGB conversion, please refer to: http://en.wikipedia.org/wiki/YUV
+                // Y'UV420p (I420) to RGB888 conversion section.
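+                // In equation form, with u' = u - 0.5 and v' = v - 0.5:
+                //   r = y + 1.370705 * v'
+                //   g = y - 0.337633 * u' - 0.698001 * v'
+                //   b = y + 1.732446 * u'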
+ half y_value = yuv[0]; + half u_value = yuv[1]; + half v_value = yuv[2]; + half r = y_value + 1.370705 * (v_value - 0.5); + half g = y_value - 0.698001 * (v_value - 0.5) - (0.337633 * (u_value - 0.5)); + half b = y_value + 1.732446 * (u_value - 0.5); + return half3(r, g, b); + } + + fixed3 frag (v2f i) : SV_Target + { + half3 yuv; + yuv.x = tex2D(_YPlane, i.uv).r; + yuv.y = tex2D(_UPlane, i.uv).r; + yuv.z = tex2D(_VPlane, i.uv).r; + return yuv2rgb(yuv); + } + ENDCG + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Runtime/Shaders/YUVFeedShaderUnlit.shader.meta b/com.microsoft.mixedreality.webrtc/Runtime/Shaders/YUVFeedShaderUnlit.shader.meta new file mode 100644 index 0000000..87d8e52 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Runtime/Shaders/YUVFeedShaderUnlit.shader.meta @@ -0,0 +1,9 @@ +fileFormatVersion: 2 +guid: f5704ed07eb02e5438d0b6f485061362 +ShaderImporter: + externalObjects: {} + defaultTextures: [] + nonModifiableTextures: [] + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Tests.meta b/com.microsoft.mixedreality.webrtc/Tests.meta new file mode 100644 index 0000000..8dcf985 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Tests.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: cd131a1107634294eb2cc318b3be0698 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Tests/Editor.meta b/com.microsoft.mixedreality.webrtc/Tests/Editor.meta new file mode 100644 index 0000000..6f32380 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Tests/Editor.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 48a0d6d9535989245a232648f457b782 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Tests/Editor/EditorTests.cs b/com.microsoft.mixedreality.webrtc/Tests/Editor/EditorTests.cs new file mode 100644 index 0000000..afc7fec --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Tests/Editor/EditorTests.cs @@ -0,0 +1,86 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+
+using System;
+using System.Threading;
+using System.Threading.Tasks;
+using NUnit.Framework;
+
+namespace Microsoft.MixedReality.WebRTC.Tests
+{
+    public class EditorTests
+    {
+        [Test]
+        public async Task PeerConnectionDefault()
+        {
+            using (var pc = new PeerConnection())
+            {
+                var config = new PeerConnectionConfiguration();
+                await pc.InitializeAsync(config);
+            }
+        }
+
+        //[Test]
+        //public void PeerConnectionWrongIceUrl()
+        //{
+        //    using (var pc = new PeerConnection())
+        //    {
+        //        var config = new PeerConnectionConfiguration()
+        //        {
+        //            IceServers = { new IceServer { Urls = { "random url" } } }
+        //        };
+        //        try
+        //        {
+        //            pc.InitializeAsync(config).ContinueWith((task) => { });
+        //        }
+        //        catch (Exception _)
+        //        {
+        //        }
+        //    }
+        //}
+
+        private void WaitForSdpExchangeCompleted(ManualResetEventSlim completed)
+        {
+            Assert.True(completed.Wait(TimeSpan.FromSeconds(60.0)));
+            completed.Reset();
+        }
+
+        [Test]
+        public async Task PeerConnectionLocalConnect()
+        {
+            using (var pc1 = new PeerConnection())
+            {
+                await pc1.InitializeAsync();
+                using (var pc2 = new PeerConnection())
+                {
+                    await pc2.InitializeAsync();
+
+                    // Prepare SDP event handlers
+                    var completed = new ManualResetEventSlim(initialState: false);
+                    pc1.LocalSdpReadytoSend += async (SdpMessage message) =>
+                    {
+                        // Send caller offer to callee
+                        await pc2.SetRemoteDescriptionAsync(message);
+                        Assert.AreEqual(SdpMessageType.Offer, message.Type);
+                        pc2.CreateAnswer();
+                    };
+                    pc2.LocalSdpReadytoSend += async (SdpMessage message) =>
+                    {
+                        // Send callee answer back to caller
+                        await pc1.SetRemoteDescriptionAsync(message);
+                        Assert.AreEqual(SdpMessageType.Answer, message.Type);
+                        completed.Set();
+                    };
+                    pc1.IceCandidateReadytoSend += (IceCandidate candidate) => pc2.AddIceCandidate(candidate);
+                    pc2.IceCandidateReadytoSend += (IceCandidate candidate) => pc1.AddIceCandidate(candidate);
+
+                    // Connect
+                    pc1.CreateOffer();
+                    WaitForSdpExchangeCompleted(completed);
+
+                    pc1.Close();
+                    pc2.Close();
+                }
+            }
+        }
+    }
+}
diff --git a/com.microsoft.mixedreality.webrtc/Tests/Editor/EditorTests.cs.meta b/com.microsoft.mixedreality.webrtc/Tests/Editor/EditorTests.cs.meta
new file mode 100644
index 0000000..4bd15ef
--- /dev/null
+++ b/com.microsoft.mixedreality.webrtc/Tests/Editor/EditorTests.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 6e350de801d18414192561aaf9888433
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 
diff --git a/com.microsoft.mixedreality.webrtc/Tests/Editor/Microsoft.MixedReality.WebRTC.Unity.Tests.Editor.asmdef b/com.microsoft.mixedreality.webrtc/Tests/Editor/Microsoft.MixedReality.WebRTC.Unity.Tests.Editor.asmdef
new file mode 100644
index 0000000..af6fc13
--- /dev/null
+++ b/com.microsoft.mixedreality.webrtc/Tests/Editor/Microsoft.MixedReality.WebRTC.Unity.Tests.Editor.asmdef
@@ -0,0 +1,20 @@
+{
+    "name": "Microsoft.MixedReality.WebRTC.Unity.Tests.Editor",
+    "references": [
+        "Microsoft.MixedReality.WebRTC.Unity"
+    ],
+    "optionalUnityReferences": [
+        "TestAssemblies"
+    ],
+    "includePlatforms": [
+        "Editor"
+    ],
+    "excludePlatforms": [],
+    "allowUnsafeCode": false,
+    "overrideReferences": false,
+    "precompiledReferences": [
+        "Microsoft.MixedReality.WebRTC.dll"
+    ],
+    "autoReferenced": true,
+    "defineConstraints": []
+}
\ No newline at end of file
diff --git a/com.microsoft.mixedreality.webrtc/Tests/Editor/Microsoft.MixedReality.WebRTC.Unity.Tests.Editor.asmdef.meta b/com.microsoft.mixedreality.webrtc/Tests/Editor/Microsoft.MixedReality.WebRTC.Unity.Tests.Editor.asmdef.meta
new file mode 100644
index 0000000..9a084a0
--- /dev/null
+++ b/com.microsoft.mixedreality.webrtc/Tests/Editor/Microsoft.MixedReality.WebRTC.Unity.Tests.Editor.asmdef.meta
@@ -0,0 +1,7 @@
+fileFormatVersion: 2
+guid: 57333d218af61fe498bd609086437f95
+AssemblyDefinitionImporter:
+  externalObjects: {}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 
diff --git a/com.microsoft.mixedreality.webrtc/Tests/Runtime.meta b/com.microsoft.mixedreality.webrtc/Tests/Runtime.meta
new file mode 100644
index 0000000..320e5b9
--- /dev/null
+++ b/com.microsoft.mixedreality.webrtc/Tests/Runtime.meta
@@ -0,0 +1,8 @@
+fileFormatVersion: 2
+guid: 884eb01a2c366724888b64ccce0b3475
+folderAsset: yes
+DefaultImporter:
+  externalObjects: {}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 
diff --git a/com.microsoft.mixedreality.webrtc/Tests/Runtime/MediaLineTests.cs b/com.microsoft.mixedreality.webrtc/Tests/Runtime/MediaLineTests.cs
new file mode 100644
index 0000000..2645a3b
--- /dev/null
+++ b/com.microsoft.mixedreality.webrtc/Tests/Runtime/MediaLineTests.cs
@@ -0,0 +1,297 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections;
+using System.Linq;
+using System.Threading;
+using System.Threading.Tasks;
+using NUnit.Framework;
+using UnityEngine;
+using UnityEngine.TestTools;
+
+namespace Microsoft.MixedReality.WebRTC.Unity.Tests.Runtime
+{
+    public class MockVideoSource : CustomVideoSource
+    {
+        protected override void OnFrameRequested(in FrameRequest request)
+        {
+            var data = new uint[16 * 16];
+            for (int k = 0; k < 256; ++k)
+            {
+                data[k] = 0xFF0000FFu;
+            }
+            unsafe
+            {
+                fixed (uint* ptr = data)
+                {
+                    var frame = new Argb32VideoFrame
+                    {
+                        data = new IntPtr(ptr),
+                        width = 16,
+                        height = 16,
+                        stride = 64
+                    };
+                    request.CompleteRequest(frame);
+                }
+            }
+        }
+    }
+
+    public class MediaLineTests
+    {
+        [SetUp]
+        public void Setup()
+        {
+        }
+
+        [TearDown]
+        public void Shutdown()
+        {
+            // Note - this runs before OnDisable/OnDestroy, so will always report false positives
+            Library.ReportLiveObjects();
+        }
+
+        class DummyAudioSource : MediaTrackSource
+        {
+            public override MediaKind MediaKind => MediaKind.Audio;
+            public override bool IsLive => true;
+        }
+
+        private MediaLine CreateMediaLine(PeerConnection pc)
+        {
+            MediaLine mediaLine = pc.AddMediaLine(MediaKind.Video);
+            Assert.IsNotNull(mediaLine);
+            Assert.AreEqual(MediaKind.Video, mediaLine.MediaKind);
+            Assert.IsNull(mediaLine.Transceiver); // no connection
+            Assert.IsNull(mediaLine.Source);
+            Assert.IsNull(mediaLine.Receiver);
+            Assert.IsNull(mediaLine.LocalTrack);
+            return mediaLine;
+        }
+
+        private IEnumerator InitializePeer(PeerConnection pc, GameObject pc_go)
+        {
+            var ev = new ManualResetEventSlim(initialState: false);
+            pc.OnInitialized.AddListener(() => ev.Set());
+            pc_go.SetActive(true);
+            // Wait for the peer connection to be initialized; this generally takes
+            // at least 2 frames, one for the SetActive() to execute and one for
+            // the OnInitialized() event to propagate.
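+            // (Wait() returns false when the 200 ms timeout expires before the event
+            // is set; yielding null below resumes the coroutine on the next frame.)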
+            while (!ev.Wait(millisecondsTimeout: 200))
+            {
+                yield return null;
+            }
+            Assert.IsNotNull(pc.Peer);
+        }
+
+        private IEnumerator CreateMediaLineTest(bool initFirst)
+        {
+            // Create the peer connection
+            var pc_go = new GameObject("pc1");
+            pc_go.SetActive(false); // prevent auto-activation of components
+            var pc = pc_go.AddComponent<PeerConnection>();
+
+            MediaLine mediaLine;
+            if (initFirst)
+            {
+                // Initialize the peer connection
+                yield return InitializePeer(pc, pc_go);
+
+                // Create the media line
+                mediaLine = CreateMediaLine(pc);
+            }
+            else
+            {
+                // Create the media line
+                mediaLine = CreateMediaLine(pc);
+
+                // Initialize the peer connection
+                yield return InitializePeer(pc, pc_go);
+
+                // No change
+                Assert.IsNull(mediaLine.Transceiver); // no connection
+            }
+
+            // Create an offer (which won't succeed as there's no signaler, but that doesn't matter)
+            Assert.IsTrue(pc.StartConnection());
+
+            // The transceiver was created by the implementation and assigned to the media line
+            Assert.IsNotNull(mediaLine.Transceiver);
+            Assert.AreEqual(mediaLine.Transceiver.MediaKind, mediaLine.MediaKind);
+
+            UnityEngine.Object.Destroy(pc_go);
+        }
+
+        [UnityTest(/*Description = "Add a media line to a peer connection before it is initialized"*/)]
+        public IEnumerator CreateBeforePeerInit()
+        {
+            return CreateMediaLineTest(initFirst: false);
+        }
+
+        [UnityTest(/*Description = "Add a media line to a peer connection after it is initialized"*/)]
+        public IEnumerator CreateAfterPeerInit()
+        {
+            return CreateMediaLineTest(initFirst: true);
+        }
+
+        [UnityTest(/*Description = "MediaLine.Source"*/)]
+        public IEnumerator SetSource()
+        {
+            // Create the peer connection
+            var pc_go = new GameObject("pc1");
+            var pc = pc_go.AddComponent<PeerConnection>();
+
+            // Create some video track sources
+            VideoTrackSource source1 = pc_go.AddComponent<MockVideoSource>();
+            VideoTrackSource source2 = pc_go.AddComponent<MockVideoSource>();
+            Assert.AreEqual(0, source1.MediaLines.Count);
+            Assert.AreEqual(0, source2.MediaLines.Count);
+
+            // Create the media line
+            MediaLine mediaLine = pc.AddMediaLine(MediaKind.Video);
+
+            // Assign a video source to the media line
+            mediaLine.Source = source1;
+            Assert.AreEqual(mediaLine.Source, source1);
+            Assert.AreEqual(1, source1.MediaLines.Count);
+            Assert.IsTrue(source1.MediaLines.Contains(mediaLine));
+
+            // No-op
+            mediaLine.Source = source1;
+
+            // Assign another video source to the media line
+            mediaLine.Source = source2;
+            Assert.AreEqual(mediaLine.Source, source2);
+            Assert.AreEqual(0, source1.MediaLines.Count);
+            Assert.IsFalse(source1.MediaLines.Contains(mediaLine));
+            Assert.AreEqual(1, source2.MediaLines.Count);
+            Assert.IsTrue(source2.MediaLines.Contains(mediaLine));
+
+            // Remove it from the media line
+            mediaLine.Source = null;
+            Assert.IsNull(mediaLine.Source);
+            Assert.AreEqual(0, source2.MediaLines.Count);
+            Assert.IsFalse(source2.MediaLines.Contains(mediaLine));
+
+            // No-op
+            mediaLine.Source = null;
+
+            // Set an invalid source (wrong media kind)
+            Assert.Throws(() => mediaLine.Source = pc_go.AddComponent<DummyAudioSource>());
+
+            UnityEngine.Object.Destroy(pc_go);
+
+            // Terminate the coroutine.
+            yield return null;
+        }
+
+        [UnityTest(/*Description = "MediaLine.Receiver"*/)]
+        public IEnumerator SetReceiver()
+        {
+            // Create the peer connection
+            var pc_go = new GameObject("pc1");
+            var pc = pc_go.AddComponent<PeerConnection>();
+
+            // Create some video receivers
+            VideoReceiver receiver1 = pc_go.AddComponent<VideoReceiver>();
+            VideoReceiver receiver2 = pc_go.AddComponent<VideoReceiver>();
+            Assert.IsNull(receiver1.MediaLine);
+            Assert.IsNull(receiver2.MediaLine);
+
+            // Create the media line
+            MediaLine mediaLine = pc.AddMediaLine(MediaKind.Video);
+
+            // Assign a video receiver to the media line
+            mediaLine.Receiver = receiver1;
+            Assert.AreEqual(mediaLine.Receiver, receiver1);
+            Assert.AreEqual(receiver1.MediaLine, mediaLine);
+
+            // No-op
+            mediaLine.Receiver = receiver1;
+
+            // Assign another video receiver to the media line
+            mediaLine.Receiver = receiver2;
+            Assert.AreEqual(mediaLine.Receiver, receiver2);
+            Assert.IsNull(receiver1.MediaLine);
+            Assert.AreEqual(receiver2.MediaLine, mediaLine);
+
+            // Remove it from the media line
+            mediaLine.Receiver = null;
+            Assert.IsNull(mediaLine.Receiver);
+            Assert.IsNull(receiver2.MediaLine);
+
+            // No-op
+            mediaLine.Receiver = null;
+
+            // Set an invalid receiver (wrong media kind)
+            Assert.Throws(() => mediaLine.Receiver = pc_go.AddComponent());
+
+            UnityEngine.Object.Destroy(pc_go);
+
+            // Terminate the coroutine.
+            yield return null;
+        }
+
+        [UnityTest]
+        public IEnumerator DestroyPeerConnection()
+        {
+            // Create the component
+            var go = new GameObject("test_go");
+            go.SetActive(false); // prevent auto-activation of components
+            var pc = go.AddComponent<PeerConnection>();
+
+            // Add a media line while inactive.
+            VideoTrackSource source1 = go.AddComponent<MockVideoSource>();
+            VideoReceiver receiver1 = go.AddComponent<VideoReceiver>();
+            MediaLine ml1 = pc.AddMediaLine(MediaKind.Video);
+            ml1.Source = source1;
+            ml1.Receiver = receiver1;
+
+            // Media lines have not been set yet.
+            Assert.IsEmpty(source1.MediaLines);
+            Assert.IsNull(receiver1.MediaLine);
+
+            yield return PeerConnectionTests.InitializeAndWait(pc);
+
+            // Media lines have been set now.
+            Assert.AreEqual(source1.MediaLines.Single(), ml1);
+            Assert.AreEqual(receiver1.MediaLine, ml1);
+
+            // Add a media line while active.
+            VideoReceiver receiver2 = go.AddComponent<VideoReceiver>();
+            MediaLine ml2 = pc.AddMediaLine(MediaKind.Video);
+            ml2.Source = source1;
+            ml2.Receiver = receiver2;
+
+            // Media line #2 is connected.
+            Assert.AreEqual(source1.MediaLines[1], ml2);
+            Assert.AreEqual(receiver2.MediaLine, ml2);
+
+            // Disable the peer.
+            pc.enabled = false;
+
+            // Add a media line while disabled.
+            VideoReceiver receiver3 = go.AddComponent<VideoReceiver>();
+            MediaLine ml3 = pc.AddMediaLine(MediaKind.Video);
+            ml3.Source = source1;
+            ml3.Receiver = receiver3;
+
+            // Media line #3 is connected.
+            Assert.AreEqual(source1.MediaLines[2], ml3);
+            Assert.AreEqual(receiver3.MediaLine, ml3);
+
+            // Destroy the peer (wait a frame for destruction).
+            UnityEngine.Object.Destroy(pc);
+            yield return null;
+
+            // Source and receivers are not connected anymore.
+ Assert.IsEmpty(source1.MediaLines); + Assert.IsNull(receiver1.MediaLine); + Assert.IsNull(receiver2.MediaLine); + Assert.IsNull(receiver3.MediaLine); + + UnityEngine.Object.Destroy(go); + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Tests/Runtime/MediaLineTests.cs.meta b/com.microsoft.mixedreality.webrtc/Tests/Runtime/MediaLineTests.cs.meta new file mode 100644 index 0000000..68dd677 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Tests/Runtime/MediaLineTests.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: be381db07c5f0484a94e966867011010 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Tests/Runtime/Microsoft.MixedReality.WebRTC.Unity.Tests.Runtime.asmdef b/com.microsoft.mixedreality.webrtc/Tests/Runtime/Microsoft.MixedReality.WebRTC.Unity.Tests.Runtime.asmdef new file mode 100644 index 0000000..14d4ad2 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Tests/Runtime/Microsoft.MixedReality.WebRTC.Unity.Tests.Runtime.asmdef @@ -0,0 +1,18 @@ +{ + "name": "Microsoft.MixedReality.WebRTC.Unity.Tests.Runtime", + "references": [ + "Microsoft.MixedReality.WebRTC.Unity" + ], + "optionalUnityReferences": [ + "TestAssemblies" + ], + "includePlatforms": [], + "excludePlatforms": [], + "allowUnsafeCode": true, + "overrideReferences": false, + "precompiledReferences": [ + "Microsoft.MixedReality.WebRTC.dll" + ], + "autoReferenced": true, + "defineConstraints": [] +} \ No newline at end of file diff --git a/com.microsoft.mixedreality.webrtc/Tests/Runtime/Microsoft.MixedReality.WebRTC.Unity.Tests.Runtime.asmdef.meta b/com.microsoft.mixedreality.webrtc/Tests/Runtime/Microsoft.MixedReality.WebRTC.Unity.Tests.Runtime.asmdef.meta new file mode 100644 index 0000000..58757ff --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Tests/Runtime/Microsoft.MixedReality.WebRTC.Unity.Tests.Runtime.asmdef.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: 179cfdd31f655ac49852b543a3192c64 +AssemblyDefinitionImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Tests/Runtime/PeerConnectionTests.cs b/com.microsoft.mixedreality.webrtc/Tests/Runtime/PeerConnectionTests.cs new file mode 100644 index 0000000..8165d16 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Tests/Runtime/PeerConnectionTests.cs @@ -0,0 +1,191 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System; +using System.Collections; +using System.Linq; +using NUnit.Framework; +using UnityEngine; +using UnityEngine.Events; +using UnityEngine.TestTools; + +namespace Microsoft.MixedReality.WebRTC.Unity.Tests.Runtime +{ + public class PeerConnectionTests + { + public static IEnumerator InitializeAndWait(PeerConnection pc) + { + Debug.Assert(!pc.isActiveAndEnabled); + + // Subscribe to the event. 
+ bool isInitialized = false; + UnityAction listener = () => isInitialized = true; + pc.OnInitialized.AddListener(listener); + + // Activate + if (!pc.gameObject.activeSelf) + { + pc.gameObject.SetActive(true); + } + if (!pc.enabled) + { + pc.enabled = true; + } + + // Check the event was raised + var timeout = DateTime.Now + TimeSpan.FromSeconds(10); + yield return new WaitUntil(() => isInitialized || DateTime.Now > timeout); + pc.OnInitialized.RemoveListener(listener); + Assert.IsTrue(isInitialized); + Assert.IsNotNull(pc.Peer); + } + + public static IEnumerator ShutdownAndCheckEvent(PeerConnection pc) + { + // Subscribe to the event. + bool isShutdown = false; + UnityAction listener = () => isShutdown = true; + pc.OnShutdown.AddListener(listener); + + pc.enabled = false; + Assert.IsNull(pc.Peer); + + // Check the event was raised + var timeout = DateTime.Now + TimeSpan.FromSeconds(10); + yield return new WaitUntil(() => isShutdown || DateTime.Now > timeout); + pc.OnShutdown.RemoveListener(listener); + Assert.IsTrue(isShutdown); + } + + [SetUp] + public void Setup() + { + } + + [TearDown] + public void Shutdown() + { + } + + private void VerifyLocalShutdown(MediaLine ml) + { + // The source is not impacted, but tracks and transceiver are gone. + Assert.IsTrue(ml.Source.IsLive); + Assert.IsFalse(ml.Receiver.IsLive); + Assert.IsNull(ml.LocalTrack); + Assert.IsNull(ml.Transceiver); + } + + [UnityTest] + public IEnumerator SimplePeerConnection() + { + // Create the component + var go = new GameObject("test_go"); + go.SetActive(false); // prevent auto-activation of components + var pc = go.AddComponent(); + Assert.IsNull(pc.Peer); + + // Initialize + yield return InitializeAndWait(pc); + UnityEngine.Object.Destroy(go); + } + + [UnityTest] + public IEnumerator EnableAndDisable() + { + // Create the component + var go = new GameObject("test_go"); + go.SetActive(false); // prevent auto-activation of components + var pc = go.AddComponent(); + Assert.IsNull(pc.Peer); + for (int i = 0; i < 2; ++i) + { + yield return InitializeAndWait(pc); + yield return ShutdownAndCheckEvent(pc); + } + + UnityEngine.Object.Destroy(go); + } + + [UnityTest] + public IEnumerator EnableAndDisableWithTracks() + { + var pc1_go = new GameObject("pc1"); + pc1_go.SetActive(false); // prevent auto-activation of components + var pc1 = pc1_go.AddComponent(); + var pc2_go = new GameObject("pc2"); + pc2_go.SetActive(false); // prevent auto-activation of components + var pc2 = pc2_go.AddComponent(); + + // Create the signaler + var sig_go = new GameObject("signaler"); + var sig = sig_go.AddComponent(); + sig.Peer1 = pc1; + sig.Peer2 = pc2; + + // Create the video source on peer #1 + VideoTrackSource source1 = pc1_go.AddComponent(); + VideoReceiver receiver1 = pc1_go.AddComponent(); + MediaLine ml1 = pc1.AddMediaLine(MediaKind.Video); + ml1.SenderTrackName = "video_track_1"; + ml1.Source = source1; + ml1.Receiver = receiver1; + + // Create the video source on peer #2 + VideoTrackSource source2 = pc2_go.AddComponent(); + VideoReceiver receiver2 = pc2_go.AddComponent(); + MediaLine ml2 = pc2.AddMediaLine(MediaKind.Video); + ml2.SenderTrackName = "video_track_2"; + ml2.Source = source2; + ml2.Receiver = receiver2; + + // Init/quit twice. + for (int i = 0; i < 2; ++i) + { + // Initialize + yield return InitializeAndWait(pc1); + yield return InitializeAndWait(pc2); + + // Confirm the sources are ready. + Assert.IsTrue(source1.IsLive); + Assert.IsTrue(source2.IsLive); + + // Sender tracks will be created on connection. 
+ Assert.IsNull(ml1.LocalTrack); + Assert.IsNull(ml2.LocalTrack); + + // Connect + Assert.IsTrue(sig.StartConnection()); + yield return sig.WaitForConnection(millisecondsTimeout: 10000); + Assert.IsTrue(sig.IsConnected); + + // Wait a frame so that the Unity events for streams started can propagate + yield return null; + + // Check pairing + Assert.IsNotNull(receiver1.Transceiver); + Assert.IsTrue(receiver1.IsLive); + Assert.AreEqual(1, pc1.Peer.RemoteVideoTracks.Count()); + Assert.IsNotNull(receiver2.Transceiver); + Assert.IsTrue(receiver2.IsLive); + Assert.AreEqual(1, pc2.Peer.RemoteVideoTracks.Count()); + + // Shutdown peer #1 + pc1.enabled = false; + Assert.IsNull(pc1.Peer); + + // We cannot reliably detect remote shutdown, so only check local peer. + VerifyLocalShutdown(ml1); + + // Shutdown peer #2 + pc2.enabled = false; + Assert.IsNull(pc2.Peer); + + VerifyLocalShutdown(ml2); + } + UnityEngine.Object.Destroy(pc1_go); + UnityEngine.Object.Destroy(pc2_go); + UnityEngine.Object.Destroy(sig_go); + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Tests/Runtime/PeerConnectionTests.cs.meta b/com.microsoft.mixedreality.webrtc/Tests/Runtime/PeerConnectionTests.cs.meta new file mode 100644 index 0000000..b364c6a --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Tests/Runtime/PeerConnectionTests.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 14b0c5aeafb2c8c48ad0027e603f2d15 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Tests/Runtime/VideoSenderTests.cs b/com.microsoft.mixedreality.webrtc/Tests/Runtime/VideoSenderTests.cs new file mode 100644 index 0000000..1be2d30 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Tests/Runtime/VideoSenderTests.cs @@ -0,0 +1,92 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Collections; +using System.Linq; +using NUnit.Framework; +using UnityEngine; +using UnityEngine.TestTools; + +namespace Microsoft.MixedReality.WebRTC.Unity.Tests.Runtime +{ + public class VideoSenderTests + { + [SetUp] + public void Setup() + { + } + + [TearDown] + public void Shutdown() + { + // Note - this runs before OnDisabled/OnDestroy, so will always report false positives + Library.ReportLiveObjects(); + } + + [UnityTest(/*Description = "Capture starts automatically when the component is activated"*/)] + public IEnumerator CaptureStartsOnActivate() + { + // Create the peer connections + var pc_go = new GameObject("pc1"); + pc_go.SetActive(false); // prevent auto-activation of components + var pc = pc_go.AddComponent(); + + // Batch changes manually + pc.AutoCreateOfferOnRenegotiationNeeded = false; + + // Create the video track source + VideoTrackSource source = pc_go.AddComponent(); + + // Create the media line + MediaLine ml = pc.AddMediaLine(MediaKind.Video); + ml.SenderTrackName = "track_name"; + + // Assign the video source to the media line + ml.Source = source; + + // MediaLine has not been connected yet. 
+ Assert.IsEmpty(source.MediaLines); + + // Add event handlers to check IsStreaming state + source.VideoStreamStarted.AddListener((IVideoSource self) => + { + // Becomes true *before* this handler by design + Assert.IsTrue(source.IsLive); + }); + source.VideoStreamStopped.AddListener((IVideoSource self) => + { + // Still true until *after* this handler by design + Assert.IsTrue(source.IsLive); + }); + + // Confirm the source is not capturing yet because the component is inactive + Assert.IsFalse(source.IsLive); + + // Confirm the sender has no track because the component is inactive + Assert.IsNull(ml.LocalTrack); + + // Activate the game object and the video track source component on it + pc_go.SetActive(true); + + // MediaLine is connected. + Assert.AreEqual(source.MediaLines.Single(), ml); + + // Confirm the sender is capturing because the component is now active + Assert.IsTrue(source.IsLive); + + // Confirm the sender still has no track because there's no connection + Assert.IsNull(ml.LocalTrack); + + // Deactivate the game object and the video track source component on it + pc_go.SetActive(false); + + // Confirm the source stops streaming + Assert.IsFalse(source.IsLive); + + Object.Destroy(pc_go); + + // Terminate the coroutine. + yield return null; + } + } +} diff --git a/com.microsoft.mixedreality.webrtc/Tests/Runtime/VideoSenderTests.cs.meta b/com.microsoft.mixedreality.webrtc/Tests/Runtime/VideoSenderTests.cs.meta new file mode 100644 index 0000000..333317e --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Tests/Runtime/VideoSenderTests.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: d296c693cc9b4db4285f4ae8bd7b5d38 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.microsoft.mixedreality.webrtc/Tests/Runtime/VideoSourceTests.cs b/com.microsoft.mixedreality.webrtc/Tests/Runtime/VideoSourceTests.cs new file mode 100644 index 0000000..bab7114 --- /dev/null +++ b/com.microsoft.mixedreality.webrtc/Tests/Runtime/VideoSourceTests.cs @@ -0,0 +1,1013 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +using System.Collections; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using NUnit.Framework; +using UnityEditor.SceneManagement; +using UnityEngine; +using UnityEngine.TestTools; + +namespace Microsoft.MixedReality.WebRTC.Unity.Tests.Runtime +{ + public class VideoSourceTests + { + [SetUp] + public void Setup() + { + } + + [TearDown] + public void Shutdown() + { + // Force shutdown in case a test failure prevented cleaning-up some + // native resources, thereby locking the native module and preventing + // it from being unloaded/reloaded in the Unity editor. 
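+            // (As noted in the other fixtures, TearDown runs before OnDisable/OnDestroy,
+            // so this report may still list objects that are pending destruction.)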
+            Library.ReportLiveObjects();
+            //Library.ForceShutdown();
+        }
+
+        private IEnumerator SingleTwoWaysImpl(bool withSender1, bool withReceiver1, bool withSender2, bool withReceiver2)
+        {
+            // Create the peer connections
+            var pc1_go = new GameObject("pc1");
+            pc1_go.SetActive(false); // prevent auto-activation of components
+            var pc1 = pc1_go.AddComponent<PeerConnection>();
+            var pc2_go = new GameObject("pc2");
+            pc2_go.SetActive(false); // prevent auto-activation of components
+            var pc2 = pc2_go.AddComponent<PeerConnection>();
+
+            // Batch changes manually
+            pc1.AutoCreateOfferOnRenegotiationNeeded = false;
+            pc2.AutoCreateOfferOnRenegotiationNeeded = false;
+
+            // Create the signaler
+            var sig_go = new GameObject("signaler");
+            var sig = sig_go.AddComponent();
+            sig.Peer1 = pc1;
+            sig.Peer2 = pc2;
+
+            // Create the video sources on peer #1
+            VideoTrackSource source1 = null;
+            VideoReceiver receiver1 = null;
+            if (withSender1)
+            {
+                source1 = pc1_go.AddComponent<UniformColorVideoSource>();
+            }
+            if (withReceiver1)
+            {
+                receiver1 = pc1_go.AddComponent<VideoReceiver>();
+            }
+            MediaLine ml1 = pc1.AddMediaLine(MediaKind.Video);
+            ml1.SenderTrackName = "video_track_1";
+            ml1.Source = source1;
+            ml1.Receiver = receiver1;
+
+            // Create the video sources on peer #2
+            VideoTrackSource source2 = null;
+            VideoReceiver receiver2 = null;
+            if (withSender2)
+            {
+                source2 = pc2_go.AddComponent<UniformColorVideoSource>();
+            }
+            if (withReceiver2)
+            {
+                receiver2 = pc2_go.AddComponent<VideoReceiver>();
+            }
+            MediaLine ml2 = pc2.AddMediaLine(MediaKind.Video);
+            ml2.SenderTrackName = "video_track_2";
+            ml2.Source = source2;
+            ml2.Receiver = receiver2;
+
+            // Initialize
+            yield return PeerConnectionTests.InitializeAndWait(pc1);
+            yield return PeerConnectionTests.InitializeAndWait(pc2);
+
+            // Confirm the sources are ready
+            if (withSender1)
+            {
+                Assert.IsTrue(source1.IsLive);
+            }
+            if (withSender2)
+            {
+                Assert.IsTrue(source2.IsLive);
+            }
+
+            // Confirm the sender track is not created yet; it will be when the connection starts
+            Assert.IsNull(ml1.LocalTrack);
+            Assert.IsNull(ml2.LocalTrack);
+
+            // Confirm the receiver track is not added yet, since remote tracks are only instantiated
+            // as the result of a session negotiation.
+            if (withReceiver1)
+            {
+                Assert.IsNull(receiver1.Track);
+            }
+            if (withReceiver2)
+            {
+                Assert.IsNull(receiver2.Track);
+            }
+
+            // Connect
+            Assert.IsTrue(sig.StartConnection());
+            yield return sig.WaitForConnection(millisecondsTimeout: 10000);
+            Assert.IsTrue(sig.IsConnected);
+
+            // Wait a frame so that the Unity events for streams started can propagate
+            yield return null;
+
+            // Check pairing
+            {
+                bool hasSend1 = false;
+                bool hasSend2 = false;
+                bool hasRecv1 = false;
+                bool hasRecv2 = false;
+
+                // Local tracks exist if manually added (independently of negotiation)
+                Assert.AreEqual(withSender1 ? 1 : 0, pc1.Peer.LocalVideoTracks.Count());
+                Assert.AreEqual(withSender2 ? 1 : 0, pc2.Peer.LocalVideoTracks.Count());
+
+                // Remote tracks exist if paired with a sender on the remote peer
+                if (withReceiver1 && withSender2) // R <= S
+                {
+                    Assert.IsNotNull(receiver1.Track);
+                    Assert.IsNotNull(ml2.LocalTrack);
+                    hasRecv1 = true;
+                    hasSend2 = true;
+                }
+                if (withSender1 && withReceiver2) // S => R
+                {
+                    Assert.IsNotNull(ml1.LocalTrack);
+                    Assert.IsNotNull(receiver2.Track);
+                    hasSend1 = true;
+                    hasRecv2 = true;
+                }
+                Assert.AreEqual(hasRecv1 ? 1 : 0, pc1.Peer.RemoteVideoTracks.Count());
+                Assert.AreEqual(hasRecv2 ?
1 : 0, pc2.Peer.RemoteVideoTracks.Count()); + + // Transceivers are consistent with pairing + Assert.IsTrue(ml1.Transceiver.NegotiatedDirection.HasValue); + Assert.AreEqual(hasSend1, Transceiver.HasSend(ml1.Transceiver.NegotiatedDirection.Value)); + Assert.AreEqual(hasRecv1, Transceiver.HasRecv(ml1.Transceiver.NegotiatedDirection.Value)); + Assert.IsTrue(ml2.Transceiver.NegotiatedDirection.HasValue); + Assert.AreEqual(hasSend2, Transceiver.HasSend(ml2.Transceiver.NegotiatedDirection.Value)); + Assert.AreEqual(hasRecv2, Transceiver.HasRecv(ml2.Transceiver.NegotiatedDirection.Value)); + } + + Object.Destroy(pc1_go); + Object.Destroy(pc2_go); + Object.Destroy(sig_go); + } + + [UnityTest] + public IEnumerator SingleMissingAll() // _ = _ + { + yield return SingleTwoWaysImpl(withSender1: false, withReceiver1: false, withSender2: false, withReceiver2: false); + } + + [UnityTest] + public IEnumerator SingleOneWay() // S => R + { + yield return SingleTwoWaysImpl(withSender1: true, withReceiver1: false, withSender2: false, withReceiver2: true); + } + + [UnityTest] + public IEnumerator SingleOneWayMissingRecvOffer() // S = _ + { + yield return SingleTwoWaysImpl(withSender1: true, withReceiver1: false, withSender2: false, withReceiver2: false); + } + + [UnityTest] + public IEnumerator SingleOneWayMissingSenderOffer() // _ = R + { + yield return SingleTwoWaysImpl(withSender1: false, withReceiver1: false, withSender2: false, withReceiver2: true); + } + + [UnityTest] + public IEnumerator SingleTwoWaysMissingSenderOffer() // _R <= SR + { + yield return SingleTwoWaysImpl(withSender1: false, withReceiver1: true, withSender2: true, withReceiver2: true); + } + + [UnityTest] + public IEnumerator SingleTwoWaysMissingReceiverOffer() // SR <= S_ + { + yield return SingleTwoWaysImpl(withSender1: true, withReceiver1: true, withSender2: true, withReceiver2: false); + } + + [UnityTest] + public IEnumerator SingleTwoWaysMissingReceiverAnswer() // S_ => SR + { + yield return SingleTwoWaysImpl(withSender1: true, withReceiver1: false, withSender2: true, withReceiver2: true); + } + + [UnityTest] + public IEnumerator SingleTwoWaysMissingSenderAnswer() // SR => _R + { + yield return SingleTwoWaysImpl(withSender1: true, withReceiver1: true, withSender2: false, withReceiver2: true); + } + + [UnityTest] + public IEnumerator SingleTwoWays() // SR <=> SR + { + yield return SingleTwoWaysImpl(withSender1: true, withReceiver1: true, withSender2: true, withReceiver2: true); + } + + class PeerConfig + { + // Input + public Transceiver.Direction desiredDirection; + public MediaLine mediaLine; + public UniformColorVideoSource source; + public VideoReceiver receiver; + + // Output + public bool expectSender; + public bool expectReceiver; + } + + class MultiConfig + { + public PeerConfig peer1; + public PeerConfig peer2; + }; + + [UnityTest] + public IEnumerator Multi() + { + // Create the peer connections + var pc1_go = new GameObject("pc1"); + pc1_go.SetActive(false); // prevent auto-activation of components + var pc1 = pc1_go.AddComponent(); + var pc2_go = new GameObject("pc2"); + pc2_go.SetActive(false); // prevent auto-activation of components + var pc2 = pc2_go.AddComponent(); + + // Batch changes manually + pc1.AutoCreateOfferOnRenegotiationNeeded = false; + pc2.AutoCreateOfferOnRenegotiationNeeded = false; + + // Create the signaler + var sig_go = new GameObject("signaler"); + var sig = sig_go.AddComponent(); + sig.Peer1 = pc1; + sig.Peer2 = pc2; + + // Create the senders and receivers + // P1 P2 + // 0 : S => R + // 1 : SR <=> 
SR + // 2 : S => SR + // 3 : R <= SR + // 4 : S => R + + const int NumTransceivers = 5; + + // P1 has 4 senders added to it + int numLocal1 = 4; + + // P1 receives 2 tracks from the 3 P2 senders (one is refused) + int numRemote1 = 2; + + // P2 has 3 senders added to it + int numLocal2 = 3; + + // P2 receives 4 tracks from the 4 P1 senders + int numRemote2 = 4; + + var cfgs = new MultiConfig[NumTransceivers] + { + new MultiConfig { + peer1 = new PeerConfig { + desiredDirection = Transceiver.Direction.SendOnly, + expectSender = true, + expectReceiver = false, + }, + peer2 = new PeerConfig { + desiredDirection = Transceiver.Direction.ReceiveOnly, + expectSender = false, + expectReceiver = true, + } + }, + new MultiConfig { + peer1 = new PeerConfig { + desiredDirection = Transceiver.Direction.SendReceive, + expectSender = true, + expectReceiver = true, + }, + peer2 = new PeerConfig { + desiredDirection = Transceiver.Direction.SendReceive, + expectSender = true, + expectReceiver = true, + }, + }, + new MultiConfig { + peer1 = new PeerConfig { + desiredDirection = Transceiver.Direction.SendOnly, + expectSender = true, + expectReceiver = false, + }, + peer2 = new PeerConfig { + desiredDirection = Transceiver.Direction.SendReceive, + expectSender = true, + expectReceiver = true, + }, + }, + new MultiConfig { + peer1 = new PeerConfig { + desiredDirection = Transceiver.Direction.ReceiveOnly, + expectSender = false, + expectReceiver = true, + }, + peer2 = new PeerConfig { + desiredDirection = Transceiver.Direction.SendReceive, + expectSender = true, + expectReceiver = false, + }, + }, + new MultiConfig { + peer1 = new PeerConfig { + desiredDirection = Transceiver.Direction.SendOnly, + expectSender = true, + expectReceiver = false, + }, + peer2 = new PeerConfig { + desiredDirection = Transceiver.Direction.ReceiveOnly, + expectSender = false, + expectReceiver = true, + }, + }, + }; + for (int i = 0; i < NumTransceivers; ++i) + { + var cfg = cfgs[i]; + + { + MediaLine ml1 = pc1.AddMediaLine(MediaKind.Video); + var peer = cfg.peer1; + peer.mediaLine = ml1; + if (Transceiver.HasSend(peer.desiredDirection)) + { + var source1 = pc1_go.AddComponent(); + peer.source = source1; + ml1.Source = source1; + ml1.SenderTrackName = $"track{i}"; + } + if (Transceiver.HasRecv(peer.desiredDirection)) + { + var receiver1 = pc1_go.AddComponent(); + peer.receiver = receiver1; + ml1.Receiver = receiver1; + } + } + + { + MediaLine ml2 = pc2.AddMediaLine(MediaKind.Video); + var peer = cfg.peer2; + peer.mediaLine = ml2; + if (Transceiver.HasSend(peer.desiredDirection)) + { + var source2 = pc2_go.AddComponent(); + peer.source = source2; + ml2.Source = source2; + ml2.SenderTrackName = $"track{i}"; + } + if (Transceiver.HasRecv(peer.desiredDirection)) + { + var receiver2 = pc2_go.AddComponent(); + peer.receiver = receiver2; + ml2.Receiver = receiver2; + } + } + } + + // Initialize + yield return PeerConnectionTests.InitializeAndWait(pc1); + yield return PeerConnectionTests.InitializeAndWait(pc2); + + // Confirm the sources are ready + for (int i = 0; i < NumTransceivers; ++i) + { + var cfg = cfgs[i]; + if (cfg.peer1.expectSender) + { + Assert.IsNotNull(cfg.peer1.source, $"Missing source #{i} on Peer #1"); + Assert.IsTrue(cfg.peer1.source.IsLive, $"Source #{i} is not ready on Peer #1"); + Assert.IsNull(cfg.peer1.mediaLine.LocalTrack); // created during connection + } + if (cfg.peer2.expectSender) + { + Assert.IsNotNull(cfg.peer2.source, $"Missing source #{i} on Peer #2"); + Assert.IsTrue(cfg.peer2.source.IsLive, $"Source #{i} is 
+            var cfgs = new MultiConfig[NumTransceivers]
+            {
+                new MultiConfig {
+                    peer1 = new PeerConfig {
+                        desiredDirection = Transceiver.Direction.SendOnly,
+                        expectSender = true,
+                        expectReceiver = false,
+                    },
+                    peer2 = new PeerConfig {
+                        desiredDirection = Transceiver.Direction.ReceiveOnly,
+                        expectSender = false,
+                        expectReceiver = true,
+                    },
+                },
+                new MultiConfig {
+                    peer1 = new PeerConfig {
+                        desiredDirection = Transceiver.Direction.SendReceive,
+                        expectSender = true,
+                        expectReceiver = true,
+                    },
+                    peer2 = new PeerConfig {
+                        desiredDirection = Transceiver.Direction.SendReceive,
+                        expectSender = true,
+                        expectReceiver = true,
+                    },
+                },
+                new MultiConfig {
+                    peer1 = new PeerConfig {
+                        desiredDirection = Transceiver.Direction.SendOnly,
+                        expectSender = true,
+                        expectReceiver = false,
+                    },
+                    peer2 = new PeerConfig {
+                        desiredDirection = Transceiver.Direction.SendReceive,
+                        expectSender = true,
+                        expectReceiver = true,
+                    },
+                },
+                new MultiConfig {
+                    peer1 = new PeerConfig {
+                        desiredDirection = Transceiver.Direction.ReceiveOnly,
+                        expectSender = false,
+                        expectReceiver = true,
+                    },
+                    peer2 = new PeerConfig {
+                        desiredDirection = Transceiver.Direction.SendReceive,
+                        expectSender = true,
+                        expectReceiver = false,
+                    },
+                },
+                new MultiConfig {
+                    peer1 = new PeerConfig {
+                        desiredDirection = Transceiver.Direction.SendOnly,
+                        expectSender = true,
+                        expectReceiver = false,
+                    },
+                    peer2 = new PeerConfig {
+                        desiredDirection = Transceiver.Direction.ReceiveOnly,
+                        expectSender = false,
+                        expectReceiver = true,
+                    },
+                },
+            };
+            for (int i = 0; i < NumTransceivers; ++i)
+            {
+                var cfg = cfgs[i];
+
+                {
+                    MediaLine ml1 = pc1.AddMediaLine(MediaKind.Video);
+                    var peer = cfg.peer1;
+                    peer.mediaLine = ml1;
+                    if (Transceiver.HasSend(peer.desiredDirection))
+                    {
+                        var source1 = pc1_go.AddComponent<UniformColorVideoSource>();
+                        peer.source = source1;
+                        ml1.Source = source1;
+                        ml1.SenderTrackName = $"track{i}";
+                    }
+                    if (Transceiver.HasRecv(peer.desiredDirection))
+                    {
+                        var receiver1 = pc1_go.AddComponent<VideoReceiver>();
+                        peer.receiver = receiver1;
+                        ml1.Receiver = receiver1;
+                    }
+                }
+
+                {
+                    MediaLine ml2 = pc2.AddMediaLine(MediaKind.Video);
+                    var peer = cfg.peer2;
+                    peer.mediaLine = ml2;
+                    if (Transceiver.HasSend(peer.desiredDirection))
+                    {
+                        var source2 = pc2_go.AddComponent<UniformColorVideoSource>();
+                        peer.source = source2;
+                        ml2.Source = source2;
+                        ml2.SenderTrackName = $"track{i}";
+                    }
+                    if (Transceiver.HasRecv(peer.desiredDirection))
+                    {
+                        var receiver2 = pc2_go.AddComponent<VideoReceiver>();
+                        peer.receiver = receiver2;
+                        ml2.Receiver = receiver2;
+                    }
+                }
+            }
+
+            // Initialize
+            yield return PeerConnectionTests.InitializeAndWait(pc1);
+            yield return PeerConnectionTests.InitializeAndWait(pc2);
+
+            // Confirm the sources are ready
+            for (int i = 0; i < NumTransceivers; ++i)
+            {
+                var cfg = cfgs[i];
+                if (cfg.peer1.expectSender)
+                {
+                    Assert.IsNotNull(cfg.peer1.source, $"Missing source #{i} on Peer #1");
+                    Assert.IsTrue(cfg.peer1.source.IsLive, $"Source #{i} is not ready on Peer #1");
+                    Assert.IsNull(cfg.peer1.mediaLine.LocalTrack); // created during connection
+                }
+                if (cfg.peer2.expectSender)
+                {
+                    Assert.IsNotNull(cfg.peer2.source, $"Missing source #{i} on Peer #2");
+                    Assert.IsTrue(cfg.peer2.source.IsLive, $"Source #{i} is not ready on Peer #2");
+                    Assert.IsNull(cfg.peer2.mediaLine.LocalTrack); // created during connection
+                }
+            }
+
+            // Connect
+            Assert.IsTrue(sig.StartConnection());
+            yield return sig.WaitForConnection(millisecondsTimeout: 60000);
+            Assert.IsTrue(sig.IsConnected);
+
+            // Wait a frame so that the Unity events for streams started can propagate
+            yield return null;
+
+            // Check pairing
+            Assert.AreEqual(numLocal1, pc1.Peer.LocalVideoTracks.Count());
+            Assert.AreEqual(numRemote1, pc1.Peer.RemoteVideoTracks.Count());
+            Assert.AreEqual(numLocal2, pc2.Peer.LocalVideoTracks.Count());
+            Assert.AreEqual(numRemote2, pc2.Peer.RemoteVideoTracks.Count());
+            for (int i = 0; i < NumTransceivers; ++i)
+            {
+                var cfg = cfgs[i];
+                if (cfg.peer1.expectSender)
+                {
+                    Assert.IsNotNull(cfg.peer1.mediaLine.LocalTrack, $"Transceiver #{i} missing local sender track on Peer #1");
+                }
+                if (cfg.peer1.expectReceiver)
+                {
+                    Assert.IsNotNull(cfg.peer1.receiver.Track, $"Transceiver #{i} missing remote track on Peer #1");
+                }
+                if (cfg.peer2.expectSender)
+                {
+                    Assert.IsNotNull(cfg.peer2.mediaLine.LocalTrack, $"Transceiver #{i} missing local sender track on Peer #2");
+                }
+                if (cfg.peer2.expectReceiver)
+                {
+                    Assert.IsNotNull(cfg.peer2.receiver.Track, $"Transceiver #{i} missing remote track on Peer #2");
+                }
+            }
+
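+            // Changing MediaLine.Source/Receiver below only updates the desired
+            // direction of each transceiver; the changes take effect at the next
+            // negotiation, which is triggered manually here since automatic offers
+            // are disabled on both peers.
+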
+            // Change the senders and receivers and transceivers direction
+            //           old                  new
+            //      P1         P2        P1         P2
+            // 0 :  S    =>    R               =    R     P1 stops sending
+            // 1 :  SR  <=>    SR        SR   =>    R     P2 stops sending
+            // 2 :  S    =>    SR        SR  <=>    SR    P1 starts receiving
+            // 3 :  R    <=    SR        SR  <=>    SR    P1 starts sending
+            // 4 :  S    =>    R         S    =           P2 stops receiving
+
+            numLocal1 = 4;
+            numRemote1 = 2;
+            numLocal2 = 2;
+            numRemote2 = 3;
+
+            // #0 - P1 stops sending
+            {
+                var cfg = cfgs[0];
+                cfg.peer1.mediaLine.Source = null;
+                cfg.peer1.expectSender = false;
+                cfg.peer1.expectReceiver = false;
+                cfg.peer2.expectSender = false;
+                cfg.peer2.expectReceiver = false;
+            }
+
+            // #1 - P2 stops sending
+            {
+                var cfg = cfgs[1];
+                cfg.peer2.mediaLine.Source = null;
+                cfg.peer1.expectSender = true;
+                cfg.peer1.expectReceiver = false;
+                cfg.peer2.expectSender = false;
+                cfg.peer2.expectReceiver = true;
+            }
+
+            // #2 - P1 starts receiving
+            {
+                var cfg = cfgs[2];
+                var receiver1 = pc1_go.AddComponent<VideoReceiver>();
+                cfg.peer1.receiver = receiver1;
+                cfg.peer1.mediaLine.Receiver = receiver1;
+                cfg.peer1.expectSender = true;
+                cfg.peer1.expectReceiver = true;
+                cfg.peer2.expectSender = true;
+                cfg.peer2.expectReceiver = true;
+            }
+
+            // #3 - P1 starts sending
+            {
+                var cfg = cfgs[3];
+                var source1 = pc1_go.AddComponent<UniformColorVideoSource>();
+                cfg.peer1.source = source1;
+                cfg.peer1.mediaLine.Source = source1;
+                cfg.peer1.expectSender = true;
+                cfg.peer1.expectReceiver = true;
+                cfg.peer2.expectSender = true;
+                cfg.peer2.expectReceiver = true;
+            }
+
+            // #4 - P2 stops receiving
+            {
+                var cfg = cfgs[4];
+                cfg.peer2.mediaLine.Receiver = null;
+                cfg.peer1.expectSender = false;
+                cfg.peer1.expectReceiver = false;
+                cfg.peer2.expectSender = false;
+                cfg.peer2.expectReceiver = false;
+            }
+
+            // Renegotiate
+            Assert.IsTrue(sig.StartConnection());
+            yield return sig.WaitForConnection(millisecondsTimeout: 60000);
+            Assert.IsTrue(sig.IsConnected);
+
+            // Wait a frame so that the Unity events for streams started can propagate
+            yield return null;
+
+            // Check pairing
+            Assert.AreEqual(numLocal1, pc1.Peer.LocalVideoTracks.Count());
+            Assert.AreEqual(numRemote1, pc1.Peer.RemoteVideoTracks.Count());
+            Assert.AreEqual(numLocal2, pc2.Peer.LocalVideoTracks.Count());
+            Assert.AreEqual(numRemote2, pc2.Peer.RemoteVideoTracks.Count());
+            for (int i = 0; i < NumTransceivers; ++i)
+            {
+                var cfg = cfgs[i];
+                if (cfg.peer1.expectReceiver)
+                {
+                    Assert.IsNotNull(cfg.peer1.receiver.Track, $"Transceiver #{i} missing remote track on Peer #1");
+                }
+                if (cfg.peer2.expectReceiver)
+                {
+                    Assert.IsNotNull(cfg.peer2.receiver.Track, $"Transceiver #{i} missing remote track on Peer #2");
+                }
+            }
+
+            Object.Destroy(pc1_go);
+            Object.Destroy(pc2_go);
+            Object.Destroy(sig_go);
+        }
+
+        /// Negotiate 3 sessions in a row:
+        ///      P1        P2
+        /// a.   S    =>   R     One way P1 => P2
+        /// b.        =    R     Stop sending; transceiver goes to inactive
+        /// c.   S    =>   R     Resume sending; transceiver re-activates
+        [UnityTest]
+        public IEnumerator Negotiate_SxS_to_R()
+        {
+            // Create the peer connections
+            var pc1_go = new GameObject("pc1");
+            pc1_go.SetActive(false); // prevent auto-activation of components
+            var pc1 = pc1_go.AddComponent<PeerConnection>();
+            var pc2_go = new GameObject("pc2");
+            pc2_go.SetActive(false); // prevent auto-activation of components
+            var pc2 = pc2_go.AddComponent<PeerConnection>();
+
+            // Batch changes manually
+            pc1.AutoCreateOfferOnRenegotiationNeeded = false;
+            pc2.AutoCreateOfferOnRenegotiationNeeded = false;
+
+            // Create the signaler
+            var sig_go = new GameObject("signaler");
+            var sig = sig_go.AddComponent<LocalOnlySignaler>();
+            sig.Peer1 = pc1;
+            sig.Peer2 = pc2;
+
+            // Create the sender video source
+            var source1 = pc1_go.AddComponent<UniformColorVideoSource>();
+            //source1.SenderTrackName = "track_name";
+            MediaLine ml1 = pc1.AddMediaLine(MediaKind.Video);
+            Assert.IsNotNull(ml1);
+            Assert.AreEqual(MediaKind.Video, ml1.MediaKind);
+            ml1.Source = source1;
+
+            // Create the video receiver
+            var receiver2 = pc2_go.AddComponent<VideoReceiver>();
+            MediaLine ml2 = pc2.AddMediaLine(MediaKind.Video);
+            Assert.IsNotNull(ml2);
+            Assert.AreEqual(MediaKind.Video, ml2.MediaKind);
+            ml2.Receiver = receiver2;
+
+            // Initialize
+            yield return PeerConnectionTests.InitializeAndWait(pc1);
+            yield return PeerConnectionTests.InitializeAndWait(pc2);
+
+            // Confirm the source is ready, but the sender track is not created yet
+            Assert.IsTrue(source1.IsLive);
+            Assert.IsNull(ml1.LocalTrack);
+
+            // Connect
+            Assert.IsTrue(sig.StartConnection());
+            yield return sig.WaitForConnection(millisecondsTimeout: 60000);
+            Assert.IsTrue(sig.IsConnected);
+
+            // Wait a frame so that the Unity events for streams started can propagate
+            yield return null;
+
+            // Check transceiver update
+            Assert.IsNotNull(ml1.Transceiver); // first negotiation creates this
+            Assert.IsNotNull(ml2.Transceiver); // first negotiation creates this
+            Assert.AreEqual(Transceiver.Direction.SendOnly, ml1.Transceiver.DesiredDirection);
+            Assert.AreEqual(Transceiver.Direction.ReceiveOnly, ml2.Transceiver.DesiredDirection);
+            Assert.IsTrue(ml1.Transceiver.NegotiatedDirection.HasValue);
+            Assert.IsTrue(ml2.Transceiver.NegotiatedDirection.HasValue);
+            Assert.AreEqual(Transceiver.Direction.SendOnly, ml1.Transceiver.NegotiatedDirection.Value);
+            Assert.AreEqual(Transceiver.Direction.ReceiveOnly, ml2.Transceiver.NegotiatedDirection.Value);
+            var video_tr1 = ml1.Transceiver;
+            Assert.IsNotNull(video_tr1);
+            Assert.AreEqual(MediaKind.Video, video_tr1.MediaKind);
+            var video_tr2 = ml2.Transceiver;
+            Assert.IsNotNull(video_tr2);
+            Assert.AreEqual(MediaKind.Video, video_tr2.MediaKind);
+
+            // Check track pairing
+            Assert.IsNotNull(ml1.LocalTrack); // created during connection
+            Assert.IsNotNull(receiver2.Track); // paired
+            Assert.AreEqual(video_tr1.LocalTrack, ml1.LocalTrack); // sender attached
+            Assert.AreEqual(video_tr2.RemoteTrack, receiver2.Track); // receiver paired
+
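+            // The two renegotiations below toggle the sender off and back on. Note
+            // that the negotiated direction is the intersection of what the two peers
+            // desire (e.g. Inactive offer x ReceiveOnly answer = Inactive), as the
+            // assertions after each renegotiation verify.
+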
+            // ====== Remove sender ==============================
+
+            // Remove the sender from #1
+            ml1.Source = null;
+
+            // Renegotiate
+            Assert.IsTrue(sig.StartConnection());
+            yield return sig.WaitForConnection(millisecondsTimeout: 60000);
+            Assert.IsTrue(sig.IsConnected);
+
+            // Wait a frame so that the Unity events for streams started can propagate
+            yield return null;
+
+            // Check transceiver update
+            Assert.IsNotNull(ml1.Transceiver); // immutable
+            Assert.IsNotNull(ml2.Transceiver); // immutable
+            Assert.AreEqual(Transceiver.Direction.Inactive, ml1.Transceiver.DesiredDirection); // set ml1 sender to null above
+            Assert.AreEqual(Transceiver.Direction.ReceiveOnly, ml2.Transceiver.DesiredDirection); // no change
+            Assert.IsTrue(ml1.Transceiver.NegotiatedDirection.HasValue);
+            Assert.IsTrue(ml2.Transceiver.NegotiatedDirection.HasValue);
+            Assert.AreEqual(Transceiver.Direction.Inactive, ml1.Transceiver.NegotiatedDirection.Value); // desired
+            Assert.AreEqual(Transceiver.Direction.Inactive, ml2.Transceiver.NegotiatedDirection.Value); // inactive * recvonly = inactive
+            Assert.AreEqual(video_tr1, ml1.Transceiver); // immutable
+            Assert.AreEqual(video_tr2, ml2.Transceiver); // immutable
+
+            // Check track pairing
+            Assert.IsNull(ml1.LocalTrack); // no source, so media line destroyed its sender track...
+            Assert.IsNull(video_tr1.LocalTrack); // ...after detaching it from the transceiver
+            Assert.IsNull(receiver2.Track); // transceiver is inactive and remote tracks are not owned
+            Assert.IsNull(video_tr2.RemoteTrack); // unpaired
+
+            // ====== Re-add sender ==============================
+
+            // Re-add the sender on #1
+            ml1.Source = source1;
+
+            // Renegotiate
+            Assert.IsTrue(sig.StartConnection());
+            yield return sig.WaitForConnection(millisecondsTimeout: 60000);
+            Assert.IsTrue(sig.IsConnected);
+
+            // Wait a frame so that the Unity events for streams started can propagate
+            yield return null;
+
+            // Check transceiver update
+            Assert.IsNotNull(ml1.Transceiver); // immutable
+            Assert.IsNotNull(ml2.Transceiver); // immutable
+            Assert.AreEqual(Transceiver.Direction.SendOnly, ml1.Transceiver.DesiredDirection); // set ml1 sender above
+            Assert.AreEqual(Transceiver.Direction.ReceiveOnly, ml2.Transceiver.DesiredDirection); // no change
+            Assert.IsTrue(ml1.Transceiver.NegotiatedDirection.HasValue);
+            Assert.IsTrue(ml2.Transceiver.NegotiatedDirection.HasValue);
+            Assert.AreEqual(Transceiver.Direction.SendOnly, ml1.Transceiver.NegotiatedDirection.Value); // desired
+            Assert.AreEqual(Transceiver.Direction.ReceiveOnly, ml2.Transceiver.NegotiatedDirection.Value); // accepted
+            Assert.AreEqual(video_tr1, ml1.Transceiver); // immutable
+            Assert.AreEqual(video_tr2, ml2.Transceiver); // immutable
+
+            // Check track pairing
+            Assert.IsNotNull(ml1.LocalTrack); // new source again, media line re-created a sender track
+            Assert.IsNotNull(ml1.LocalTrack.Transceiver); // ...and attached it to the transceiver...
+            Assert.AreEqual(video_tr1, ml1.LocalTrack.Transceiver);
+            Assert.IsNotNull(video_tr1.LocalTrack); // ...and conversely
+            Assert.AreEqual(ml1.LocalTrack, video_tr1.LocalTrack);
+            Assert.IsNotNull(receiver2.Track); // transceiver is active again and remote track was re-created
+            Assert.IsNotNull(receiver2.Track.Transceiver);
+            Assert.AreEqual(video_tr2, receiver2.Track.Transceiver);
+            Assert.IsNotNull(video_tr2.RemoteTrack); // re-paired
+            Assert.AreEqual(receiver2.Track, video_tr2.RemoteTrack);
+
+            Object.Destroy(pc1_go);
+            Object.Destroy(pc2_go);
+            Object.Destroy(sig_go);
+        }
+
+        /// <summary>
+        /// Test interleaving of media transceivers and data channels, which produces a
+        /// discontinuity in the media line indices of the media transceivers, since data
+        /// channels also consume a media line. This test ensures the transceiver indexing
+        /// and pairing is robust to those discontinuities.
+        /// </summary>
+        [UnityTest]
+        public IEnumerator InterleavedMediaAndData()
+        {
+            // Create the peer connections
+            var pc1_go = new GameObject("pc1");
+            pc1_go.SetActive(false); // prevent auto-activation of components
+            var pc1 = pc1_go.AddComponent<PeerConnection>();
+            var pc2_go = new GameObject("pc2");
+            pc2_go.SetActive(false); // prevent auto-activation of components
+            var pc2 = pc2_go.AddComponent<PeerConnection>();
+
+            // Batch changes manually
+            pc1.AutoCreateOfferOnRenegotiationNeeded = false;
+            pc2.AutoCreateOfferOnRenegotiationNeeded = false;
+
+            // Create the signaler
+            var sig_go = new GameObject("signaler");
+            var sig = sig_go.AddComponent<LocalOnlySignaler>();
+            sig.Peer1 = pc1;
+            sig.Peer2 = pc2;
+
+            // Create the sender video source
+            var source1 = pc1_go.AddComponent<UniformColorVideoSource>();
+            //source1.SenderTrackName = "track_name";
+            MediaLine ml1 = pc1.AddMediaLine(MediaKind.Video);
+            Assert.IsNotNull(ml1);
+            ml1.Source = source1;
+
+            // Create the video receiver
+            var receiver2 = pc2_go.AddComponent<VideoReceiver>();
+            MediaLine ml2 = pc2.AddMediaLine(MediaKind.Video);
+            Assert.IsNotNull(ml2);
+            ml2.Receiver = receiver2;
+
+            // Initialize
+            yield return PeerConnectionTests.InitializeAndWait(pc1);
+            yield return PeerConnectionTests.InitializeAndWait(pc2);
+
+            // Confirm the source is ready, but the sender track is not created yet
+            Assert.IsTrue(source1.IsLive);
+            Assert.IsNull(ml1.LocalTrack);
+
+            // Create a dummy out-of-band data channel to force SCTP negotiation
+            // during the first offer, and be able to add some in-band data channels
+            // later via subsequent SDP session negotiations.
+            {
+                Task<DataChannel> t1 = pc1.Peer.AddDataChannelAsync(42, "dummy", ordered: true, reliable: true);
+                Task<DataChannel> t2 = pc2.Peer.AddDataChannelAsync(42, "dummy", ordered: true, reliable: true);
+                Assert.IsTrue(t1.Wait(millisecondsTimeout: 10000));
+                Assert.IsTrue(t2.Wait(millisecondsTimeout: 10000));
+            }
+
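+            // (An out-of-band channel uses a negotiated id -- here 42 -- that both
+            // peers agree on outside of SDP, so each side can create its endpoint
+            // before connecting; it still requires the SCTP transport, which is why
+            // creating it before the first offer forces SCTP negotiation. In-band
+            // channels instead announce themselves to the remote peer over the
+            // already-negotiated SCTP transport.)
+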
+            // Connect
+            Assert.IsTrue(sig.StartConnection());
+            yield return sig.WaitForConnection(millisecondsTimeout: 60000);
+            Assert.IsTrue(sig.IsConnected);
+
+            // Wait a frame so that the Unity events for streams started can propagate
+            yield return null;
+
+            // Check transceiver update
+            var video_tr1 = ml1.Transceiver;
+            Assert.IsNotNull(video_tr1);
+            var video_tr2 = ml2.Transceiver;
+            Assert.IsNotNull(video_tr2);
+            Assert.AreEqual(0, video_tr1.MlineIndex);
+            Assert.AreEqual(0, video_tr2.MlineIndex);
+
+            // ====== Add in-band data channel ====================================
+
+            // Add an in-band data channel on peer #1
+            DataChannel dc1;
+            {
+                Task<DataChannel> t1 = pc1.Peer.AddDataChannelAsync("test_data_channel", ordered: true, reliable: true);
+                Assert.IsTrue(t1.Wait(millisecondsTimeout: 10000));
+                dc1 = t1.Result;
+            }
+
+            // Prepare to receive a new data channel on peer #2
+            DataChannel dc2 = null;
+            var dc2_added_ev = new ManualResetEventSlim(initialState: false);
+            pc2.Peer.DataChannelAdded += (DataChannel channel) => { dc2 = channel; dc2_added_ev.Set(); };
+
+            // Renegotiate; data channel will consume media line #1
+            Assert.IsTrue(sig.StartConnection());
+            yield return sig.WaitForConnection(millisecondsTimeout: 60000);
+            Assert.IsTrue(sig.IsConnected);
+
+            // Do not assume that connecting is enough to get the data channel, as callbacks are
+            // asynchronously invoked. Instead explicitly wait for the created event to be raised.
+            Assert.IsTrue(dc2_added_ev.Wait(millisecondsTimeout: 10000));
+
+            // Check the data channel is ready
+            Assert.IsNotNull(dc2);
+            Assert.AreEqual(dc1.ID, dc2.ID);
+            Assert.AreEqual(DataChannel.ChannelState.Open, dc1.State);
+            Assert.AreEqual(DataChannel.ChannelState.Open, dc2.State);
+
+            // ====== Add an extra media transceiver ==============================
+
+            // Create the video receiver
+            var receiver1b = pc1_go.AddComponent<VideoReceiver>();
+            MediaLine ml1b = pc1.AddMediaLine(MediaKind.Video);
+            Assert.IsNotNull(ml1b);
+            ml1b.Receiver = receiver1b;
+
+            // Create the sender video source
+            var source2b = pc2_go.AddComponent<UniformColorVideoSource>();
+            //source2b.SenderTrackName = "track_name_2";
+            MediaLine ml2b = pc2.AddMediaLine(MediaKind.Video);
+            Assert.IsNotNull(ml2b);
+            ml2b.Source = source2b;
+
+            // Renegotiate
+            Assert.IsTrue(sig.StartConnection());
+            yield return sig.WaitForConnection(millisecondsTimeout: 60000);
+            Assert.IsTrue(sig.IsConnected);
+
+            // Wait a frame so that the Unity events for streams started can propagate
+            yield return null;
+
+            // Check transceiver update
+            var video_tr1b = ml1b.Transceiver;
+            Assert.IsNotNull(video_tr1b);
+            var video_tr2b = ml2b.Transceiver;
+            Assert.IsNotNull(video_tr2b);
+            Assert.AreEqual(2, video_tr1b.MlineIndex);
+            Assert.AreEqual(2, video_tr2b.MlineIndex);
+
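+            // Media line #1 is the application line consumed by the data channels, so
+            // the video transceivers added after it pair up at m-line index 2, not 1;
+            // this is the discontinuity this test exercises.
+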
+            Object.Destroy(pc1_go);
+            Object.Destroy(pc2_go);
+            Object.Destroy(sig_go);
+        }
+
+        [UnityTest]
+        public IEnumerator SwapSource()
+        {
+            // Create the peer connections
+            var pc1_go = new GameObject("pc1");
+            pc1_go.SetActive(false); // prevent auto-activation of components
+            var pc1 = pc1_go.AddComponent<PeerConnection>();
+            var pc2_go = new GameObject("pc2");
+            pc2_go.SetActive(false); // prevent auto-activation of components
+            var pc2 = pc2_go.AddComponent<PeerConnection>();
+
+            // Batch changes manually
+            pc1.AutoCreateOfferOnRenegotiationNeeded = false;
+            pc2.AutoCreateOfferOnRenegotiationNeeded = false;
+
+            // Create the signaler
+            var sig_go = new GameObject("signaler");
+            var sig = sig_go.AddComponent<LocalOnlySignaler>();
+            sig.Peer1 = pc1;
+            sig.Peer2 = pc2;
+
+            // Create the video sources on peer #1
+            VideoTrackSource source1 = pc1_go.AddComponent<UniformColorVideoSource>();
+            VideoTrackSource source2 = pc1_go.AddComponent<UniformColorVideoSource>();
+
+            MediaLine ml = pc1.AddMediaLine(MediaKind.Video);
+            ml.SenderTrackName = "video_track_1";
+            ml.Source = source1;
+
+            // Create the receiver on peer #2
+            {
+                VideoReceiver receiver = pc2_go.AddComponent<VideoReceiver>();
+                MediaLine receiverMl = pc2.AddMediaLine(MediaKind.Video);
+                receiverMl.Receiver = receiver;
+            }
+
+            // Initialize
+            yield return PeerConnectionTests.InitializeAndWait(pc1);
+            yield return PeerConnectionTests.InitializeAndWait(pc2);
+
+            // Connect
+            Assert.IsTrue(sig.StartConnection());
+            yield return sig.WaitForConnection(millisecondsTimeout: 10000);
+
+            // Wait a frame so that the Unity events for streams started can propagate
+            yield return null;
+
+            // source1 is correctly wired.
+            Assert.AreEqual(ml.Transceiver.DesiredDirection, Transceiver.Direction.SendOnly);
+            Assert.AreEqual(pc1.Peer.LocalVideoTracks.Count(), 1);
+            Assert.IsNotNull(ml.LocalTrack);
+            Assert.AreEqual(((LocalVideoTrack)ml.LocalTrack).Source, source1.Source);
+            Assert.AreEqual(source1.MediaLines.Single(), ml);
+
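+            // VideoTrackSource.MediaLines lists the media lines currently using the
+            // source, so the attach/detach operations below can also be verified from
+            // the source side, not only from the media line and the native track.
+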
+            // Reset source
+            ml.Source = null;
+
+            // source1 has been detached.
+            Assert.IsNull(ml.LocalTrack);
+            Assert.AreEqual(ml.Transceiver.DesiredDirection, Transceiver.Direction.Inactive);
+            Assert.AreEqual(pc1.Peer.LocalVideoTracks.Count(), 0);
+            Assert.IsEmpty(source1.MediaLines);
+
+            // Set source2.
+            ml.Source = source2;
+
+            // source2 is correctly wired.
+            Assert.IsNotNull(ml.LocalTrack);
+            Assert.AreEqual(ml.Transceiver.DesiredDirection, Transceiver.Direction.SendOnly);
+            Assert.AreEqual(pc1.Peer.LocalVideoTracks.Count(), 1);
+            Assert.AreEqual(source2.MediaLines.Single(), ml);
+            Assert.AreEqual(((LocalVideoTrack)ml.LocalTrack).Source, source2.Source);
+
+            // Swap source2 with source1.
+            ml.Source = source1;
+
+            // source1 is correctly wired.
+            Assert.IsNotNull(ml.LocalTrack);
+            Assert.AreEqual(ml.Transceiver.DesiredDirection, Transceiver.Direction.SendOnly);
+            Assert.AreEqual(pc1.Peer.LocalVideoTracks.Count(), 1);
+            Assert.AreEqual(source1.MediaLines.Single(), ml);
+            Assert.AreEqual(((LocalVideoTrack)ml.LocalTrack).Source, source1.Source);
+
+            // source2 has been detached.
+            Assert.IsEmpty(source2.MediaLines);
+
+            Object.Destroy(pc1_go);
+            Object.Destroy(pc2_go);
+        }
+
+        [UnityTest]
+        public IEnumerator SwapReceiver()
+        {
+            // Create the peer connections
+            var pc1_go = new GameObject("pc1");
+            pc1_go.SetActive(false); // prevent auto-activation of components
+            var pc1 = pc1_go.AddComponent<PeerConnection>();
+            var pc2_go = new GameObject("pc2");
+            pc2_go.SetActive(false); // prevent auto-activation of components
+            var pc2 = pc2_go.AddComponent<PeerConnection>();
+
+            // Batch changes manually
+            pc1.AutoCreateOfferOnRenegotiationNeeded = false;
+            pc2.AutoCreateOfferOnRenegotiationNeeded = false;
+
+            // Create the signaler
+            var sig_go = new GameObject("signaler");
+            var sig = sig_go.AddComponent<LocalOnlySignaler>();
+            sig.Peer1 = pc1;
+            sig.Peer2 = pc2;
+
+            // Create the video source on peer #1
+            {
+                VideoTrackSource source = pc1_go.AddComponent<UniformColorVideoSource>();
+                MediaLine senderMl = pc1.AddMediaLine(MediaKind.Video);
+                senderMl.SenderTrackName = "video_track_1";
+                senderMl.Source = source;
+            }
+
+            // Create the receivers on peer #2
+            VideoReceiver receiver1 = pc2_go.AddComponent<VideoReceiver>();
+            VideoReceiver receiver2 = pc2_go.AddComponent<VideoReceiver>();
+            MediaLine ml = pc2.AddMediaLine(MediaKind.Video);
+            ml.Receiver = receiver1;
+
+            // Initialize
+            yield return PeerConnectionTests.InitializeAndWait(pc1);
+            yield return PeerConnectionTests.InitializeAndWait(pc2);
+
+            // Connect
+            Assert.IsTrue(sig.StartConnection());
+            yield return sig.WaitForConnection(millisecondsTimeout: 10000);
+
+            // Wait a frame so that the Unity events for streams started can propagate
+            yield return null;
+
+            // receiver1 is correctly wired.
+            Assert.AreEqual(ml.Transceiver.DesiredDirection, Transceiver.Direction.ReceiveOnly);
+            Assert.AreEqual(pc2.Peer.RemoteVideoTracks.Count(), 1);
+            Assert.IsTrue(receiver1.IsLive);
+            Assert.AreEqual(receiver1.Track, ml.Transceiver.RemoteTrack);
+            Assert.AreEqual(receiver1.MediaLine, ml);
+
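+            // VideoReceiver.MediaLine points back to the media line using the receiver,
+            // and IsLive reflects whether a remote track is currently paired with it,
+            // which is how the detach operations below are observed.
+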
+            // Reset receiver
+            ml.Receiver = null;
+
+            // receiver1 has been detached.
+            Assert.AreEqual(ml.Transceiver.DesiredDirection, Transceiver.Direction.Inactive);
+            Assert.IsFalse(receiver1.IsLive);
+            Assert.IsNull(receiver1.MediaLine);
+
+            // Set receiver2.
+            ml.Receiver = receiver2;
+
+            // receiver2 is correctly wired.
+            Assert.AreEqual(ml.Transceiver.DesiredDirection, Transceiver.Direction.ReceiveOnly);
+            Assert.AreEqual(pc2.Peer.RemoteVideoTracks.Count(), 1);
+            Assert.IsTrue(receiver2.IsLive);
+            Assert.AreEqual(receiver2.Track, ml.Transceiver.RemoteTrack);
+            Assert.AreEqual(receiver2.MediaLine, ml);
+
+            // Swap receiver2 with receiver1.
+            ml.Receiver = receiver1;
+
+            // receiver1 is correctly wired.
+            Assert.AreEqual(ml.Transceiver.DesiredDirection, Transceiver.Direction.ReceiveOnly);
+            Assert.AreEqual(pc2.Peer.RemoteVideoTracks.Count(), 1);
+            Assert.IsTrue(receiver1.IsLive);
+            Assert.AreEqual(receiver1.Track, ml.Transceiver.RemoteTrack);
+            Assert.AreEqual(receiver1.MediaLine, ml);
+
+            // receiver2 has been detached.
+            Assert.IsFalse(receiver2.IsLive);
+            Assert.IsNull(receiver2.MediaLine);
+
+            Object.Destroy(pc1_go);
+            Object.Destroy(pc2_go);
+        }
+    }
+}
diff --git a/com.microsoft.mixedreality.webrtc/Tests/Runtime/VideoSourceTests.cs.meta b/com.microsoft.mixedreality.webrtc/Tests/Runtime/VideoSourceTests.cs.meta
new file mode 100644
index 0000000..8f04e52
--- /dev/null
+++ b/com.microsoft.mixedreality.webrtc/Tests/Runtime/VideoSourceTests.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: cc9378825144e2e4b815eae74e8b6232
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData:
+  assetBundleName:
+  assetBundleVariant:
diff --git a/com.microsoft.mixedreality.webrtc/package.json b/com.microsoft.mixedreality.webrtc/package.json
new file mode 100644
index 0000000..6ef5d96
--- /dev/null
+++ b/com.microsoft.mixedreality.webrtc/package.json
@@ -0,0 +1,19 @@
+{
+  "name": "com.microsoft.mixedreality.webrtc",
+  "version": "2.0.2",
+  "displayName": "MixedReality-WebRTC",
+  "description": "MixedReality-WebRTC provides multi-platform support for real-time audio/video/data communication using WebRTC, including for UWP devices like HoloLens 1 and HoloLens 2.\n\nEasy to integrate into any project, MixedReality-WebRTC offers components to manage a peer connection, to capture audio from a local microphone, to capture video from a local webcam, to render some remote video into a Unity texture, and to access remote audio as a Unity AudioSource object.\n\nConvenience components are also provided for typical uses like capturing the Unity scene content as rendered by any Unity Camera, and streaming it as a video.\n\nAdvanced users can access the raw remote audio and video data via a set of callbacks, and inject their own custom audio and video data into a WebRTC track to be sent to the remote peer.\n\nSome samples showing those usages are provided in a separate package \"MixedReality-WebRTC Samples\".",
+  "unity": "2018.4",
+  "keywords": [
+    "webrtc",
+    "mixed-reality",
+    "audio",
+    "video",
+    "realtime"
+  ],
+  "author": {
+    "name": "Microsoft",
+    "email": "mr-webrtc-devs@microsoft.com",
+    "url": "https://github.com/microsoft/MixedReality-WebRTC/"
+  }
+}
diff --git a/com.microsoft.mixedreality.webrtc/package.json.meta b/com.microsoft.mixedreality.webrtc/package.json.meta
new file mode 100644
index 0000000..c8e93a1
--- /dev/null
+++ b/com.microsoft.mixedreality.webrtc/package.json.meta
@@ -0,0 +1,7 @@
+fileFormatVersion: 2
+guid: f7c1f095fb7759f4c8038b5ea7ddb3ab
+TextScriptImporter:
+  externalObjects: {}
+  userData:
+  assetBundleName:
+  assetBundleVariant: