Add webrtc package

Santiago Lo Coco 2024-10-20 11:19:48 +02:00
parent a791d7011d
commit e5bf0a7c09
143 changed files with 9669 additions and 0 deletions

@@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: 509ffae3bcf10934a8ee3aed9d422669
TextScriptImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

Binary file not shown. (5.2 KiB)

Binary file not shown. (14 KiB)

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: f096824885b82e948a221c950ea92594
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,92 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using UnityEngine;
using UnityEditor;
namespace Microsoft.MixedReality.WebRTC.Unity.Editor
{
/// <summary>
/// Property drawer for <see cref="CaptureCameraAttribute"/>, to report an error to the user if
/// the associated <see xref="UnityEngine.Camera"/> property instance cannot be used for framebuffer
/// capture by <see cref="SceneVideoSource"/>.
/// </summary>
[CustomPropertyDrawer(typeof(CaptureCameraAttribute))]
public class CaptureCameraDrawer : PropertyDrawer
{
private const int c_errorMessageHeight = 42;
public override void OnGUI(Rect position, SerializedProperty property, GUIContent label)
{
try
{
Validate(property.objectReferenceValue as Camera);
}
catch (Exception ex)
{
// Display error message below the property
var totalHeight = position.height;
position.yMin = position.yMax - c_errorMessageHeight;
EditorGUI.HelpBox(position, ex.Message, MessageType.Warning);
// Adjust rect for the property itself
position.yMin = position.yMax - totalHeight;
position.yMax -= c_errorMessageHeight;
}
EditorGUI.PropertyField(position, property, label);
}
public override float GetPropertyHeight(SerializedProperty property, GUIContent label)
{
float height = base.GetPropertyHeight(property, label);
try
{
Validate(property.objectReferenceValue as Camera);
}
catch (Exception)
{
// Add extra space for the error message
height += c_errorMessageHeight;
}
return height;
}
/// <summary>
/// Validate that a given <see xref="UnityEngine.Camera"/> instance can be used for framebuffer
/// capture by <see cref="SceneVideoSource"/> based on the current settings of the Unity Player
/// for the current build platform.
/// </summary>
/// <param name="camera">The camera instance to test the settings of.</param>
/// <exception xref="System.NotSupportedException">
/// The camera has settings not compatible with its use with <see cref="SceneVideoSource"/>.
/// </exception>
/// <seealso cref="CaptureCameraAttribute.Validate(Camera)"/>
public static void Validate(Camera camera)
{
if (PlayerSettings.virtualRealitySupported && (camera != null))
{
if (PlayerSettings.stereoRenderingPath == StereoRenderingPath.MultiPass)
{
// Ensure camera is not rendering to both eyes in multi-pass stereo, otherwise the command buffer
// is executed twice (once per eye) and will produce twice as many frames, which leads to stuttering
// when playing back the video stream resulting from combining those frames.
if (camera.stereoTargetEye == StereoTargetEyeMask.Both)
{
throw new NotSupportedException("Capture camera renders both eyes in multi-pass stereoscopic rendering. This is not" +
" supported by the capture mechanism which cannot discriminate them. Set Camera.stereoTargetEye to either Left or" +
" Right, or use a different rendering mode (Player Settings > XR Settings > Stereo Rendering Mode).");
}
}
#if !UNITY_2019_1_OR_NEWER
else if (PlayerSettings.stereoRenderingPath == StereoRenderingPath.Instancing)
{
throw new NotSupportedException("Capture camera does not support single-pass instanced stereoscopic rendering before Unity 2019.1." +
" Use a different stereoscopic rendering mode (Player Settings > XR Settings > Stereo Rendering Mode) or upgrade to Unity 2019.1+.");
}
#endif
}
}
}
}
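
Illustration, not part of this commit: a minimal sketch of how CaptureCameraDrawer is typically exercised, assuming CaptureCameraAttribute can decorate a serialized Camera field (its intended use per the summary above). The namespace, class and field names below are hypothetical.

// Hypothetical component, for illustration only. Placing it under the package
// namespace lets CaptureCamera resolve the same way it does in the package code.
using UnityEngine;

namespace Microsoft.MixedReality.WebRTC.Unity.Examples
{
    public class SceneStreamerExample : MonoBehaviour
    {
        // CaptureCameraDrawer runs for this field in the Inspector and shows a
        // warning box when the camera's stereo settings are incompatible with
        // SceneVideoSource framebuffer capture (see Validate() above).
        [SerializeField]
        [CaptureCamera]
        private Camera _sourceCamera = null;
    }
}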

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: aa1232332d1bd5f4e8caef2583b98ad9
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,35 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using UnityEngine;
using UnityEditor;
namespace Microsoft.MixedReality.WebRTC.Unity.Editor
{
/// <summary>
/// Property drawer for <see cref="ConfigurableIceServer"/>, to display servers on a single line
/// with the kind first (fixed width) and the server address next (stretching).
/// </summary>
[CustomPropertyDrawer(typeof(ConfigurableIceServer))]
public class ConfigurableIceServerDrawer : PropertyDrawer
{
const float kTypeWidth = 60f;
public override void OnGUI(Rect rect, SerializedProperty property, GUIContent label)
{
var type = property.FindPropertyRelative("Type");
EditorGUI.PropertyField(new Rect(rect.x, rect.y, kTypeWidth, rect.height), type, GUIContent.none);
rect.x += kTypeWidth - 10f;
rect.width -= kTypeWidth - 10f;
var uri = property.FindPropertyRelative("Uri");
EditorGUI.PropertyField(rect, uri, GUIContent.none);
}
public override float GetPropertyHeight(SerializedProperty property, GUIContent label)
{
return EditorStyles.textField.lineHeight + 3f;
}
}
}
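
Illustration, not part of this commit: the shape ConfigurableIceServer is assumed to have for this drawer to bind. Only the field names "Type" and "Uri" come from the FindPropertyRelative calls above; the enum values, the use of a class, and the namespace are assumptions.

// Assumed serializable shape, for illustration only; not the actual definition.
using System;

namespace Microsoft.MixedReality.WebRTC.Unity
{
    public enum IceType
    {
        None,
        Stun,
        Turn
    }

    [Serializable]
    public class ConfigurableIceServer
    {
        public IceType Type; // drawn in the fixed-width popup on the left
        public string Uri;   // drawn in the stretching text field on the right
    }
}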

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 609cf190104833a4bb00a549d016469c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 5f4dbb05cc7783e42851c507ae42fd35
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

Binary file not shown. (7.2 KiB)

@@ -0,0 +1,236 @@
fileFormatVersion: 2
guid: b8d84fb8445678440929dfd49683d60d
TextureImporter:
fileIDToRecycleName:
21300000: icon_audio
21300002: icon_video
21300004: icon_sendrecv
21300006: icon_recvonly
21300008: icon_sendonly
21300010: icon_inactive
externalObjects: {}
serializedVersion: 9
mipmaps:
mipMapMode: 0
enableMipMap: 0
sRGBTexture: 1
linearTexture: 0
fadeOut: 0
borderMipMap: 0
mipMapsPreserveCoverage: 0
alphaTestReferenceValue: 0.5
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:
convertToNormalMap: 0
externalNormalMap: 0
heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0
seamlessCubemap: 0
textureFormat: 1
maxTextureSize: 2048
textureSettings:
serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -100
wrapU: 1
wrapV: 1
wrapW: -1
nPOTScale: 0
lightmap: 0
compressionQuality: 50
spriteMode: 2
spriteExtrude: 0
spriteMeshType: 0
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 64
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spriteGenerateFallbackPhysicsShape: 0
alphaUsage: 1
alphaIsTransparency: 1
spriteTessellationDetail: -1
textureType: 8
textureShape: 1
singleChannelComponent: 0
maxTextureSizeSet: 0
compressionQualitySet: 0
textureFormatSet: 0
platformSettings:
- serializedVersion: 2
buildTarget: DefaultTexturePlatform
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
- serializedVersion: 2
buildTarget: Standalone
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
- serializedVersion: 2
buildTarget: Windows Store Apps
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
spriteSheet:
serializedVersion: 2
sprites:
- serializedVersion: 2
name: icon_audio
rect:
serializedVersion: 2
x: 0
y: 128
width: 64
height: 64
alignment: 0
pivot: {x: 0.5, y: 0.5}
border: {x: 0, y: 0, z: 0, w: 0}
outline: []
physicsShape: []
tessellationDetail: 0
bones: []
spriteID: 86923a9d0cc950d4eb6a63a817e45e76
vertices: []
indices:
edges: []
weights: []
- serializedVersion: 2
name: icon_video
rect:
serializedVersion: 2
x: 64
y: 128
width: 64
height: 64
alignment: 0
pivot: {x: 0.5, y: 0.5}
border: {x: 0, y: 0, z: 0, w: 0}
outline: []
physicsShape: []
tessellationDetail: 0
bones: []
spriteID: 48c07850933ea4944a58ed1f77736a0d
vertices: []
indices:
edges: []
weights: []
- serializedVersion: 2
name: icon_sendrecv
rect:
serializedVersion: 2
x: 0
y: 64
width: 64
height: 64
alignment: 0
pivot: {x: 0.5, y: 0.5}
border: {x: 0, y: 0, z: 0, w: 0}
outline: []
physicsShape: []
tessellationDetail: 0
bones: []
spriteID: d528722cdcdd2f445b567376409ac709
vertices: []
indices:
edges: []
weights: []
- serializedVersion: 2
name: icon_recvonly
rect:
serializedVersion: 2
x: 64
y: 64
width: 64
height: 64
alignment: 0
pivot: {x: 0.5, y: 0.5}
border: {x: 0, y: 0, z: 0, w: 0}
outline: []
physicsShape: []
tessellationDetail: 0
bones: []
spriteID: a20a054e0f7bcee469020f3cb36fa1a2
vertices: []
indices:
edges: []
weights: []
- serializedVersion: 2
name: icon_sendonly
rect:
serializedVersion: 2
x: 0
y: 0
width: 64
height: 64
alignment: 0
pivot: {x: 0.5, y: 0.5}
border: {x: 0, y: 0, z: 0, w: 0}
outline: []
physicsShape: []
tessellationDetail: 0
bones: []
spriteID: 3ba3b398bb58ca441a8e23ee3263b8a0
vertices: []
indices:
edges: []
weights: []
- serializedVersion: 2
name: icon_inactive
rect:
serializedVersion: 2
x: 64
y: 0
width: 64
height: 64
alignment: 0
pivot: {x: 0.5, y: 0.5}
border: {x: 0, y: 0, z: 0, w: 0}
outline: []
physicsShape: []
tessellationDetail: 0
bones: []
spriteID: fc03eb6886bf96c458b238a0506a1730
vertices: []
indices:
edges: []
weights: []
outline: []
physicsShape: []
bones: []
spriteID: 9f83ab749ebbef2409485e22f7e40288
vertices: []
indices:
edges: []
weights: []
spritePackingTag:
pSDRemoveMatte: 0
pSDShowRemoveMatteOption: 0
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,302 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
width="256"
height="192"
viewBox="0 0 256 192.00002"
version="1.1"
id="svg8"
inkscape:version="0.92.4 (5da689c313, 2019-01-14)"
sodipodi:docname="editor_icons.svg"
inkscape:export-xdpi="96"
inkscape:export-ydpi="96">
<defs
id="defs2" />
<sodipodi:namedview
id="base"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:pageopacity="0.0"
inkscape:pageshadow="2"
inkscape:zoom="4.5685727"
inkscape:cx="-6.6604829"
inkscape:cy="111.02443"
inkscape:document-units="px"
inkscape:current-layer="layer1"
showgrid="true"
units="px"
inkscape:pagecheckerboard="true"
inkscape:window-width="3840"
inkscape:window-height="2066"
inkscape:window-x="3829"
inkscape:window-y="982"
inkscape:window-maximized="1"
inkscape:snap-object-midpoints="true"
inkscape:snap-center="true">
<inkscape:grid
type="xygrid"
id="grid815"
empspacing="8" />
</sodipodi:namedview>
<metadata
id="metadata5">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
<dc:title></dc:title>
</cc:Work>
</rdf:RDF>
</metadata>
<g
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1"
transform="translate(0,-930.5196)">
<path
style="fill:#2e3436;fill-opacity:1;stroke:#2e3436;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 11,970.51961 v -16 h 10 l 12,-12 h 2 v 40 h -2 l -12,-12 z"
id="path818"
inkscape:connector-curvature="0"
sodipodi:nodetypes="ccccccccc" />
<path
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;stroke:#2e3436;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
id="path820"
d="m 42.128026,952.12735 a 12,12 0 0 1 5.999999,10.3923 12,12 0 0 1 -6,10.39231"
sodipodi:type="arc"
sodipodi:open="true"
sodipodi:start="5.2359878"
sodipodi:end="1.0471976"
sodipodi:ry="12"
sodipodi:rx="12"
sodipodi:cy="962.51965"
sodipodi:cx="36.128025" />
<path
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;stroke:#2e3436;stroke-width:3.00000024;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
id="path820-2"
sodipodi:type="arc"
sodipodi:cx="36.128029"
sodipodi:cy="962.51965"
sodipodi:rx="18.495964"
sodipodi:ry="18.495964"
sodipodi:start="5.2359878"
sodipodi:end="1.0471976"
d="m 45.376012,946.50168 a 18.495964,18.495964 0 0 1 9.247981,16.01797 18.495964,18.495964 0 0 1 -9.247983,16.01798"
sodipodi:open="true" />
<path
id="path856"
style="fill:none;stroke:#2e3436;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 72,974.51961 h 48 m -48,-24 h 48 m -48,-8 h 48 v 40 H 72 Z"
inkscape:connector-curvature="0"
sodipodi:nodetypes="ccccccccc" />
<path
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;stroke:#2e3436;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
d="m 84,950.51961 v -8"
id="path859"
inkscape:connector-curvature="0" />
<path
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;stroke:#2e3436;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
d="m 96,950.51961 c 0,-1 0,-8 0,-8"
id="path861"
inkscape:connector-curvature="0" />
<path
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;stroke:#2e3436;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
d="m 108,950.51961 v -8"
id="path863"
inkscape:connector-curvature="0" />
<path
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;stroke:#2e3436;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
d="m 84,982.51961 v -8"
id="path859-5"
inkscape:connector-curvature="0" />
<path
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;stroke:#2e3436;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
d="m 96,982.51961 c 0,-1 0,-8 0,-8"
id="path861-6"
inkscape:connector-curvature="0" />
<path
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;stroke:#2e3436;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
d="m 108,982.51961 v -8"
id="path863-8"
inkscape:connector-curvature="0" />
<path
style="opacity:1;vector-effect:none;fill:#2e3436;fill-opacity:1;stroke:#2e3436;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
d="m 80,974.51961 12,-12 6,6 11,-11 11,11 v 6 z"
id="path912"
inkscape:connector-curvature="0" />
<circle
style="opacity:1;vector-effect:none;fill:#2e3436;fill-opacity:1;stroke:#2e3436;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
id="path928"
cx="81"
cy="958.51959"
r="4" />
<path
style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#2e3436;fill-opacity:1;fill-rule:nonzero;stroke:#2e3436;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate"
d="m 39.991193,1067.5048 a 3.0003,3.0003 0 0 0 -2.089844,5.1504 l 6.878906,6.8789 H 12.022443 a 3.0003,3.0003 0 1 0 0,6 h 40 a 3.0003,3.0003 0 0 0 2.121094,-5.1211 l -12,-12 a 3.0003,3.0003 0 0 0 -2.152344,-0.9082 z"
id="path959-4-6"
inkscape:connector-curvature="0" />
<path
style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#babdb6;fill-opacity:1;fill-rule:nonzero;stroke:#babdb6;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate"
d="m 11.978201,1095.4981 a 3.0003,3.0003 0 0 0 -2.1210939,5.1211 l 11.9999999,12 a 3.0003,3.0003 0 1 0 4.242188,-4.2422 l -6.878906,-6.8789 h 32.757812 a 3.0003,3.0003 0 1 0 0,-6 z"
id="path959-4-7-3"
inkscape:connector-curvature="0" />
<path
style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#babdb6;fill-opacity:1;fill-rule:nonzero;stroke:#babdb6;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate"
d="m 103.99119,1067.5047 a 3.0003,3.0003 0 0 0 -2.08984,5.1504 l 6.8789,6.8789 H 76.022443 a 3.0003,3.0003 0 1 0 0,6 h 39.999997 a 3.0003,3.0003 0 0 0 2.1211,-5.1211 l -12,-12 a 3.0003,3.0003 0 0 0 -2.15235,-0.9082 z"
id="path959-4-6-2"
inkscape:connector-curvature="0" />
<path
style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#babdb6;fill-opacity:1;fill-rule:nonzero;stroke:#babdb6;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate"
d="m 75.978201,1095.498 a 3.0003,3.0003 0 0 0 -2.121094,5.1211 l 12,12 a 3.0003,3.0003 0 1 0 4.242188,-4.2422 l -6.878906,-6.8789 H 115.9782 a 3.0003,3.0003 0 1 0 0,-6 z"
id="path959-4-7-3-2"
inkscape:connector-curvature="0" />
<path
style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#2e3436;fill-opacity:1;fill-rule:nonzero;stroke:#2e3436;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate"
d="m 39.991193,1003.5047 a 3.0003,3.0003 0 0 0 -2.089844,5.1504 l 6.878906,6.8789 H 12.022443 a 3.0003,3.0003 0 1 0 0,6 h 40 a 3.0003,3.0003 0 0 0 2.121094,-5.1211 l -12,-12 a 3.0003,3.0003 0 0 0 -2.152344,-0.9082 z"
id="path959-4-6-3"
inkscape:connector-curvature="0" />
<path
style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#2e3436;fill-opacity:1;fill-rule:nonzero;stroke:#2e3436;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate"
d="m 11.978201,1031.498 a 3.0003,3.0003 0 0 0 -2.121094,5.1211 l 12,12 a 3.0003,3.0003 0 1 0 4.242188,-4.2422 l -6.878906,-6.8789 h 32.757812 a 3.0003,3.0003 0 1 0 0,-6 z"
id="path959-4-7-3-4"
inkscape:connector-curvature="0" />
<path
style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#babdb6;fill-opacity:1;fill-rule:nonzero;stroke:#babdb6;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate"
d="m 103.99119,1003.5048 a 3.0003,3.0003 0 0 0 -2.08984,5.1504 l 6.8789,6.8789 H 76.022443 a 3.0003,3.0003 0 1 0 0,6 h 39.999997 a 3.0003,3.0003 0 0 0 2.1211,-5.1211 l -12,-12 a 3.0003,3.0003 0 0 0 -2.15235,-0.9082 z"
id="path959-4-6-23"
inkscape:connector-curvature="0" />
<path
style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#2e3436;fill-opacity:1;fill-rule:nonzero;stroke:#2e3436;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate"
d="m 75.978201,1031.498 a 3.0003,3.0003 0 0 0 -2.121094,5.1211 l 12,12 a 3.0003,3.0003 0 1 0 4.242188,-4.2422 l -6.878906,-6.8789 H 115.9782 a 3.0003,3.0003 0 1 0 0,-6 z"
id="path959-4-7-3-0"
inkscape:connector-curvature="0" />
<path
style="fill:#babdb6;fill-opacity:1;stroke:#babdb6;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 139,970.51961 v -16 h 10 l 12,-12 h 2 v 40 h -2 l -12,-12 z"
id="path818-8"
inkscape:connector-curvature="0"
sodipodi:nodetypes="ccccccccc" />
<path
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;stroke:#babdb6;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
id="path820-4"
d="m 170.12802,952.12735 a 12,12 0 0 1 6,10.3923 12,12 0 0 1 -6,10.39231"
sodipodi:type="arc"
sodipodi:open="true"
sodipodi:start="5.2359878"
sodipodi:end="1.0471976"
sodipodi:ry="12"
sodipodi:rx="12"
sodipodi:cy="962.51965"
sodipodi:cx="164.12802" />
<path
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;stroke:#babdb6;stroke-width:3.00000024;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
id="path820-2-7"
sodipodi:type="arc"
sodipodi:cx="164.12802"
sodipodi:cy="962.51965"
sodipodi:rx="18.495964"
sodipodi:ry="18.495964"
sodipodi:start="5.2359878"
sodipodi:end="1.0471976"
d="m 173.376,946.50168 a 18.495964,18.495964 0 0 1 9.24799,16.01797 18.495964,18.495964 0 0 1 -9.24799,16.01798"
sodipodi:open="true" />
<path
id="path856-2"
style="fill:none;stroke:#babdb6;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 200,974.51961 h 48 m -48,-24 h 48 m -48,-8 h 48 v 40 h -48 z"
inkscape:connector-curvature="0"
sodipodi:nodetypes="ccccccccc" />
<path
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;stroke:#babdb6;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
d="m 212,950.51961 v -8"
id="path859-4"
inkscape:connector-curvature="0" />
<path
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;stroke:#babdb6;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
d="m 224,950.51961 c 0,-1 0,-8 0,-8"
id="path861-1"
inkscape:connector-curvature="0" />
<path
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;stroke:#babdb6;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
d="m 236,950.51961 v -8"
id="path863-0"
inkscape:connector-curvature="0" />
<path
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;stroke:#babdb6;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
d="m 212,982.51961 v -8"
id="path859-5-3"
inkscape:connector-curvature="0" />
<path
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;stroke:#babdb6;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
d="m 224,982.51961 c 0,-1 0,-8 0,-8"
id="path861-6-9"
inkscape:connector-curvature="0" />
<path
style="opacity:1;vector-effect:none;fill:none;fill-opacity:1;stroke:#babdb6;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
d="m 236,982.51961 v -8"
id="path863-8-2"
inkscape:connector-curvature="0" />
<path
style="opacity:1;vector-effect:none;fill:#babdb6;fill-opacity:1;stroke:#babdb6;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
d="m 208,974.51961 12,-12 6,6 11,-11 11,11 v 6 z"
id="path912-1"
inkscape:connector-curvature="0" />
<circle
style="opacity:1;vector-effect:none;fill:#babdb6;fill-opacity:1;stroke:#babdb6;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers"
id="path928-5"
cx="209"
cy="958.51959"
r="4" />
<path
style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#babdb6;fill-opacity:1;fill-rule:nonzero;stroke:#babdb6;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate;font-variant-east_asian:normal"
d="m 167.99119,1067.5048 a 3.0003,3.0003 0 0 0 -2.08984,5.1504 l 6.8789,6.8789 h -32.75781 a 3.0003,3.0003 0 1 0 0,6 h 40 a 3.0003,3.0003 0 0 0 2.1211,-5.1211 l -12,-12 a 3.0003,3.0003 0 0 0 -2.15235,-0.9082 z"
id="path959-4-6-5"
inkscape:connector-curvature="0" />
<path
style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#636363;fill-opacity:1;fill-rule:nonzero;stroke:#636363;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate;font-variant-east_asian:normal"
d="m 139.9782,1095.4981 a 3.0003,3.0003 0 0 0 -2.12109,5.1211 l 12,12 a 3.0003,3.0003 0 1 0 4.24218,-4.2422 l -6.8789,-6.8789 h 32.75781 a 3.0003,3.0003 0 1 0 0,-6 z"
id="path959-4-7-3-6"
inkscape:connector-curvature="0" />
<path
style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#636363;fill-opacity:1;fill-rule:nonzero;stroke:#636363;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate;font-variant-east_asian:normal"
d="m 231.99119,1067.5047 a 3.0003,3.0003 0 0 0 -2.08984,5.1504 l 6.8789,6.8789 h -32.75781 a 3.0003,3.0003 0 1 0 0,6 h 40 a 3.0003,3.0003 0 0 0 2.1211,-5.1211 l -12,-12 a 3.0003,3.0003 0 0 0 -2.15235,-0.9082 z"
id="path959-4-6-2-9"
inkscape:connector-curvature="0" />
<path
style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#636363;fill-opacity:1;fill-rule:nonzero;stroke:#636363;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate;font-variant-east_asian:normal"
d="m 203.9782,1095.498 a 3.0003,3.0003 0 0 0 -2.12109,5.1211 l 12,12 a 3.0003,3.0003 0 1 0 4.24218,-4.2422 l -6.8789,-6.8789 h 32.75781 a 3.0003,3.0003 0 1 0 0,-6 z"
id="path959-4-7-3-2-0"
inkscape:connector-curvature="0" />
<path
style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#babdb6;fill-opacity:1;fill-rule:nonzero;stroke:#babdb6;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate;font-variant-east_asian:normal"
d="m 167.99119,1003.5047 a 3.0003,3.0003 0 0 0 -2.08984,5.1504 l 6.8789,6.8789 h -32.75781 a 3.0003,3.0003 0 1 0 0,6 h 40 a 3.0003,3.0003 0 0 0 2.1211,-5.1211 l -12,-12 a 3.0003,3.0003 0 0 0 -2.15235,-0.9082 z"
id="path959-4-6-3-8"
inkscape:connector-curvature="0" />
<path
style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#babdb6;fill-opacity:1;fill-rule:nonzero;stroke:#babdb6;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate;font-variant-east_asian:normal"
d="m 139.9782,1031.498 a 3.0003,3.0003 0 0 0 -2.12109,5.1211 l 12,12 a 3.0003,3.0003 0 1 0 4.24218,-4.2422 l -6.8789,-6.8789 h 32.75781 a 3.0003,3.0003 0 1 0 0,-6 z"
id="path959-4-7-3-4-1"
inkscape:connector-curvature="0" />
<path
style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#636363;fill-opacity:1;fill-rule:nonzero;stroke:#636363;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate;font-variant-east_asian:normal"
d="m 231.99119,1003.5048 a 3.0003,3.0003 0 0 0 -2.08984,5.1504 l 6.8789,6.8789 h -32.75781 a 3.0003,3.0003 0 1 0 0,6 h 40 a 3.0003,3.0003 0 0 0 2.1211,-5.1211 l -12,-12 a 3.0003,3.0003 0 0 0 -2.15235,-0.9082 z"
id="path959-4-6-23-3"
inkscape:connector-curvature="0" />
<path
style="color:#000000;font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:medium;line-height:normal;font-family:sans-serif;font-variant-ligatures:normal;font-variant-position:normal;font-variant-caps:normal;font-variant-numeric:normal;font-variant-alternates:normal;font-feature-settings:normal;text-indent:0;text-align:start;text-decoration:none;text-decoration-line:none;text-decoration-style:solid;text-decoration-color:#000000;letter-spacing:normal;word-spacing:normal;text-transform:none;writing-mode:lr-tb;direction:ltr;text-orientation:mixed;dominant-baseline:auto;baseline-shift:baseline;text-anchor:start;white-space:normal;shape-padding:0;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;vector-effect:none;fill:#babdb6;fill-opacity:1;fill-rule:nonzero;stroke:#babdb6;stroke-width:2;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;paint-order:stroke fill markers;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate;font-variant-east_asian:normal"
d="m 203.9782,1031.498 a 3.0003,3.0003 0 0 0 -2.12109,5.1211 l 12,12 a 3.0003,3.0003 0 1 0 4.24218,-4.2422 l -6.8789,-6.8789 h 32.75781 a 3.0003,3.0003 0 1 0 0,-6 z"
id="path959-4-7-3-0-7"
inkscape:connector-curvature="0" />
</g>
</svg>


@@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: 2f1d1bcc495fc4342931e3e52e5d205f
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

Binary file not shown. (969 B)

@@ -0,0 +1,110 @@
fileFormatVersion: 2
guid: f845cb707dda5c84594956f49655100f
TextureImporter:
fileIDToRecycleName: {}
externalObjects: {}
serializedVersion: 9
mipmaps:
mipMapMode: 0
enableMipMap: 1
sRGBTexture: 1
linearTexture: 0
fadeOut: 0
borderMipMap: 0
mipMapsPreserveCoverage: 0
alphaTestReferenceValue: 0.5
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:
convertToNormalMap: 0
externalNormalMap: 0
heightScale: 0.25
normalMapFilter: 0
isReadable: 0
streamingMipmaps: 0
streamingMipmapsPriority: 0
grayScaleToAlpha: 0
generateCubemap: 6
cubemapConvolution: 0
seamlessCubemap: 0
textureFormat: 1
maxTextureSize: 2048
textureSettings:
serializedVersion: 2
filterMode: -1
aniso: -1
mipBias: -100
wrapU: -1
wrapV: -1
wrapW: -1
nPOTScale: 1
lightmap: 0
compressionQuality: 50
spriteMode: 0
spriteExtrude: 1
spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spritePixelsToUnits: 100
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spriteGenerateFallbackPhysicsShape: 1
alphaUsage: 1
alphaIsTransparency: 1
spriteTessellationDetail: -1
textureType: 0
textureShape: 1
singleChannelComponent: 0
maxTextureSizeSet: 0
compressionQualitySet: 0
textureFormatSet: 0
platformSettings:
- serializedVersion: 2
buildTarget: DefaultTexturePlatform
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
- serializedVersion: 2
buildTarget: Standalone
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
- serializedVersion: 2
buildTarget: Windows Store Apps
maxTextureSize: 2048
resizeAlgorithm: 0
textureFormat: -1
textureCompression: 1
compressionQuality: 50
crunchedCompression: 0
allowsAlphaSplitting: 0
overridden: 0
androidETC2FallbackOverride: 0
spriteSheet:
serializedVersion: 2
sprites: []
outline: []
physicsShape: []
bones: []
spriteID:
vertices: []
indices:
edges: []
weights: []
spritePackingTag:
pSDRemoveMatte: 0
pSDShowRemoveMatteOption: 0
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,54 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using UnityEditor;
using UnityEngine;
namespace Microsoft.MixedReality.WebRTC.Unity.Editor
{
/// <summary>
/// Inspector editor for <see cref="MicrophoneSource"/>.
/// </summary>
[CustomEditor(typeof(MicrophoneSource))]
[CanEditMultipleObjects]
public class MicrophoneSourceEditor : UnityEditor.Editor
{
SerializedProperty _autoGainControl;
SerializedProperty _audioSourceStopped;
void OnEnable()
{
_autoGainControl = serializedObject.FindProperty("_autoGainControl");
}
/// <summary>
/// Override implementation of <a href="https://docs.unity3d.com/ScriptReference/Editor.OnInspectorGUI.html">Editor.OnInspectorGUI</a>
/// to draw the inspector GUI for the currently selected <see cref="MicrophoneSource"/>.
/// </summary>
public override void OnInspectorGUI()
{
serializedObject.Update();
if (!PlayerSettings.WSA.GetCapability(PlayerSettings.WSACapability.Microphone))
{
EditorGUILayout.HelpBox("The UWP player is missing the Microphone capability. The MicrophoneSource component will not function correctly."
+ " Add the Microphone capability in Project Settings > Player > UWP > Publishing Settings > Capabilities.", MessageType.Error);
if (GUILayout.Button("Open Player Settings"))
{
SettingsService.OpenProjectSettings("Project/Player");
}
if (GUILayout.Button("Add Microphone Capability"))
{
PlayerSettings.WSA.SetCapability(PlayerSettings.WSACapability.Microphone, true);
}
}
GUILayout.Space(10);
EditorGUILayout.LabelField("Audio processing", EditorStyles.boldLabel);
EditorGUILayout.PropertyField(_autoGainControl);
serializedObject.ApplyModifiedProperties();
}
}
}
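
Illustration, not part of this commit: the serialized field this editor binds to. The field name "_autoGainControl" comes from the FindProperty call above; the declaring type shown here is a stand-in, since the real MicrophoneSource is defined elsewhere in the package.

// Stand-in declaration, for illustration only.
using UnityEngine;

public class MicrophoneSourceFieldSketch : MonoBehaviour
{
    [SerializeField]
    [Tooltip("Enable automated gain control (AGC) on the captured audio.")]
    private bool _autoGainControl = true;
}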

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: b5b57eb30bd3d85419bebdff6d5a9b54
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,16 @@
{
"name": "Microsoft.MixedReality.WebRTC.Unity.Editor",
"references": [
"Microsoft.MixedReality.WebRTC.Unity"
],
"optionalUnityReferences": [],
"includePlatforms": [
"Editor"
],
"excludePlatforms": [],
"allowUnsafeCode": false,
"overrideReferences": false,
"precompiledReferences": [],
"autoReferenced": true,
"defineConstraints": []
}

@@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: bab493aa30700344182f56419f1f775d
AssemblyDefinitionImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,293 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using UnityEngine;
using UnityEditor;
using UnityEditorInternal;
namespace Microsoft.MixedReality.WebRTC.Unity.Editor
{
/// <summary>
/// Custom editor for the <see cref="Microsoft.MixedReality.WebRTC.Unity.PeerConnection"/> component.
/// </summary>
[CustomEditor(typeof(PeerConnection))]
[CanEditMultipleObjects]
public class PeerConnectionEditor : UnityEditor.Editor
{
/// <summary>
/// Height of a single line of controls (e.g. single sender or receiver).
/// </summary>
const float kLineHeight = 22;
/// <summary>
/// Spacing between list items (transceivers), for readability.
/// </summary>
const float kItemSpacing = 3;
const float kIconSpacing = 25;
SerializedProperty autoCreateOffer_;
SerializedProperty autoLogErrors_;
SerializedProperty iceServers_;
SerializedProperty iceUsername_;
SerializedProperty iceCredential_;
SerializedProperty onInitialized_;
SerializedProperty onShutdown_;
SerializedProperty onError_;
ReorderableList transceiverList_;
SerializedProperty mediaLines_;
enum IconType
{
Audio,
Video,
SendRecv,
RecvOnly,
SendOnly,
Inactive
}
Sprite[] sprites_ = new Sprite[6];
void DrawSpriteIcon(IconType type, Rect rect)
{
var sprite = sprites_[(int)type];
var texture = sprite.texture;
Rect texCoords = sprite.textureRect;
Vector2 texelSize = texture.texelSize;
texCoords.x *= texelSize.x;
texCoords.y *= texelSize.y;
texCoords.width *= texelSize.x;
texCoords.height *= texelSize.y;
if (EditorGUIUtility.isProSkin)
{
texCoords.x += 0.5f;
}
GUI.DrawTextureWithTexCoords(rect, texture, texCoords);
}
private void Awake()
{
// Load sprites for transceiver list control
var objects = AssetDatabase.LoadAllAssetsAtPath("Packages/com.microsoft.mixedreality.webrtc/Editor/Icons/editor_icons.png");
foreach (var obj in objects)
{
if (obj is Sprite sprite)
{
if (sprite.name == "icon_audio")
{
sprites_[(int)IconType.Audio] = sprite;
}
else if (sprite.name == "icon_video")
{
sprites_[(int)IconType.Video] = sprite;
}
else if (sprite.name == "icon_sendrecv")
{
sprites_[(int)IconType.SendRecv] = sprite;
}
else if (sprite.name == "icon_recvonly")
{
sprites_[(int)IconType.RecvOnly] = sprite;
}
else if (sprite.name == "icon_sendonly")
{
sprites_[(int)IconType.SendOnly] = sprite;
}
else if (sprite.name == "icon_inactive")
{
sprites_[(int)IconType.Inactive] = sprite;
}
}
}
}
void OnEnable()
{
autoCreateOffer_ = serializedObject.FindProperty("AutoCreateOfferOnRenegotiationNeeded");
autoLogErrors_ = serializedObject.FindProperty("AutoLogErrorsToUnityConsole");
iceServers_ = serializedObject.FindProperty("IceServers");
iceUsername_ = serializedObject.FindProperty("IceUsername");
iceCredential_ = serializedObject.FindProperty("IceCredential");
onInitialized_ = serializedObject.FindProperty("OnInitialized");
onShutdown_ = serializedObject.FindProperty("OnShutdown");
onError_ = serializedObject.FindProperty("OnError");
mediaLines_ = serializedObject.FindProperty("_mediaLines");
transceiverList_ = new ReorderableList(serializedObject, mediaLines_, draggable: true,
displayHeader: true, displayAddButton: false, displayRemoveButton: true);
transceiverList_.elementHeightCallback =
(int index) =>
{
float height = kItemSpacing + 2 * kLineHeight;
var element = transceiverList_.serializedProperty.GetArrayElementAtIndex(index);
var src = element.FindPropertyRelative("_source");
if (src.isExpanded)
{
var trackName = element.FindPropertyRelative("SenderTrackName");
// FIXME - SdpTokenDrawer.OnGUI() is called with h=16px instead of the total height, breaking the layout
height += kLineHeight; // EditorGUI.GetPropertyHeight(trackName) + kItemSpacing;
}
return height;
};
transceiverList_.drawHeaderCallback = (Rect rect) => EditorGUI.LabelField(rect, "Transceivers");
transceiverList_.drawElementCallback =
(Rect rect, int index, bool isActive, bool isFocused) =>
{
var element = transceiverList_.serializedProperty.GetArrayElementAtIndex(index);
float x0 = rect.x;
float x1 = x0 + 16;
float y0 = rect.y + 2;
float y1 = y0 + kLineHeight;
// MID value
EditorGUI.LabelField(new Rect(x0 - 14, y1, 20, 20), $"{index}");
// Audio or video icon for transceiver kind
MediaKind mediaKind = (MediaKind)element.FindPropertyRelative("_mediaKind").intValue;
System.Type senderType, receiverType;
if (mediaKind == MediaKind.Audio)
{
senderType = typeof(AudioTrackSource);
receiverType = typeof(AudioReceiver);
DrawSpriteIcon(IconType.Audio, new Rect(x0, rect.y, 20, 20));
}
else
{
senderType = typeof(VideoTrackSource);
receiverType = typeof(VideoReceiver);
DrawSpriteIcon(IconType.Video, new Rect(x0, rect.y, 20, 20));
}
rect.x += (kIconSpacing + 10);
rect.width -= (kIconSpacing + 10);
float fieldWidth = rect.width;
bool hasSender = false;
bool hasReceiver = false;
bool sourceIsExpanded = false;
{
var p = element.FindPropertyRelative("_source");
Object obj = p.objectReferenceValue;
sourceIsExpanded = EditorGUI.Foldout(new Rect(rect.x, y0, 0, EditorGUIUtility.singleLineHeight), p.isExpanded, new GUIContent());
p.isExpanded = sourceIsExpanded;
obj = EditorGUI.ObjectField(
new Rect(rect.x, y0, fieldWidth, EditorGUIUtility.singleLineHeight),
obj, senderType, true);
hasSender = (obj != null);
p.objectReferenceValue = obj;
y0 += kLineHeight;
}
if (sourceIsExpanded)
{
var p = element.FindPropertyRelative("_senderTrackName");
// FIXME - SdpTokenDrawer.OnGUI() is called with h=16px instead of the total height, breaking the layout
//EditorGUI.PropertyField(new Rect(rect.x + 10, y0, fieldWidth - 8, EditorGUIUtility.singleLineHeight), p);
//y0 += EditorGUI.GetPropertyHeight(p) + 6;
string val = p.stringValue;
val = EditorGUI.TextField(new Rect(rect.x + 10, y0, fieldWidth - 8, EditorGUIUtility.singleLineHeight), "Track name", val);
p.stringValue = val;
y0 += kLineHeight;
}
{
var p = element.FindPropertyRelative("_receiver");
Object obj = p.objectReferenceValue;
obj = EditorGUI.ObjectField(
new Rect(rect.x, y0, fieldWidth, EditorGUIUtility.singleLineHeight),
obj, receiverType, true);
hasReceiver = (obj != null);
p.objectReferenceValue = obj;
}
IconType iconType = IconType.Inactive;
if (hasSender)
{
if (hasReceiver)
{
iconType = IconType.SendRecv;
}
else
{
iconType = IconType.SendOnly;
}
}
else if (hasReceiver)
{
iconType = IconType.RecvOnly;
}
DrawSpriteIcon(iconType, new Rect(x0, y1, 16, 16));
};
transceiverList_.drawNoneElementCallback = (Rect rect) =>
{
GUIStyle style = new GUIStyle(EditorStyles.label);
style.alignment = TextAnchor.MiddleCenter;
EditorGUI.LabelField(rect, "(empty)", style);
};
}
public override void OnInspectorGUI()
{
serializedObject.Update();
#if UNITY_WSA
if (!PlayerSettings.WSA.GetCapability(PlayerSettings.WSACapability.Microphone))
{
EditorGUILayout.HelpBox("The UWP player is missing the Microphone capability. Currently on UWP the native WebRTC implementation always tries to"
+ " open the microphone while initializing the audio subsystem at startup. Not granting access will fail initialization, and generally crash the app."
+ " Add the Microphone capability in Project Settings > Player > UWP > Publishing Settings > Capabilities.", MessageType.Error);
if (GUILayout.Button("Open Player Settings"))
{
SettingsService.OpenProjectSettings("Project/Player");
}
if (GUILayout.Button("Add Microphone Capability"))
{
PlayerSettings.WSA.SetCapability(PlayerSettings.WSACapability.Microphone, true);
}
}
#endif
EditorGUILayout.Space();
EditorGUILayout.PropertyField(autoLogErrors_, new GUIContent("Log errors to the Unity console",
"Log the WebRTC errors to the Unity console."));
EditorGUILayout.Space();
EditorGUILayout.LabelField("Signaling", EditorStyles.boldLabel);
EditorGUILayout.PropertyField(iceServers_, true);
EditorGUILayout.PropertyField(iceUsername_);
EditorGUILayout.PropertyField(iceCredential_);
EditorGUILayout.Space();
EditorGUILayout.LabelField("Media", EditorStyles.boldLabel);
EditorGUILayout.PropertyField(autoCreateOffer_);
transceiverList_.DoLayoutList();
using (var _ = new EditorGUILayout.HorizontalScope())
{
if (GUILayout.Button("+ Audio", EditorStyles.miniButton))
{
((PeerConnection)serializedObject.targetObject).AddMediaLine(MediaKind.Audio);
}
if (GUILayout.Button("+ Video", EditorStyles.miniButton))
{
((PeerConnection)serializedObject.targetObject).AddMediaLine(MediaKind.Video);
}
}
EditorGUILayout.Space();
EditorGUILayout.LabelField("Events", EditorStyles.boldLabel);
EditorGUILayout.PropertyField(onInitialized_);
EditorGUILayout.PropertyField(onShutdown_);
EditorGUILayout.PropertyField(onError_);
serializedObject.ApplyModifiedProperties();
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: defa9eea11e09dc4b951b5f414f839e9
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,57 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using UnityEngine;
using UnityEditor;
namespace Microsoft.MixedReality.WebRTC.Unity.Editor
{
/// <summary>
/// Property drawer for <see cref="SdpTokenAttribute"/>, to validate the associated string
/// property content and display an error message box if invalid characters are found.
/// </summary>
[CustomPropertyDrawer(typeof(SdpTokenAttribute))]
public class SdpTokenDrawer : PropertyDrawer
{
private const int c_errorMessageHeight = 35;
public override void OnGUI(Rect position, SerializedProperty property, GUIContent label)
{
try
{
var sdpTokenAttr = attribute as SdpTokenAttribute;
SdpTokenAttribute.Validate(property.stringValue, sdpTokenAttr.AllowEmpty);
}
catch (ArgumentException)
{
// Display error message below the property
var totalHeight = position.height;
position.yMin = position.yMax - c_errorMessageHeight;
EditorGUI.HelpBox(position, "Invalid characters in property. SDP tokens cannot contain some characters like space or quote. See SdpTokenAttribute.Validate() for details.", MessageType.Error);
// Adjust rect for the property itself
position.yMin = position.yMax - totalHeight;
position.yMax -= c_errorMessageHeight;
}
EditorGUI.PropertyField(position, property, label);
}
public override float GetPropertyHeight(SerializedProperty property, GUIContent label)
{
float height = base.GetPropertyHeight(property, label);
try
{
var sdpTokenAttr = attribute as SdpTokenAttribute;
SdpTokenAttribute.Validate(property.stringValue, sdpTokenAttr.AllowEmpty);
}
catch (ArgumentException)
{
// Add extra space for the error message
height += c_errorMessageHeight;
}
return height;
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 51d1ddf7db40c6948a42f912402d20a7
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,24 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using UnityEngine;
using UnityEditor;
namespace Microsoft.MixedReality.WebRTC.Unity.Editor
{
/// <summary>
/// Property drawer for <see cref="ToggleLeftAttribute"/>.
/// </summary>
[CustomPropertyDrawer(typeof(ToggleLeftAttribute))]
public class ToggleLeftDrawer : PropertyDrawer
{
public override void OnGUI(Rect position, SerializedProperty property, GUIContent label)
{
using (new EditorGUI.PropertyScope(position, label, property))
{
property.boolValue = EditorGUI.ToggleLeft(position, label, property.boolValue);
}
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 317c7bfb57fe2824cbe37db424aaabe8
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,60 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using UnityEditor;
using UnityEngine;
namespace Microsoft.MixedReality.WebRTC.Unity.Editor
{
/// <summary>
/// Inspector editor for <see cref="VideoRenderer"/>.
/// </summary>
[CustomEditor(typeof(VideoRenderer))]
[CanEditMultipleObjects]
public class VideoRendererEditor : UnityEditor.Editor
{
SerializedProperty _maxFramerate;
SerializedProperty _enableStatistics;
SerializedProperty _frameLoadStatHolder;
SerializedProperty _framePresentStatHolder;
SerializedProperty _frameSkipStatHolder;
void OnEnable()
{
_maxFramerate = serializedObject.FindProperty("MaxFramerate");
_enableStatistics = serializedObject.FindProperty("EnableStatistics");
_frameLoadStatHolder = serializedObject.FindProperty("FrameLoadStatHolder");
_framePresentStatHolder = serializedObject.FindProperty("FramePresentStatHolder");
_frameSkipStatHolder = serializedObject.FindProperty("FrameSkipStatHolder");
}
/// <summary>
/// Override implementation of <a href="https://docs.unity3d.com/ScriptReference/Editor.OnInspectorGUI.html">Editor.OnInspectorGUI</a>
/// to draw the inspector GUI for the currently selected <see cref="VideoRenderer"/>.
/// </summary>
public override void OnInspectorGUI()
{
serializedObject.Update();
GUILayout.Space(10);
EditorGUILayout.LabelField("Video", EditorStyles.boldLabel);
EditorGUILayout.PropertyField(_maxFramerate);
GUILayout.Space(10);
EditorGUILayout.PropertyField(_enableStatistics);
if (_enableStatistics.boolValue)
{
using (new EditorGUI.IndentLevelScope())
{
EditorGUILayout.PropertyField(_frameLoadStatHolder);
EditorGUILayout.PropertyField(_framePresentStatHolder);
EditorGUILayout.PropertyField(_frameSkipStatHolder);
}
}
serializedObject.ApplyModifiedProperties();
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: af70ba26c91da4f48bb7da544db3fc9e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,497 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using UnityEditor;
using UnityEngine;
namespace Microsoft.MixedReality.WebRTC.Unity.Editor
{
/// <summary>
/// Inspector editor for <see cref="WebcamSource"/>.
/// </summary>
[CustomEditor(typeof(WebcamSource))]
[CanEditMultipleObjects]
public class WebcamSourceEditor : UnityEditor.Editor
{
SerializedProperty _enableMixedRealityCapture;
SerializedProperty _enableMrcRecordingIndicator;
SerializedProperty _formatMode;
SerializedProperty _videoProfileId;
SerializedProperty _videoProfileKind;
SerializedProperty _constraints;
SerializedProperty _width;
SerializedProperty _height;
SerializedProperty _framerate;
SerializedProperty _videoStreamStarted;
SerializedProperty _videoStreamStopped;
GUIContent _anyContent;
float _anyWidth;
float _unitWidth;
int _prevWidth = 640;
int _prevHeight = 480;
double _prevFramerate = 30.0;
VideoProfileKind _prevVideoProfileKind = VideoProfileKind.VideoConferencing;
string _prevVideoProfileId = "<profile id>";
/// <summary>
/// Helper enumeration for commonly used video codecs.
/// The enum names must match exactly the standard SDP naming.
/// See https://en.wikipedia.org/wiki/RTP_audio_video_profile for reference.
/// </summary>
enum SdpVideoCodecs
{
/// <summary>
/// Do not force any codec, let WebRTC decide.
/// </summary>
None,
/// <summary>
/// Try to use H.264 if available.
/// </summary>
H264,
/// <summary>
/// Try to use VP8 if available.
/// </summary>
VP8,
/// <summary>
/// Try to use VP9 if available.
/// </summary>
VP9,
/// <summary>
/// Try to use the given codec if available.
/// </summary>
Custom
}
void OnEnable()
{
_enableMixedRealityCapture = serializedObject.FindProperty("EnableMixedRealityCapture");
_enableMrcRecordingIndicator = serializedObject.FindProperty("EnableMRCRecordingIndicator");
_formatMode = serializedObject.FindProperty("FormatMode");
_videoProfileId = serializedObject.FindProperty("VideoProfileId");
_videoProfileKind = serializedObject.FindProperty("VideoProfileKind");
_constraints = serializedObject.FindProperty("Constraints");
_width = _constraints.FindPropertyRelative("width");
_height = _constraints.FindPropertyRelative("height");
_framerate = _constraints.FindPropertyRelative("framerate");
_videoStreamStarted = serializedObject.FindProperty("VideoStreamStarted");
_videoStreamStopped = serializedObject.FindProperty("VideoStreamStopped");
_anyContent = new GUIContent("(any)");
_anyWidth = -1f; // initialized later
_unitWidth = -1f; // initialized later
}
/// <summary>
/// Override implementation of <a href="https://docs.unity3d.com/ScriptReference/Editor.OnInspectorGUI.html">Editor.OnInspectorGUI</a>
/// to draw the inspector GUI for the currently selected <see cref="WebcamSource"/>.
/// </summary>
public override void OnInspectorGUI()
{
// CalcSize() can only be called inside a GUI method
if (_anyWidth < 0)
_anyWidth = GUI.skin.label.CalcSize(_anyContent).x;
if (_unitWidth < 0)
_unitWidth = GUI.skin.label.CalcSize(new GUIContent("fps")).x;
serializedObject.Update();
if (!PlayerSettings.WSA.GetCapability(PlayerSettings.WSACapability.WebCam))
{
EditorGUILayout.HelpBox("The UWP player is missing the WebCam capability. The WebcamSource component will not function correctly."
+ " Add the WebCam capability in Project Settings > Player > UWP > Publishing Settings > Capabilities.", MessageType.Error);
if (GUILayout.Button("Open Player Settings"))
{
SettingsService.OpenProjectSettings("Project/Player");
}
if (GUILayout.Button("Add WebCam Capability"))
{
PlayerSettings.WSA.SetCapability(PlayerSettings.WSACapability.WebCam, true);
}
}
GUILayout.Space(10);
EditorGUILayout.LabelField("Video capture", EditorStyles.boldLabel);
EditorGUILayout.PropertyField(_formatMode, new GUIContent("Capture format",
"Decide how to obtain the constraints used to select the best capture format."));
if ((LocalVideoSourceFormatMode)_formatMode.intValue == LocalVideoSourceFormatMode.Manual)
{
using (new EditorGUI.IndentLevelScope())
{
EditorGUILayout.LabelField("General constraints (all platforms)");
using (new EditorGUI.IndentLevelScope())
{
OptionalIntField(_width, ref _prevWidth,
new GUIContent("Width", "Only consider capture formats with the specified width."),
new GUIContent("px", "Pixels"));
OptionalIntField(_height, ref _prevHeight,
new GUIContent("Height", "Only consider capture formats with the specified height."),
new GUIContent("px", "Pixels"));
OptionalDoubleField(_framerate, ref _prevFramerate,
new GUIContent("Framerate", "Only consider capture formats with the specified framerate."),
new GUIContent("fps", "Frames per second"));
}
EditorGUILayout.LabelField("UWP constraints");
using (new EditorGUI.IndentLevelScope())
{
OptionalEnumField(_videoProfileKind, VideoProfileKind.Unspecified, ref _prevVideoProfileKind,
new GUIContent("Video profile kind", "Only consider capture formats associated with the specified video profile kind."));
OptionalTextField(_videoProfileId, ref _prevVideoProfileId,
new GUIContent("Video profile ID", "Only consider capture formats associated with the specified video profile."));
if ((_videoProfileKind.intValue != (int)VideoProfileKind.Unspecified) && (_videoProfileId.stringValue.Length > 0))
{
EditorGUILayout.HelpBox("Video profile ID is already unique. Specifying also a video kind over-constrains the selection algorithm and can decrease the chances of finding a matching video profile. It is recommended to select either a video profile kind, or a video profile ID.", MessageType.Warning);
}
}
}
}
_enableMixedRealityCapture.boolValue = EditorGUILayout.ToggleLeft("Enable Mixed Reality Capture (MRC)", _enableMixedRealityCapture.boolValue);
if (_enableMixedRealityCapture.boolValue)
{
using (var scope = new EditorGUI.IndentLevelScope())
{
_enableMrcRecordingIndicator.boolValue = EditorGUILayout.ToggleLeft("Show recording indicator in device", _enableMrcRecordingIndicator.boolValue);
if (!PlayerSettings.virtualRealitySupported)
{
EditorGUILayout.HelpBox("Mixed Reality Capture can only work in exclusive-mode apps. XR support must be enabled in Project Settings > Player > XR Settings > Virtual Reality Supported, and the project then saved to disk.", MessageType.Error);
if (GUILayout.Button("Enable XR support"))
{
PlayerSettings.virtualRealitySupported = true;
}
}
}
}
GUILayout.Space(10);
EditorGUILayout.PropertyField(_videoStreamStarted);
EditorGUILayout.PropertyField(_videoStreamStopped);
serializedObject.ApplyModifiedProperties();
}
/// <summary>
/// ToggleLeft control associated with a given SerializedProperty, to enable automatic GUI
/// handling like the Prefab revert menu.
/// </summary>
/// <param name="property">The boolean property associated with the control.</param>
/// <param name="label">The label to display next to the toggle control.</param>
private void ToggleLeft(SerializedProperty property, GUIContent label)
{
var rect = EditorGUILayout.GetControlRect();
using (new EditorGUI.PropertyScope(rect, label, property))
{
property.boolValue = EditorGUI.ToggleLeft(rect, label, property.boolValue);
}
}
/// <summary>
/// IntField with optional toggle associated with a given SerializedProperty, to enable
/// automatic GUI handling like the Prefab revert menu.
///
/// Valid integer values are non-zero positive integers. Any negative or zero value is
/// considered invalid and means the value is not set, which shows up as an unchecked
/// left toggle widget.
///
/// To enforce a valid value when the toggle control is checked by the user, a default valid
/// value is provided in <paramref name="lastValidValue"/>. For UI consistency, the last selected
/// valid value is returned in <paramref name="lastValidValue"/>, to allow toggling the field
/// ON and OFF without losing the valid value it previously had.
/// </summary>
/// <param name="intProperty">The integer property associated with the control.</param>
/// <param name="lastValidValue">
/// Default value if the property value is invalid (negative or zero).
/// Assigned the new value on return if valid.
/// </param>
/// <param name="label">The label to display next to the toggle control.</param>
/// <param name="unitLabel">The label indicating the unit of the value.</param>
private void OptionalIntField(SerializedProperty intProperty, ref int lastValidValue, GUIContent label, GUIContent unitLabel)
{
if (lastValidValue <= 0)
{
throw new ArgumentOutOfRangeException(nameof(lastValidValue), "Default value cannot be invalid.");
}
using (new EditorGUILayout.HorizontalScope())
{
var rect = EditorGUILayout.GetControlRect();
using (new EditorGUI.PropertyScope(rect, label, intProperty))
{
bool hadValidValue = (intProperty.intValue > 0);
bool needsValidValue = EditorGUI.ToggleLeft(rect, label, hadValidValue);
int newValue = intProperty.intValue;
if (needsValidValue)
{
// Force a valid value, otherwise the edit field won't show up
if (newValue <= 0)
{
newValue = lastValidValue;
}
// Make updating the value of the serialized property delayed to allow overriding the
// value the user will input before it's assigned to the property, for validation.
newValue = EditorGUILayout.DelayedIntField(newValue);
if (newValue < 0)
{
newValue = 0;
}
}
else
{
// Force invalid value for consistency, otherwise this breaks Prefab revert
newValue = 0;
}
intProperty.intValue = newValue;
if (newValue > 0)
{
GUILayout.Label(unitLabel, GUILayout.Width(_unitWidth));
// Save valid value as new default. This allows toggling the toggle widget ON and OFF
// without losing the value previously input. This works only while the inspector is
// alive, that is while the object is selected, but is better than nothing.
lastValidValue = newValue;
}
else
{
GUILayout.Label(_anyContent, GUILayout.Width(_anyWidth));
}
}
}
}
/// <summary>
/// DoubleField with optional toggle associated with a given SerializedProperty, to enable
/// automatic GUI handling like the Prefab revert menu.
///
/// Valid double values are non-zero positive doubles. Any negative or zero value is
/// considered invalid and means the value is not set, which shows up as an unchecked
/// left toggle widget.
///
/// To enforce a valid value when the toggle control is checked by the user, a default valid
/// value is provided in <paramref name="lastValidValue"/>. For UI consistency, the last selected
/// valid value is returned in <paramref name="lastValidValue"/>, to allow toggling the field
/// ON and OFF without losing the valid value it previously had.
/// </summary>
/// <param name="doubleProperty">The double property associated with the control.</param>
/// <param name="lastValidValue">
/// Default value if the property value is invalid (negative or zero).
/// Assigned the new value on return if valid.
/// </param>
/// <param name="label">The label to display next to the toggle control.</param>
/// <param name="unitLabel">The label indicating the unit of the value.</param>
private void OptionalDoubleField(SerializedProperty doubleProperty, ref double lastValidValue, GUIContent label, GUIContent unitLabel)
{
if (lastValidValue <= 0.0)
{
throw new ArgumentOutOfRangeException(nameof(lastValidValue), "Default value cannot be invalid.");
}
using (new EditorGUILayout.HorizontalScope())
{
var rect = EditorGUILayout.GetControlRect();
using (new EditorGUI.PropertyScope(rect, label, doubleProperty))
{
bool hadValidValue = (doubleProperty.doubleValue > 0.0);
bool needsValidValue = EditorGUI.ToggleLeft(rect, label, hadValidValue);
double newValue = doubleProperty.doubleValue;
if (needsValidValue)
{
// Force a valid value, otherwise the edit field won't show up
if (newValue <= 0.0)
{
newValue = lastValidValue;
}
// Make updating the value of the serialized property delayed to allow overriding the
// value the user will input before it's assigned to the property, for validation.
newValue = EditorGUILayout.DelayedDoubleField(newValue);
if (newValue < 0.0)
{
newValue = 0.0;
}
}
else
{
// Force invalid value for consistency, otherwise this breaks Prefab revert
newValue = 0.0;
}
doubleProperty.doubleValue = newValue;
if (newValue > 0.0)
{
GUILayout.Label(unitLabel, GUILayout.Width(_unitWidth));
// Save valid value as new default. This allows toggling the toggle widget ON and OFF
// without losing the value previously input. This works only while the inspector is
// alive, that is while the object is selected, but is better than nothing.
lastValidValue = newValue;
}
else
{
GUILayout.Label(_anyContent, GUILayout.Width(_anyWidth));
}
}
}
}
/// <summary>
/// Helper to convert an enum to its integer value.
/// </summary>
/// <typeparam name="TValue">The enum type.</typeparam>
/// <param name="value">The enum value.</param>
/// <returns>The integer value associated with <paramref name="value"/>.</returns>
public static int EnumToInt<TValue>(TValue value) where TValue : Enum => (int)(object)value;
/// <summary>
/// Helper to convert an integer to its enum value.
/// </summary>
/// <typeparam name="TValue">The enum type.</typeparam>
/// <param name="value">The integer value.</param>
/// <returns>The enum value whose integer value is <paramref name="value"/>.</returns>
public static TValue IntToEnum<TValue>(int value) where TValue : Enum => (TValue)(object)value;
/// <summary>
/// EnumPopup with optional toggle associated with a given SerializedProperty, to enable
/// automatic GUI handling like the Prefab revert menu.
///
/// Valid enum values are any value different from <paramref name="nilValue"/>. A value of
/// <paramref name="nilValue"/> is considered invalid and means the value is not set, which
/// shows up as an unchecked left toggle widget.
///
/// To enforce a valid value when the toggle control is checked by the user, a default valid value
/// is provided in <paramref name="lastValidValue"/>, which must be different from <paramref name="nilValue"/>.
/// For UI consistency, the last selected valid value is returned in <paramref name="lastValidValue"/>,
/// to allow toggling the field ON and OFF without losing the valid value it previously had.
/// </summary>
/// <param name="enumProperty">The enum property associated with the control.</param>
/// <param name="nilValue">Value considered to be "invalid", which deselects the toggle control.</param>
/// <param name="lastValidValue">
/// Default value if the property value is <paramref name="nilValue"/>.
/// Assigned the new value on return if not <paramref name="nilValue"/>.
/// </param>
/// <param name="label">The label to display next to the toggle control.</param>
private void OptionalEnumField<T>(SerializedProperty enumProperty, T nilValue, ref T lastValidValue, GUIContent label) where T : Enum
{
if (nilValue.CompareTo(lastValidValue) == 0)
{
throw new ArgumentOutOfRangeException(nameof(lastValidValue), "Default value cannot be invalid.");
}
using (new EditorGUILayout.HorizontalScope())
{
var rect = EditorGUILayout.GetControlRect();
using (new EditorGUI.PropertyScope(rect, label, enumProperty))
{
bool hadValidValue = (enumProperty.intValue != EnumToInt<T>(nilValue));
bool needsValidValue = EditorGUI.ToggleLeft(rect, label, hadValidValue);
T newValue = IntToEnum<T>(enumProperty.intValue);
if (needsValidValue)
{
// Force a valid value, otherwise the popup control won't show up
if (newValue.CompareTo(nilValue) == 0)
{
newValue = lastValidValue;
}
newValue = (T)EditorGUILayout.EnumPopup(newValue);
}
else
{
// Force invalid value for consistency, otherwise this breaks Prefab revert
newValue = nilValue;
}
enumProperty.intValue = EnumToInt<T>(newValue);
if (newValue.CompareTo(nilValue) != 0)
{
// Save valid value as new default. This allows toggling the toggle widget ON and OFF
// without losing the value previously input. This works only while the inspector is
// alive, that is while the object is selected, but is better than nothing.
lastValidValue = newValue;
}
else
{
GUILayout.Label(_anyContent, GUILayout.Width(_anyWidth));
}
}
}
}
/// <summary>
/// TextField with optional toggle associated with a given SerializedProperty, to enable
/// automatic GUI handling like the Prefab revert menu.
///
/// Valid string values are any non-empty, non-whitespace-only string. An empty string or a
/// string made up of only whitespace is considered invalid and means the value is not set,
/// which shows up as an unchecked left toggle widget.
///
/// To enforce a valid value when the toggle control is checked by the user, a default valid
/// value is provided in <paramref name="lastValidValue"/>. For UI consistency, the last selected
/// valid value is returned in <paramref name="lastValidValue"/>, to allow toggling the field
/// ON and OFF without losing the valid value it previously had.
/// </summary>
/// <param name="stringProperty">The string property associated with the control.</param>
/// <param name="lastValidValue">
/// Default value if the property value is null or whitespace.
/// Assigned the new value on return if valid.
/// </param>
/// <param name="label">The label to display next to the toggle control.</param>
private void OptionalTextField(SerializedProperty stringProperty, ref string lastValidValue, GUIContent label)
{
if (string.IsNullOrWhiteSpace(lastValidValue))
{
throw new ArgumentOutOfRangeException(nameof(lastValidValue), "Default value cannot be invalid.");
}
using (new EditorGUILayout.HorizontalScope())
{
var rect = EditorGUILayout.GetControlRect();
using (new EditorGUI.PropertyScope(rect, label, stringProperty))
{
bool hadValidValue = !string.IsNullOrWhiteSpace(stringProperty.stringValue);
bool needsValidValue = EditorGUI.ToggleLeft(rect, label, hadValidValue);
string newValue = stringProperty.stringValue;
if (needsValidValue)
{
// Force a valid value, otherwise the edit field won't show up
if (string.IsNullOrWhiteSpace(newValue))
{
newValue = lastValidValue;
}
// Make updating the value of the serialized property delayed to allow overriding the
// value the user will input before it's assigned to the property, for validation.
newValue = EditorGUILayout.DelayedTextField(newValue);
if (string.IsNullOrWhiteSpace(newValue))
{
newValue = string.Empty;
}
}
else
{
// Force invalid value for consistency, otherwise this breaks Prefab revert
newValue = string.Empty;
}
stringProperty.stringValue = newValue;
if (!string.IsNullOrWhiteSpace(newValue))
{
// Save valid value as new default. This allows toggling the toggle widget ON and OFF
// without losing the value previously input. This works only while the inspector is
// alive, that is while the object is selected, but is better than nothing.
lastValidValue = newValue;
}
else
{
GUILayout.Label(_anyContent, GUILayout.Width(_anyWidth));
}
}
}
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 73af9a06137c0b949b7b86936464626a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) Microsoft Corporation. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: 441ac42ff1655da48934640cec15925c
TextScriptImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,89 @@
# MixedReality-WebRTC for Unity
![Unity3D](https://img.shields.io/badge/Unity3D-2018.4%2B-ff4080)
[![Licensed under the MIT License](https://img.shields.io/badge/License-MIT-blue.svg)](https://github.com/microsoft/MixedReality-WebRTC-Unity/blob/master/LICENSE)
[![Holodevelopers channel on Slack](https://img.shields.io/badge/slack-@holodevelopers-%23972299.svg?logo=slack)](https://holodevelopers.slack.com/messages/CN1A7JB3R)
[![Under active development](https://img.shields.io/badge/status-active-green.svg)](https://github.com/microsoft/MixedReality-WebRTC-Unity/commits/master)
[![Build Status](https://dev.azure.com/aipmr/MixedReality-WebRTC-CI/_apis/build/status/mr-webrtc-unity-ci?branchName=master)](https://dev.azure.com/aipmr/MixedReality-WebRTC-CI/_build/latest?definitionId=TODO&branchName=master)
MixedReality-WebRTC for Unity (`com.microsoft.mixedreality.webrtc`) is a package for [Unity](https://unity.com/) to help mixed reality app developers integrate peer-to-peer real-time audio, video, and data communication into their application, and improve their collaborative experience.
## Features
- Enables **multi-track real-time audio / video / data communication** with a remote peer. Multiple connections can be used in parallel to communicate with multiple peers.
- Provides drop-in components for:
- Peer connection and automated media tracks management
- Local audio and video capture from **webcam** and **microphone**
- Remote audio output via an AudioSource component, for **2D and spatial audio**
- Remote video rendering through a Unity texture, for use on any mesh
- **Scene video streaming** from any Unity Camera component (stream-what-you-see)
- Supports in-editor use (Play mode)
- Supports UWP devices, including Microsoft **HoloLens** (x86) and Microsoft **HoloLens 2** (ARM)
- Allows easy use of **[Mixed Reality Capture (MRC)](https://docs.microsoft.com/en-us/windows/mixed-reality/mixed-reality-capture)** to stream the view point of the user for multi-device experiences
MixedReality-WebRTC for Unity is part of the open-source [MixedReality-WebRTC](https://github.com/microsoft/MixedReality-WebRTC/) project hosted on GitHub, and leverages the C# library and native implementation of that project.
## Install
This package can be imported directly into an existing Unity project as a custom package:
- Open Unity and load the Unity project to add the MixedReality-WebRTC library to.
- Download the latest package from [the GitHub Releases page](https://github.com/microsoft/MixedReality-WebRTC/releases) and unzip it somewhere on your local hard disk.
- Go to the **Package Manager** window (_Window_ > _Package Manager_)
- Expand the "+" menu and select "_Add package from disk..._"
![Select Add package from disk...](Documentation~/install1.png)
_Note: In Unity 2018.4, the "+" menu is located at the bottom left of the Package Manager window._
- Select the `package.json` file from the place where the package was unzipped.
This installs the package and adds a reference to it in the currently open Unity project.
![The MixedReality-WebRTC package is added to the current project](Documentation~/install2.png)
See Unity's documentation on [Installing a local package](https://docs.unity3d.com/Manual/upm-ui-local.html) for more details.
## Documentation
The official documentation of the MixedReality-WebRTC project is hosted at [https://microsoft.github.io/MixedReality-WebRTC/manual/unity-integration.html](https://microsoft.github.io/MixedReality-WebRTC/manual/unity-integration.html).
An API reference is also available at [https://microsoft.github.io/MixedReality-WebRTC/api/Microsoft.MixedReality.WebRTC.Unity.html](https://microsoft.github.io/MixedReality-WebRTC/api/Microsoft.MixedReality.WebRTC.Unity.html)
## Samples
Official samples are hosted in a separate package (`com.microsoft.mixedreality.webrtc.samples`) also available from [the GitHub Releases page](https://github.com/microsoft/MixedReality-WebRTC/releases).
## Special considerations for HoloLens 2
- Mixed Reality Capture (MRC) has some inherent limitations:
- **MRC only works up to 1080p** (see the [Mixed reality capture for developers](https://docs.microsoft.com/en-us/windows/mixed-reality/mixed-reality-capture-for-developers) documentation), but the default resolution of the webcam on HoloLens 2 is 2272 x 1278 (see the [Locatable Camera](https://docs.microsoft.com/en-us/windows/mixed-reality/locatable-camera) documentation). In order to access different resolutions, one needs to use a different video profile, like the `VideoRecording` or `VideoConferencing` ones. This is handled automatically in the Unity integration layer (see [here](https://github.com/microsoft/MixedReality-WebRTC/blob/9a81c94cf01786398495f8046b645b7b28d987de/libs/Microsoft.MixedReality.WebRTC.Unity/Assets/Microsoft.MixedReality.WebRTC.Unity/Scripts/Media/LocalVideoSource.cs#L210-L237)) if `LocalVideoSource.Mode = Automatic` (default), but must be handled manually if using the C# library directly.
- **MRC requires special permission** to record the content of the screen:
- For shared apps (2D slates), this corresponds to the `screenDuplication` [restricted capability](https://docs.microsoft.com/en-us/windows/uwp/packaging/app-capability-declarations#restricted-capabilities), which **cannot be obtained by third-party applications**.
- For exclusive-mode apps (fullscreen), there is no particular UWP capability, but the recorded content is limited to the application's own content.
- Be sure to use `PreferredVideoCodec = "H264"` to take advantage of the hardware encoder present on the device (see the sketch below); software encoding with _e.g._ the VP8 or VP9 codecs is very CPU intensive and strongly discouraged.
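For reference, here is a minimal sketch of forcing H.264 from a script. It assumes `PreferredVideoCodec` is exposed as a plain string field on the `PeerConnection` component, as suggested above; adapt it to the actual API of the package version in use.

```csharp
using Microsoft.MixedReality.WebRTC.Unity;
using UnityEngine;

// Illustrative sketch: request the hardware H.264 encoder on HoloLens 2
// before the peer connection is negotiated.
public class ForceH264 : MonoBehaviour
{
    // Assign the PeerConnection component of the scene in the Inspector.
    public PeerConnection Connection;

    void Awake()
    {
        // An empty string lets WebRTC pick the codec; "H264" requests the hardware encoder.
        Connection.PreferredVideoCodec = "H264";
    }
}
```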
## Known Issues
The current version is under active development, and contains known issues inherited from the underlying C# library of the MixedReality-WebRTC sister project:
- HoloLens 2 exhibits a small performance penalty due to the [missing support (#157)](https://github.com/webrtc-uwp/webrtc-uwp-sdk/issues/157) for SIMD-accelerated YUV conversion in the WebRTC UWP SDK on ARM.
- H.264 hardware video encoding (UWP only) exhibits some quality degradation (blockiness). See [#74](https://github.com/microsoft/MixedReality-WebRTC/issues/74) and [#101](https://github.com/microsoft/MixedReality-WebRTC/issues/101) for details.
- H.264 is not currently available on Desktop; only the software VP8 and VP9 codecs are available.
## Contributing
This project welcomes contributions and suggestions. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit [https://cla.microsoft.com](https://cla.microsoft.com).
When you submit a pull request, a CLA-bot will automatically determine whether you need to provide a CLA and decorate the PR appropriately (_e.g._, label, comment). Simply follow the instructions provided by the bot. You will only need to do this once across all repos using our CLA.
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
## Reporting security issues and bugs
MixedReality-WebRTC-Unity builds upon the WebRTC implementation provided by Google. Security issues and bugs related to this implementation should be reported to Google.
Security issues and bugs related to MixedReality-WebRTC and MixedReality-WebRTC-Unity themselves, or to WebRTC UWP SDK, should be reported privately, via email, to the Microsoft Security Response Center (MSRC) secure@microsoft.com. You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Further information, including the MSRC PGP key, can be found in the [Security TechCenter](https://technet.microsoft.com/en-us/security/ff852094.aspx).

View File

@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: a39d36a50babebf428f22acf02fa12b5
TextScriptImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: b12d4c42e6df3304d8c9d14014cb63ce
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 2ac3ec7467bfa7e42a9e59201173e535
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,78 @@
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!21 &2100000
Material:
serializedVersion: 6
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_Name: ARGBFeedMaterial
m_Shader: {fileID: 4800000, guid: 774ee07e70a065847b90a93e9c377d33, type: 3}
m_ShaderKeywords:
m_LightmapFlags: 4
m_EnableInstancingVariants: 1
m_DoubleSidedGI: 0
m_CustomRenderQueue: -1
stringTagMap: {}
disabledShaderPasses: []
m_SavedProperties:
serializedVersion: 3
m_TexEnvs:
- _BumpMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailAlbedoMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailMask:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailNormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _EmissionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MainTex:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MetallicGlossMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _OcclusionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ParallaxMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
m_Floats:
- _BumpScale: 1
- _Cutoff: 0.5
- _DetailNormalMapScale: 1
- _DstBlend: 0
- _GlossMapScale: 1
- _Glossiness: 0.5
- _GlossyReflections: 1
- _Metallic: 0
- _Mirror: 0
- _Mode: 0
- _OcclusionStrength: 1
- _Parallax: 0.02
- _SmoothnessTextureChannel: 0
- _SpecularHighlights: 1
- _SrcBlend: 1
- _UVSec: 0
- _ZWrite: 1
m_Colors:
- _Color: {r: 1, g: 1, b: 1, a: 1}
- _EmissionColor: {r: 0, g: 0, b: 0, a: 1}

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 4d29421bacacd4b4fb2963ac2f71afe5
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 2100000
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,90 @@
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!21 &2100000
Material:
serializedVersion: 6
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_Name: YUVFeedMaterial
m_Shader: {fileID: 4800000, guid: f5704ed07eb02e5438d0b6f485061362, type: 3}
m_ShaderKeywords:
m_LightmapFlags: 4
m_EnableInstancingVariants: 1
m_DoubleSidedGI: 0
m_CustomRenderQueue: -1
stringTagMap: {}
disabledShaderPasses: []
m_SavedProperties:
serializedVersion: 3
m_TexEnvs:
- _BumpMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailAlbedoMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailMask:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailNormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _EmissionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MainTex:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MetallicGlossMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _OcclusionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ParallaxMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _UPlane:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _VPlane:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _YPlane:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
m_Floats:
- _BumpScale: 1
- _Cutoff: 0.5
- _DetailNormalMapScale: 1
- _DstBlend: 0
- _GlossMapScale: 1
- _Glossiness: 0.5
- _GlossyReflections: 1
- _Metallic: 0
- _Mirror: 0
- _Mode: 0
- _OcclusionStrength: 1
- _Parallax: 0.02
- _SmoothnessTextureChannel: 0
- _SpecularHighlights: 1
- _SrcBlend: 1
- _UVSec: 0
- _ZWrite: 1
m_Colors:
- _Color: {r: 1, g: 1, b: 1, a: 1}
- _EmissionColor: {r: 0, g: 0, b: 0, a: 1}

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 82241a8371f7b1c4db3b05da484e9648
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 0
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,12 @@
{
"name": "Microsoft.MixedReality.WebRTC.Unity",
"references": [],
"optionalUnityReferences": [],
"includePlatforms": [],
"excludePlatforms": [],
"allowUnsafeCode": true,
"overrideReferences": false,
"precompiledReferences": [],
"autoReferenced": true,
"defineConstraints": []
}

View File

@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: 73d456c551e58394a94a92ad9cd538db
AssemblyDefinitionImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 891c987abf0c3284299c7a567d6495cb
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: a67ae1ff85560ec449930959330fe56b
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: cd91048fd24d72a4db51fbabfe6a4221
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,94 @@
fileFormatVersion: 2
guid: 69f9b3e0684c398499cc494d5b384891
PluginImporter:
externalObjects: {}
serializedVersion: 2
iconMap: {}
executionOrder: {}
defineConstraints: []
isPreloaded: 0
isOverridable: 0
isExplicitlyReferenced: 0
validateReferences: 1
platformData:
- first:
'': Any
second:
enabled: 0
settings:
Exclude Editor: 1
Exclude Linux: 1
Exclude Linux64: 1
Exclude LinuxUniversal: 1
Exclude OSXUniversal: 1
Exclude Win: 1
Exclude Win64: 1
Exclude WindowsStoreApps: 0
- first:
Any:
second:
enabled: 0
settings: {}
- first:
Editor: Editor
second:
enabled: 0
settings:
CPU: AnyCPU
DefaultValueInitialized: true
OS: AnyOS
- first:
Facebook: Win
second:
enabled: 0
settings:
CPU: AnyCPU
- first:
Facebook: Win64
second:
enabled: 0
settings:
CPU: AnyCPU
- first:
Standalone: Linux
second:
enabled: 0
settings:
CPU: x86
- first:
Standalone: Linux64
second:
enabled: 0
settings:
CPU: x86_64
- first:
Standalone: OSXUniversal
second:
enabled: 0
settings:
CPU: AnyCPU
- first:
Standalone: Win
second:
enabled: 0
settings:
CPU: AnyCPU
- first:
Standalone: Win64
second:
enabled: 0
settings:
CPU: AnyCPU
- first:
Windows Store Apps: WindowsStoreApps
second:
enabled: 1
settings:
CPU: ARM
DontProcess: false
PlaceholderPath:
SDK: UWP
ScriptingBackend: AnyScriptingBackend
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 87283c81240e3c845ac10f528ea8d70e
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,94 @@
fileFormatVersion: 2
guid: 253af5590ef9c674c96e0ba020b8ca2a
PluginImporter:
externalObjects: {}
serializedVersion: 2
iconMap: {}
executionOrder: {}
defineConstraints: []
isPreloaded: 0
isOverridable: 0
isExplicitlyReferenced: 0
validateReferences: 1
platformData:
- first:
'': Any
second:
enabled: 0
settings:
Exclude Editor: 1
Exclude Linux: 1
Exclude Linux64: 1
Exclude LinuxUniversal: 1
Exclude OSXUniversal: 1
Exclude Win: 1
Exclude Win64: 1
Exclude WindowsStoreApps: 0
- first:
Any:
second:
enabled: 0
settings: {}
- first:
Editor: Editor
second:
enabled: 0
settings:
CPU: AnyCPU
DefaultValueInitialized: true
OS: AnyOS
- first:
Facebook: Win
second:
enabled: 0
settings:
CPU: AnyCPU
- first:
Facebook: Win64
second:
enabled: 0
settings:
CPU: AnyCPU
- first:
Standalone: Linux
second:
enabled: 0
settings:
CPU: x86
- first:
Standalone: Linux64
second:
enabled: 0
settings:
CPU: x86_64
- first:
Standalone: OSXUniversal
second:
enabled: 0
settings:
CPU: AnyCPU
- first:
Standalone: Win
second:
enabled: 0
settings:
CPU: AnyCPU
- first:
Standalone: Win64
second:
enabled: 0
settings:
CPU: AnyCPU
- first:
Windows Store Apps: WindowsStoreApps
second:
enabled: 1
settings:
CPU: X86
DontProcess: false
PlaceholderPath:
SDK: UWP
ScriptingBackend: AnyScriptingBackend
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 86c4a2b485ca86c49bc51121c18352b7
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,94 @@
fileFormatVersion: 2
guid: 25357693012347b4ba514e0974b57edb
PluginImporter:
externalObjects: {}
serializedVersion: 2
iconMap: {}
executionOrder: {}
defineConstraints: []
isPreloaded: 0
isOverridable: 0
isExplicitlyReferenced: 0
validateReferences: 1
platformData:
- first:
'': Any
second:
enabled: 0
settings:
Exclude Editor: 1
Exclude Linux: 1
Exclude Linux64: 1
Exclude LinuxUniversal: 1
Exclude OSXUniversal: 1
Exclude Win: 1
Exclude Win64: 1
Exclude WindowsStoreApps: 0
- first:
Any:
second:
enabled: 0
settings: {}
- first:
Editor: Editor
second:
enabled: 0
settings:
CPU: AnyCPU
DefaultValueInitialized: true
OS: AnyOS
- first:
Facebook: Win
second:
enabled: 0
settings:
CPU: AnyCPU
- first:
Facebook: Win64
second:
enabled: 0
settings:
CPU: AnyCPU
- first:
Standalone: Linux
second:
enabled: 0
settings:
CPU: x86
- first:
Standalone: Linux64
second:
enabled: 0
settings:
CPU: x86_64
- first:
Standalone: OSXUniversal
second:
enabled: 0
settings:
CPU: AnyCPU
- first:
Standalone: Win
second:
enabled: 0
settings:
CPU: AnyCPU
- first:
Standalone: Win64
second:
enabled: 0
settings:
CPU: AnyCPU
- first:
Windows Store Apps: WindowsStoreApps
second:
enabled: 1
settings:
CPU: X64
DontProcess: false
PlaceholderPath:
SDK: UWP
ScriptingBackend: AnyScriptingBackend
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 9b7dc87675412e3498abe31bb3f21ea9
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 6a2e77d486b78dc4fb1db627622c466f
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,107 @@
fileFormatVersion: 2
guid: 0d44fdee241b5c249a8ea819f5ef9e9a
PluginImporter:
externalObjects: {}
serializedVersion: 2
iconMap: {}
executionOrder: {}
defineConstraints: []
isPreloaded: 0
isOverridable: 0
isExplicitlyReferenced: 0
validateReferences: 1
platformData:
- first:
'': Any
second:
enabled: 0
settings:
Exclude Android: 1
Exclude Editor: 0
Exclude Linux: 1
Exclude Linux64: 1
Exclude LinuxUniversal: 1
Exclude OSXUniversal: 1
Exclude Win: 0
Exclude Win64: 1
Exclude WindowsStoreApps: 1
- first:
Android: Android
second:
enabled: 0
settings:
CPU: ARMv7
- first:
Any:
second:
enabled: 0
settings: {}
- first:
Editor: Editor
second:
enabled: 1
settings:
CPU: x86
DefaultValueInitialized: true
OS: Windows
- first:
Facebook: Win
second:
enabled: 0
settings:
CPU: AnyCPU
- first:
Facebook: Win64
second:
enabled: 0
settings:
CPU: None
- first:
Standalone: Linux
second:
enabled: 0
settings:
CPU: x86
- first:
Standalone: Linux64
second:
enabled: 0
settings:
CPU: x86_64
- first:
Standalone: LinuxUniversal
second:
enabled: 0
settings:
CPU: AnyCPU
- first:
Standalone: OSXUniversal
second:
enabled: 0
settings:
CPU: AnyCPU
- first:
Standalone: Win
second:
enabled: 1
settings:
CPU: AnyCPU
- first:
Standalone: Win64
second:
enabled: 0
settings:
CPU: None
- first:
Windows Store Apps: WindowsStoreApps
second:
enabled: 0
settings:
CPU: X86
DontProcess: false
PlaceholderPath:
SDK: AnySDK
ScriptingBackend: AnyScriptingBackend
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 3628030434909ed449e803bbea25bbda
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,33 @@
fileFormatVersion: 2
guid: 9d631da17d93bc94ca472a4d7547e4f2
PluginImporter:
externalObjects: {}
serializedVersion: 2
iconMap: {}
executionOrder: {}
defineConstraints: []
isPreloaded: 0
isOverridable: 0
isExplicitlyReferenced: 0
validateReferences: 1
platformData:
- first:
Any:
second:
enabled: 1
settings: {}
- first:
Editor: Editor
second:
enabled: 1
settings:
DefaultValueInitialized: true
- first:
Windows Store Apps: WindowsStoreApps
second:
enabled: 1
settings:
CPU: AnyCPU
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,107 @@
fileFormatVersion: 2
guid: 39b57d3e3a74fe944826f061f56eb4ee
PluginImporter:
externalObjects: {}
serializedVersion: 2
iconMap: {}
executionOrder: {}
defineConstraints: []
isPreloaded: 0
isOverridable: 0
isExplicitlyReferenced: 0
validateReferences: 1
platformData:
- first:
'': Any
second:
enabled: 0
settings:
Exclude Android: 1
Exclude Editor: 0
Exclude Linux: 1
Exclude Linux64: 1
Exclude LinuxUniversal: 1
Exclude OSXUniversal: 1
Exclude Win: 1
Exclude Win64: 0
Exclude WindowsStoreApps: 1
- first:
Android: Android
second:
enabled: 0
settings:
CPU: ARMv7
- first:
Any:
second:
enabled: 0
settings: {}
- first:
Editor: Editor
second:
enabled: 1
settings:
CPU: x86_64
DefaultValueInitialized: true
OS: Windows
- first:
Facebook: Win
second:
enabled: 0
settings:
CPU: None
- first:
Facebook: Win64
second:
enabled: 0
settings:
CPU: AnyCPU
- first:
Standalone: Linux
second:
enabled: 0
settings:
CPU: x86
- first:
Standalone: Linux64
second:
enabled: 0
settings:
CPU: x86_64
- first:
Standalone: LinuxUniversal
second:
enabled: 0
settings:
CPU: AnyCPU
- first:
Standalone: OSXUniversal
second:
enabled: 0
settings:
CPU: AnyCPU
- first:
Standalone: Win
second:
enabled: 0
settings:
CPU: None
- first:
Standalone: Win64
second:
enabled: 1
settings:
CPU: AnyCPU
- first:
Windows Store Apps: WindowsStoreApps
second:
enabled: 0
settings:
CPU: AnyCPU
DontProcess: false
PlaceholderPath:
SDK: AnySDK
ScriptingBackend: AnyScriptingBackend
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 702ba65cbaecaec4db83b42719f4c0b1
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,32 @@
fileFormatVersion: 2
guid: f92671604373d0b4b8946b94842d6e91
PluginImporter:
externalObjects: {}
serializedVersion: 2
iconMap: {}
executionOrder: {}
defineConstraints: []
isPreloaded: 0
isOverridable: 0
isExplicitlyReferenced: 0
validateReferences: 1
platformData:
- first:
Android: Android
second:
enabled: 1
settings: {}
- first:
Any:
second:
enabled: 0
settings: {}
- first:
Editor: Editor
second:
enabled: 0
settings:
DefaultValueInitialized: true
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 8351cde9ca1c3c64684120f3b3c45cc5
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,54 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using UnityEngine;
namespace Microsoft.MixedReality.WebRTC.Unity
{
public static class Android
{
/// <summary>
/// Check if the MixedReality-WebRTC interop layer for Android is already initialized.
/// </summary>
public static bool IsInitialized { get; private set; } = false;
/// <summary>
/// Initialize the MixedReality-WebRTC library interop layer for Android.
///
/// This is automatically called by the various library API functions, and
/// can be safely called multiple times (no-op after first call).
/// </summary>
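/// <example>
/// Illustrative sketch of an explicit call, which is normally unnecessary since the library
/// performs it automatically; it can be used to front-load the initialization cost from a startup script:
/// <code>
/// void Awake()
/// {
///     Microsoft.MixedReality.WebRTC.Unity.Android.Initialize();
/// }
/// </code>
/// </example>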
public static void Initialize()
{
#if !UNITY_EDITOR && UNITY_ANDROID
if (IsInitialized)
{
return;
}
// See webrtc/examples/unityplugin/ANDROID_INSTRUCTION
// Below is equivalent of this java code:
// PeerConnectionFactory.InitializationOptions.Builder builder = PeerConnectionFactory.InitializationOptions.Builder(UnityPlayer.currentActivity);
// builder.setNativeLibraryName("mrwebrtc");
// PeerConnectionFactory.InitializationOptions options = builder.createInitializationOptions();
// PeerConnectionFactory.initialize(options);
AndroidJavaClass playerClass = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
Debug.Assert(playerClass != null);
AndroidJavaObject activity = playerClass.GetStatic<AndroidJavaObject>("currentActivity");
Debug.Assert(activity != null);
Debug.Log("Found Unity Java activity.");
AndroidJavaClass webrtcClass = new AndroidJavaClass("org.webrtc.PeerConnectionFactory");
Debug.Assert(webrtcClass != null);
AndroidJavaClass initOptionsClass = new AndroidJavaClass("org.webrtc.PeerConnectionFactory$InitializationOptions");
Debug.Assert(initOptionsClass != null);
AndroidJavaObject builder = initOptionsClass.CallStatic<AndroidJavaObject>("builder", new object[1] { activity });
Debug.Assert(builder != null);
builder.Call<AndroidJavaObject>("setNativeLibraryName", new object[1] { "mrwebrtc" });
AndroidJavaObject options = builder.Call<AndroidJavaObject>("createInitializationOptions");
webrtcClass.CallStatic("initialize", new object[1] { options });
IsInitialized = true;
Debug.Log("Initialized MixedReality-WebRTC Java binding for Android.");
#endif
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 71cf2449ddf984f45a44a0cefb983a31
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 3a07576d8352096448e6eeccfa4e2024
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,53 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using UnityEngine;
using UnityEngine.XR;
namespace Microsoft.MixedReality.WebRTC.Unity
{
/// <summary>
/// Attribute for a <see xref="UnityEngine.Camera"/> property used by <see cref="SceneVideoSource"/>
/// to capture the content of a framebuffer, and for which some constraints on stereoscopic rendering
/// options need to be enforced (and errors can be reported in the Editor if they are not followed).
/// </summary>
/// <seealso cref="SceneVideoSource"/>
public class CaptureCameraAttribute : PropertyAttribute
{
/// <summary>
/// Validate that a given <see xref="UnityEngine.Camera"/> instance can be used for framebuffer
/// capture by <see cref="SceneVideoSource"/> based on the XR settings currently in effect.
/// </summary>
/// <param name="camera">The camera instance to test the settings of.</param>
/// <exception xref="System.NotSupportedException">
/// The camera has settings not compatible with its use with <see cref="SceneVideoSource"/>.
/// </exception>
/// <seealso xref="CaptureCameraDrawer.Validate(Camera)"/>
public static void Validate(Camera camera)
{
if (camera != null)
{
if (XRSettings.stereoRenderingMode == XRSettings.StereoRenderingMode.MultiPass)
{
// Ensure camera is not rendering to both eyes in multi-pass stereo, otherwise the command buffer
// is executed twice (once per eye) and will produce twice as many frames, which leads to stuttering
// when playing back the video stream resulting from combining those frames.
if (camera.stereoTargetEye == StereoTargetEyeMask.Both)
{
throw new NotSupportedException("Capture camera renders both eyes in multi-pass stereoscopic rendering. This is not" +
" supported by the capture mechanism which cannot discriminate them. Set Camera.stereoTargetEye to either Left or" +
" Right, or use a different XRSettings.stereoRenderingMode.");
}
}
#if !UNITY_2019_1_OR_NEWER
else if ((XRSettings.stereoRenderingMode == XRSettings.StereoRenderingMode.SinglePassInstanced)
|| (XRSettings.stereoRenderingMode == XRSettings.StereoRenderingMode.SinglePassMultiview)) // same as instanced (OpenGL)
{
throw new NotSupportedException("Capture camera does not support single-pass instanced stereoscopic rendering before Unity 2019.1.");
}
#endif
}
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: fb1356a40780d844785dd91b4ae7e9bf
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,20 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using UnityEngine;
// This file contains a collection of attributes only used for editing purpose,
// generally to customize the Inspector window. They need to be located in the
// runtime assembly to be attached to runtime object fields, but do not influence
// their runtime behavior.
namespace Microsoft.MixedReality.WebRTC.Unity.Editor
{
/// <summary>
/// Attribute to display a boolean field with a toggle on its left, prefixing
/// the actual text of the field.
/// </summary>
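/// <example>
/// Illustrative usage sketch on a serialized boolean field (the field name is hypothetical):
/// <code>
/// [ToggleLeft]
/// public bool AutoStartCapture = true;
/// </code>
/// </example>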
public class ToggleLeftAttribute : PropertyAttribute
{
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 9233eb3e4d94ea547abfc93030c3dfcc
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,74 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using System.Text.RegularExpressions;
using UnityEngine;
namespace Microsoft.MixedReality.WebRTC.Unity
{
/// <summary>
/// Attribute for string properties representing an SDP token, which has constraints
/// on the allowed characters it can contain, as defined in the SDP RFC.
///
/// See https://tools.ietf.org/html/rfc4566#page-43 for details.
/// </summary>
public class SdpTokenAttribute : PropertyAttribute
{
/// <summary>
/// Allow empty tokens, that is a string property which is <c>null</c> or an empty string.
/// This is not valid in the RFC, but can be allowed as a property value to represent a default
/// value generated at runtime by the implementation instead of being provided by the user.
///
/// This is typically used as an argument to <see cref="Validate(string, bool)"/>.
/// </summary>
/// <value><c>true</c> to allow the property to be <c>null</c> or empty.</value>
public bool AllowEmpty { get; }
/// <param name="allowEmpty">Value of <see cref="AllowEmpty"/>.</param>
public SdpTokenAttribute(bool allowEmpty = true)
{
AllowEmpty = allowEmpty;
}
/// <summary>
/// Validate an SDP token name against the list of allowed characters:
/// - Symbols <c>[!#$%'*+-.^_`{|}~&amp;]</c>
/// - Alphanumerical characters <c>[A-Za-z0-9]</c>
///
/// If the validation fails, the method throws an exception.
/// </summary>
/// <remarks>
/// See https://tools.ietf.org/html/rfc4566#page-43 for 'token' reference.
/// </remarks>
/// <param name="name">The token name to validate.</param>
/// <param name="allowEmpty">
/// <c>true</c> to allow the property to be <c>null</c> or empty without raising an exception.
/// </param>
/// <exception xref="System.ArgumentNullException">
/// <paramref name="name"/> is <c>null</c> or empty, and <see cref="AllowEmpty"/> is <c>false</c>.
/// </exception>
/// <exception xref="System.ArgumentException">
/// <paramref name="name"/> contains invalid characters not allowed for a SDP 'token' item.
/// </exception>
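/// <example>
/// A minimal usage sketch (the token values are illustrative):
/// <code>
/// SdpTokenAttribute.Validate("local_audio_0", allowEmpty: false); // valid token, no exception
/// SdpTokenAttribute.Validate("bad token!", allowEmpty: false);    // throws ArgumentException (space not allowed)
/// </code>
/// </example>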
public static void Validate(string name, bool allowEmpty = true)
{
if (string.IsNullOrEmpty(name))
{
if (allowEmpty)
{
return;
}
throw new ArgumentNullException(nameof(name), "Invalid null SDP token.");
}
var regex = new Regex("^[A-Za-z0-9!#$%&'*+\\-.^_`{|}~]+$"); // escape '-' so it matches literally instead of forming a range
if (regex.IsMatch(name))
{
return;
}
throw new ArgumentException($"SDP token '{name}' contains invalid characters.");
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: aaf6d8dd252cb2145adc737313c39834
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 0140a508c0b94144780f23ed0b892271
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,93 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using UnityEngine;
using UnityEngine.Events;
namespace Microsoft.MixedReality.WebRTC.Unity
{
/// <summary>
/// Unity event corresponding to a new audio stream being started.
/// </summary>
[Serializable]
public class AudioStreamStartedEvent : UnityEvent<IAudioSource>
{ };
/// <summary>
/// Unity event corresponding to an on-going audio stream being stopped.
/// </summary>
[Serializable]
public class AudioStreamStoppedEvent : UnityEvent<IAudioSource>
{ };
/// <summary>
/// Endpoint for a WebRTC remote audio track.
/// </summary>
/// <remarks>
/// Setting this on an audio <see cref="MediaLine"/> will enable the corresponding transceiver to receive.
/// A remote track will be exposed through <see cref="AudioTrack"/> once a connection is established.
/// The audio track can optionally be played locally with an <see cref="AudioRenderer"/>.
/// </remarks>
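/// <example>
/// A minimal wiring sketch, assuming an <see cref="AudioRenderer"/> named <c>audioRenderer</c> and an
/// audio <see cref="MediaLine"/> named <c>audioLine</c> already exist (names are illustrative):
/// <code>
/// var receiver = gameObject.AddComponent&lt;AudioReceiver&gt;();
/// receiver.AudioStreamStarted.AddListener(audioRenderer.StartRendering);
/// receiver.AudioStreamStopped.AddListener(audioRenderer.StopRendering);
/// audioLine.Receiver = receiver;
/// </code>
/// </example>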
[AddComponentMenu("MixedReality-WebRTC/Audio Receiver")]
public class AudioReceiver : MediaReceiver
{
/// <summary>
/// Remote audio track receiving data from the remote peer.
/// </summary>
/// <remarks>
/// This is <c>null</c> until:
/// <list type="bullet">
/// <item><description><see cref="MediaLine.Transceiver"/> is set to a non-null value, and</description></item>
/// <item><description>the remote peer starts sending data to the paired transceiver after a session negotiation.</description></item>
/// </list>
/// </remarks>
public RemoteAudioTrack AudioTrack { get; private set; }
/// <summary>
/// Event raised when the audio stream started.
///
/// When this event is raised, the following are true:
/// - The <see cref="Track"/> property is a valid remote audio track.
/// - The <see cref="MediaReceiver.IsLive"/> property is <c>true</c>.
/// </summary>
/// <remarks>
/// This event is raised from the main Unity thread to allow Unity object access.
/// </remarks>
public AudioStreamStartedEvent AudioStreamStarted = new AudioStreamStartedEvent();
/// <summary>
/// Event raised when the audio stream stopped.
///
/// When this event is raised, the following are true:
/// - The <see cref="MediaReceiver.IsLive"/> property is <c>false</c>.
/// </summary>
/// <remarks>
/// This event is raised from the main Unity thread to allow Unity object access.
/// </remarks>
public AudioStreamStoppedEvent AudioStreamStopped = new AudioStreamStoppedEvent();
/// <inheritdoc/>
public override MediaKind MediaKind => MediaKind.Audio;
/// <inheritdoc/>
public override MediaTrack Track => AudioTrack;
/// <inheritdoc/>
protected internal override void OnPaired(MediaTrack track)
{
var remoteAudioTrack = (RemoteAudioTrack)track;
Debug.Assert(Track == null);
AudioTrack = remoteAudioTrack;
AudioStreamStarted.Invoke(remoteAudioTrack);
}
/// <inheritdoc/>
protected internal override void OnUnpaired(MediaTrack track)
{
Debug.Assert(Track == track);
AudioTrack = null;
AudioStreamStopped.Invoke((RemoteAudioTrack)track);
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: ac54f2f6b8185d444b4a85e048778344
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,171 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System.Collections.Generic;
using UnityEngine;
namespace Microsoft.MixedReality.WebRTC.Unity
{
/// <summary>
/// Utility component used to play audio frames obtained from a WebRTC audio source.
/// </summary>
/// <remarks>
/// Calling <see cref="StartRendering(IAudioSource)"/> and <see cref="StopRendering(IAudioSource)"/>
/// will start/stop playing the passed <see cref="IAudioSource"/> through a <see cref="UnityEngine.AudioSource"/>
/// component on the same object, if there is one.
///
/// The component will play only while enabled.
/// </remarks>
/// <seealso cref="AudioReceiver"/>
[AddComponentMenu("MixedReality-WebRTC/Audio Renderer")]
[RequireComponent(typeof(UnityEngine.AudioSource))]
public class AudioRenderer : MonoBehaviour
{
/// <summary>
/// If true, pad buffer underruns with a sine wave. This will cause artifacts on underruns.
/// Use for debugging.
/// </summary>
public bool PadWithSine = false;
// Local storage of audio data to be fed to the output
private AudioTrackReadBuffer _readBuffer = null;
// _readBuffer can be accessed concurrently by audio thread (OnAudioFilterRead)
// and main thread (StartStreaming, StopStreaming).
private readonly object _readBufferLock = new object();
// Cached sample rate since we can't access this in OnAudioFilterRead.
private int _audioSampleRate = 0;
// Source that this renderer is currently subscribed to.
private IAudioSource _source;
protected void Awake()
{
AudioSettings.OnAudioConfigurationChanged += OnAudioConfigurationChanged;
OnAudioConfigurationChanged(deviceWasChanged: true);
}
protected void OnDestroy()
{
AudioSettings.OnAudioConfigurationChanged -= OnAudioConfigurationChanged;
}
protected void OnEnable()
{
if (_source != null)
{
StartReadBuffer();
}
}
protected void OnDisable()
{
if (_source != null)
{
StopReadBuffer();
}
}
/// <summary>
/// Start rendering the passed source.
/// </summary>
/// <remarks>
/// Can be used to handle <see cref="AudioReceiver.AudioStreamStarted"/>.
/// </remarks>
public void StartRendering(IAudioSource source)
{
Debug.Assert(_source == null);
_source = source;
if (isActiveAndEnabled)
{
StartReadBuffer();
}
}
/// <summary>
/// Stop rendering the passed source. Must be called with the same source passed to <see cref="StartRendering(IAudioSource)"/>
/// </summary>
/// <remarks>
/// Can be used to handle <see cref="AudioReceiver.AudioStreamStopped"/>.
/// </remarks>
public void StopRendering(IAudioSource source)
{
Debug.Assert(_source == source);
if (isActiveAndEnabled)
{
StopReadBuffer();
}
_source = null;
}
protected void OnAudioFilterRead(float[] data, int channels)
{
var behavior = PadWithSine ?
AudioTrackReadBuffer.PadBehavior.PadWithSine :
AudioTrackReadBuffer.PadBehavior.PadWithZero;
bool hasRead = false;
bool hasOverrun = false;
bool hasUnderrun = false;
lock (_readBufferLock)
{
// Read and use buffer under lock to prevent disposal while in use.
if (_readBuffer != null)
{
_readBuffer.Read(_audioSampleRate, channels, data,
out int numSamplesRead, out hasOverrun, behavior);
hasRead = true;
hasUnderrun = numSamplesRead < data.Length;
}
}
if (hasRead)
{
// Uncomment for debugging.
//if (hasOverrun)
//{
// Debug.LogWarning($"Overrun in track {Track.Name}");
//}
//if (hasUnderrun)
//{
// Debug.LogWarning($"Underrun in track {Track.Name}");
//}
return;
}
// If there is no track/buffer, fill array with 0s.
for (int i = 0; i < data.Length; ++i)
{
data[i] = 0.0f;
}
}
private void OnAudioConfigurationChanged(bool deviceWasChanged)
{
_audioSampleRate = AudioSettings.outputSampleRate;
}
private void StartReadBuffer()
{
Debug.Assert(_readBuffer == null);
// OnAudioFilterRead reads the variable concurrently, but the update is atomic
// so we don't need a lock.
_readBuffer = _source.CreateReadBuffer();
}
private void StopReadBuffer()
{
lock (_readBufferLock)
{
// Under lock so OnAudioFilterRead won't use the buffer while/after it is disposed.
_readBuffer.Dispose();
_readBuffer = null;
}
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: ecc506ef6d3213043bc0529b3f591d39
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,48 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using System.Collections.Generic;
using UnityEngine;
namespace Microsoft.MixedReality.WebRTC.Unity
{
/// <summary>
/// This component represents an audio track source generating audio frames for one or more
/// audio tracks.
/// </summary>
/// <seealso cref="MicrophoneSource"/>
public abstract class AudioTrackSource : MediaTrackSource
{
/// <summary>
/// Audio track source object from the underlying C# library that this component encapsulates.
///
/// The object is owned by this component, which will create it and dispose of it automatically.
/// </summary>
public WebRTC.AudioTrackSource Source { get; private set; } = null;
/// <inheritdoc/>
public override MediaKind MediaKind => MediaKind.Audio;
/// <inheritdoc/>
public override bool IsLive => Source != null;
protected void AttachSource(WebRTC.AudioTrackSource source)
{
Source = source;
AttachToMediaLines();
}
protected void DisposeSource()
{
if (Source != null)
{
DetachFromMediaLines();
// Audio track sources are disposable objects owned by the user (this component)
Source.Dispose();
Source = null;
}
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: fc15459d5ebc59246a46627053e01b30
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,42 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using System.Diagnostics;
namespace Microsoft.MixedReality.WebRTC.Unity
{
/// <summary>
/// Abstract base component for a custom video source delivering raw video frames
/// directly to the WebRTC implementation.
/// </summary>
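/// <example>
/// A minimal sketch (hypothetical class name) of a derived source filling each requested frame with
/// a constant 16x16 ARGB32 buffer; see <see cref="UniformColorVideoSource"/> for a complete example:
/// <code>
/// public class SolidColorVideoSource : CustomVideoSource&lt;Argb32VideoFrameStorage&gt;
/// {
///     private readonly uint[] _pixels = new uint[16 * 16]; // all zero = transparent black
///
///     protected override void OnFrameRequested(in FrameRequest request)
///     {
///         var frame = new Argb32VideoFrame { width = 16, height = 16, stride = 16 * 4 };
///         unsafe
///         {
///             fixed (void* ptr = _pixels)
///             {
///                 frame.data = (System.IntPtr)ptr;
///                 request.CompleteRequest(frame);
///             }
///         }
///     }
/// }
/// </code>
/// </example>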
public abstract class CustomVideoSource<T> : VideoTrackSource where T : IVideoFrameStorage
{
protected virtual void OnEnable()
{
Debug.Assert(Source == null);
// Create the external source
//< TODO - Better abstraction
if (typeof(T) == typeof(I420AVideoFrameStorage))
{
AttachSource(ExternalVideoTrackSource.CreateFromI420ACallback(OnFrameRequested));
}
else if (typeof(T) == typeof(Argb32VideoFrameStorage))
{
AttachSource(ExternalVideoTrackSource.CreateFromArgb32Callback(OnFrameRequested));
}
else
{
throw new NotSupportedException("This frame storage is not supported. Use I420AVideoFrameStorage or Argb32VideoFrameStorage.");
}
}
protected virtual void OnDisable()
{
DisposeSource();
}
protected abstract void OnFrameRequested(in FrameRequest request);
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 26676644c28327f42945c1e5b78fb210
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,460 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using UnityEngine;
namespace Microsoft.MixedReality.WebRTC.Unity
{
/// <summary>
/// Media line abstraction for a peer connection.
///
/// This container binds together a source component (<see cref="MediaTrackSource"/>) and/or a receiver
/// component (<see cref="MediaReceiver"/>) on one side, with a transceiver on the other side. The media line
/// is a declarative representation of this association, which is then turned into a binding by the implementation
/// during an SDP negotiation. This forms the core of the algorithm allowing automatic transceiver pairing
/// between the two peers based on the declaration of intent of the user.
///
/// Assigning Unity components to the <see cref="Source"/> and <see cref="Receiver"/> properties serves
/// as an indication of the user intent to send and/or receive media through the transceiver, and is
/// used during the SDP exchange to derive the <see xref="WebRTC.Transceiver.Direction"/> to negotiate.
/// After the SDP negotiation is completed, the <see cref="Transceiver"/> property refers to the transceiver
/// associated with this media line, and which the sender and receiver will use.
///
/// Users typically interact with this class through the peer connection transceiver collection in the Unity
/// inspector window, though direct manipulation via code is also possible.
/// </summary>
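/// <example>
/// A minimal sketch of declaring a media line from code, assuming <c>peerConnection</c>, <c>webcamSource</c>
/// and <c>videoReceiver</c> are existing components in the scene (names are illustrative):
/// <code>
/// MediaLine videoLine = peerConnection.AddMediaLine(MediaKind.Video);
/// videoLine.Source = webcamSource;    // declare intent to send local video
/// videoLine.Receiver = videoReceiver; // declare intent to receive remote video
/// </code>
/// </example>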
[Serializable]
public class MediaLine
{
/// <summary>
/// Kind of media of the media line and its attached transceiver.
///
/// This is assigned when the media line is created with <see cref="PeerConnection.AddMediaLine(MediaKind)"/>
/// and is immutable for the lifetime of the peer connection.
/// </summary>
public MediaKind MediaKind => _mediaKind;
/// <summary>
/// Media source producing the media to send through the transceiver attached to this media line.
/// </summary>
/// <remarks>
/// This must be an instance of a class derived from <see cref="AudioTrackSource"/> or <see cref="VideoTrackSource"/>
/// depending on whether <see cref="MediaKind"/> is <see xref="Microsoft.MixedReality.WebRTC.MediaKind.Audio"/>
/// or <see xref="Microsoft.MixedReality.WebRTC.MediaKind.Video"/>, respectively.
///
/// Internally the peer connection will automatically create and manage a media track to bridge the
/// media source with the transceiver.
///
/// If this is non-<c>null</c> then the peer connection will negotiate sending some media, otherwise
/// it will signal the remote peer that it does not wish to send (receive-only or inactive).
///
/// If <see cref="Transceiver"/> is valid, that is a first session negotiation has already been completed,
/// then changing this value raises a <see xref="WebRTC.PeerConnection.RenegotiationNeeded"/> event on the
/// peer connection of <see cref="Transceiver"/>.
///
/// Must be changed on the main Unity app thread.
/// </remarks>
public MediaTrackSource Source
{
get { return _source; }
set
{
if (_source == value)
{
return;
}
if (value != null && value.MediaKind != MediaKind)
{
throw new ArgumentException("Wrong media kind", nameof(Receiver));
}
var oldTrack = LocalTrack;
if (_source != null && _peer.IsAwake)
{
_source.OnRemovedFromMediaLine(this);
}
_source = value;
if (_source != null && _peer.IsAwake)
{
_source.OnAddedToMediaLine(this);
CreateLocalTrackIfNeeded();
}
// Dispose the old track *after* replacing it with the new one
// so that there is no gap in sending.
oldTrack?.Dispose();
// Whatever the change, keep the direction consistent.
UpdateTransceiverDesiredDirection();
}
}
/// <summary>
/// Name of the local media track created by this media line when it needs a sender track.
/// If left empty, the implementation will generate a unique name for the track (generally a GUID).
/// </summary>
/// <remarks>
/// This value must comply with the 'msid' attribute rules as defined in
/// https://tools.ietf.org/html/draft-ietf-mmusic-msid-05#section-2, which in
/// particular constraints the set of allowed characters to those allowed for a
/// 'token' element as specified in https://tools.ietf.org/html/rfc4566#page-43:
/// - Symbols [!#$%'*+-.^_`{|}~] and ampersand &amp;
/// - Alphanumerical characters [A-Za-z0-9]
///
/// Users can manually test if a string is a valid SDP token with the utility method
/// <see cref="SdpTokenAttribute.Validate(string, bool)"/>. The property setter will
/// use this and throw an <see cref="ArgumentException"/> if the token is not a valid
/// SDP token.
///
/// The sender track name is taken into account each time the track is created. If this
/// property is assigned after the track was created (already negotiated), the value will
/// be used only for the next negotiation, and the current sender track will keep its
/// current track name (either a previous value or a generated one).
/// </remarks>
/// <seealso cref="SdpTokenAttribute.Validate(string, bool)"/>
public string SenderTrackName
{
get { return _senderTrackName; }
set
{
SdpTokenAttribute.Validate(value, allowEmpty: true);
_senderTrackName = value;
}
}
/// <summary>
/// Local track created from a local source.
/// </summary>
/// <remarks>
/// This is non-<c>null</c> when a live source is attached to the <see cref="MediaLine"/>, and the owning
/// <see cref="PeerConnection"/> is connected.
/// </remarks>
public LocalMediaTrack LocalTrack => Transceiver?.LocalTrack;
/// <summary>
/// Media receiver consuming the media received through the transceiver attached to this media line.
/// </summary>
/// <remarks>
/// This must be an instance of a class derived from <see cref="AudioReceiver"/> or <see cref="VideoReceiver"/>
/// depending on whether <see cref="MediaKind"/> is <see xref="Microsoft.MixedReality.WebRTC.MediaKind.Audio"/>
/// or <see xref="Microsoft.MixedReality.WebRTC.MediaKind.Video"/>, respectively.
///
/// If this is non-<c>null</c> then the peer connection will negotiate receiving some media, otherwise
/// it will signal the remote peer that it does not wish to receive (send-only or inactive).
///
/// If <see cref="Transceiver"/> is valid, that is a first session negotiation has already been conducted,
/// then changing this value raises a <see xref="WebRTC.PeerConnection.RenegotiationNeeded"/> event on the
/// peer connection of <see cref="Transceiver"/>.
///
/// Must be changed on the main Unity app thread.
/// </remarks>
public MediaReceiver Receiver
{
get { return _receiver; }
set
{
if (_receiver == value)
{
return;
}
if (value != null && value.MediaKind != MediaKind)
{
throw new ArgumentException("Wrong media kind", nameof(Receiver));
}
if (_receiver != null && _peer.IsAwake)
{
if (_remoteTrack != null)
{
_receiver.OnUnpaired(_remoteTrack);
}
_receiver.OnRemovedFromMediaLine(this);
}
_receiver = value;
if (_receiver != null && _peer.IsAwake)
{
_receiver.OnAddedToMediaLine(this);
if (_remoteTrack != null)
{
_receiver.OnPaired(_remoteTrack);
}
}
// Whatever the change, keep the direction consistent.
UpdateTransceiverDesiredDirection();
}
}
/// <summary>
/// Transceiver attached with this media line.
///
/// On the offering peer this changes during <see cref="PeerConnection.StartConnection"/>, while this is updated by
/// <see cref="PeerConnection.HandleConnectionMessageAsync(string, string)"/> when receiving an offer on the answering peer.
///
/// Because transceivers cannot be destroyed, once this property is assigned a non-<c>null</c> value it keeps that
/// value until the peer connection owning the media line is closed.
/// </summary>
public Transceiver Transceiver { get; private set; }
/// <summary>
/// <see cref="PeerConnection"/> owning this <see cref="MediaLine"/>.
/// </summary>
public PeerConnection Peer
{
get => _peer;
internal set
{
Debug.Assert(Peer == null || Peer == value);
_peer = value;
}
}
#region Private fields
private PeerConnection _peer;
/// <summary>
/// Backing field to serialize the <see cref="MediaKind"/> property.
/// </summary>
/// <seealso cref="MediaKind"/>
[SerializeField]
private MediaKind _mediaKind;
/// <summary>
/// Backing field to serialize the <see cref="Source"/> property.
/// </summary>
/// <seealso cref="Source"/>
[SerializeField]
private MediaTrackSource _source;
/// <summary>
/// Backing field to serialize the <see cref="Receiver"/> property.
/// </summary>
/// <seealso cref="Receiver"/>
[SerializeField]
private MediaReceiver _receiver;
/// <summary>
/// Backing field to serialize the sender track's name.
/// </summary>
[SerializeField]
[Tooltip("SDP track name")]
[SdpToken(allowEmpty: true)]
private string _senderTrackName;
// Cache for the remote track opened by the latest negotiation.
// Comparing it to Transceiver.RemoteTrack will tell if streaming has just started/stopped.
private MediaTrack _remoteTrack;
#endregion
/// <summary>
/// Constructor called internally by <see cref="PeerConnection.AddMediaLine(MediaKind)"/>.
/// </summary>
/// <param name="kind">Immutable value assigned to the <see cref="MediaKind"/> property on construction.</param>
internal MediaLine(PeerConnection peer, MediaKind kind)
{
Peer = peer;
_mediaKind = kind;
}
private void UpdateTransceiverDesiredDirection()
{
if (Transceiver != null)
{
// Avoid races on the desired direction by limiting changes to the main thread.
// Note that EnsureIsMainAppThread cannot be used if _peer is not awake, so only
// check when there is a transceiver (meaning _peer is enabled).
Peer.EnsureIsMainAppThread();
bool wantsSend = _source != null && _source.IsLive;
bool wantsRecv = (_receiver != null);
Transceiver.DesiredDirection = Transceiver.DirectionFromSendRecv(wantsSend, wantsRecv);
}
}
// Initializes and attaches a local track if all the preconditions are satisfied.
private void CreateLocalTrackIfNeeded()
{
if (_source != null && _source.IsLive && Transceiver != null)
{
if (MediaKind == MediaKind.Audio)
{
var audioSource = (AudioTrackSource)_source;
var initConfig = new LocalAudioTrackInitConfig
{
trackName = _senderTrackName
};
var audioTrack = LocalAudioTrack.CreateFromSource(audioSource.Source, initConfig);
Transceiver.LocalAudioTrack = audioTrack;
}
else
{
Debug.Assert(MediaKind == MediaKind.Video);
var videoSource = (VideoTrackSource)_source;
var initConfig = new LocalVideoTrackInitConfig
{
trackName = _senderTrackName
};
var videoTrack = LocalVideoTrack.CreateFromSource(videoSource.Source, initConfig);
Transceiver.LocalVideoTrack = videoTrack;
}
}
}
// Detaches and disposes the local track if there is one.
private void DestroyLocalTrackIfAny()
{
var localTrack = Transceiver?.LocalTrack;
if (localTrack != null)
{
if (MediaKind == MediaKind.Audio)
{
Transceiver.LocalAudioTrack = null;
}
else
{
Debug.Assert(MediaKind == MediaKind.Video);
Transceiver.LocalVideoTrack = null;
}
localTrack.Dispose();
}
}
internal void UpdateAfterSdpReceived()
{
Debug.Assert(Transceiver != null);
// Callbacks must be called on the main Unity app thread.
Peer.EnsureIsMainAppThread();
var newRemoteTrack = Transceiver.RemoteTrack;
if (_receiver != null)
{
bool wasReceiving = _remoteTrack != null;
bool isReceiving = newRemoteTrack != null;
if (isReceiving && !wasReceiving)
{
// Transceiver started receiving, and user actually wants to receive
_receiver.OnPaired(newRemoteTrack);
}
else if (!isReceiving && wasReceiving)
{
// Transceiver stopped receiving (user intent does not matter here)
_receiver.OnUnpaired(_remoteTrack);
}
}
_remoteTrack = newRemoteTrack;
}
/// <summary>
/// Pair the given transceiver with the current media line.
/// </summary>
/// <param name="tr">The transceiver to pair with.</param>
/// <exception cref="InvalidTransceiverMediaKindException">
/// The transceiver associated in the offer with the same media line index as the current media line
/// has a different media kind than the media line. This is generally a result of the two peers having
/// mismatching media line configurations.
/// </exception>
internal void PairTransceiver(Transceiver tr)
{
Peer.EnsureIsMainAppThread();
Debug.Assert(tr != null);
Debug.Assert(Transceiver == null);
// Check consistency before assigning
if (tr.MediaKind != MediaKind)
{
throw new InvalidTransceiverMediaKindException();
}
Transceiver = tr;
// Initialize the transceiver direction in sync with Sender and Receiver.
UpdateTransceiverDesiredDirection();
// Start the local track if there is a live source.
CreateLocalTrackIfNeeded();
}
internal void UnpairTransceiver()
{
Peer.EnsureIsMainAppThread();
// Notify the receiver.
if (_remoteTrack != null && _receiver != null)
{
_receiver.OnUnpaired(_remoteTrack);
}
_remoteTrack = null;
DestroyLocalTrackIfAny();
Transceiver = null;
}
/// <summary>
/// Internal callback when the underlying source providing media frames to the sender track
/// is created, and therefore the local media track needs to be created too.
/// </summary>
/// <seealso cref="AudioTrackSource.AttachSource(WebRTC.AudioTrackSource)"/>
/// <seealso cref="VideoTrackSource.AttachSource(WebRTC.VideoTrackSource)"/>
internal void AttachSource()
{
Debug.Assert(Source.IsLive);
CreateLocalTrackIfNeeded();
UpdateTransceiverDesiredDirection();
}
/// <summary>
/// Internal callback when the underlying source providing media frames to the sender track
/// is destroyed, and therefore the local media track needs to be destroyed too.
/// </summary>
/// <seealso cref="AudioTrackSource.DisposeSource"/>
/// <seealso cref="VideoTrackSource.DisposeSource"/>
internal void DetachSource()
{
Debug.Assert(Source.IsLive);
DestroyLocalTrackIfAny();
UpdateTransceiverDesiredDirection();
}
internal void OnReceiverDestroyed()
{
// Different from `Receiver = null`. Don't need to call Receiver.OnRemovedFromMediaLine
// or Receiver.OnUnpaired since the Receiver itself has called this.
_receiver = null;
UpdateTransceiverDesiredDirection();
}
// Called by PeerConnection.Awake.
internal void Awake()
{
if (_source)
{
// Fill the list of media lines for the source.
_source.OnAddedToMediaLine(this);
}
if (_receiver)
{
_receiver.OnAddedToMediaLine(this);
}
}
// Called by PeerConnection.OnDestroy.
internal void OnDestroy()
{
if (_source)
{
// Remove this media line from the source's list.
_source.OnRemovedFromMediaLine(this);
}
if (_receiver)
{
_receiver.OnRemovedFromMediaLine(this);
}
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 6e4097ad19fee6444b3ddd95e13e0bf2
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,96 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using UnityEngine;
namespace Microsoft.MixedReality.WebRTC.Unity
{
/// <summary>
/// Base class for media producers generating frames by receiving them from a remote peer.
/// </summary>
public abstract class MediaReceiver : MonoBehaviour
{
/// <summary>
/// Media kind of the receiver.
/// </summary>
public abstract MediaKind MediaKind { get; }
/// <summary>
/// Remote track associated with this receiver.
/// <c>null</c> if this object is not receiving at this time.
/// </summary>
/// <remarks>
/// This is always a <see cref="RemoteAudioTrack"/> or a <see cref="RemoteVideoTrack"/>
/// </remarks>
public abstract MediaTrack Track { get; }
/// <summary>
/// Is the media source currently producing frames received from the remote peer?
/// This is <c>true</c> while the remote media track exists, which is notified by
/// events on the <see cref="AudioReceiver"/> or <see cref="VideoReceiver"/>.
/// </summary>
public bool IsLive => Track != null;
/// <summary>
/// Transceiver this receiver is paired with, if any.
///
/// This is <c>null</c> until a remote description is applied which pairs the media line
/// this receiver is associated with to a transceiver, or until the peer connection of this
/// receiver's media line creates the receiver right before creating an SDP offer.
/// </summary>
public Transceiver Transceiver => MediaLine?.Transceiver;
/// <summary>
/// Media line this receiver is paired with, if any.
/// </summary>
/// <remarks>
/// Note that this is set to the connected <see cref="Unity.MediaLine"/> only if the owning
/// <see cref="PeerConnection"/> is awake. This will be automatically reset if the
/// <see cref="PeerConnection"/> owning the <see cref="Unity.MediaLine"/>is destroyed.
/// </remarks>
public MediaLine MediaLine { get; private set; }
/// <summary>
/// Internal callback invoked when the media receiver is assigned to a media line.
/// </summary>
/// <param name="mediaLine">The new media line this receiver is assigned to.</param>
protected internal virtual void OnAddedToMediaLine(MediaLine mediaLine)
{
Debug.Assert(MediaLine == null);
MediaLine = mediaLine;
}
/// <summary>
/// Internal callback invoked when the media receiver is de-assigned from a media line.
/// </summary>
/// <param name="mediaLine">The old media line this receiver was assigned to.</param>
protected internal virtual void OnRemovedFromMediaLine(MediaLine mediaLine)
{
Debug.Assert(MediaLine == mediaLine);
MediaLine = null;
}
/// <summary>
/// Internal callback invoked when the receiver is paired with a media track.
/// </summary>
/// <remarks>
/// This will be called on the Unity update thread.
/// </remarks>
/// <param name="track">The media track this receiver is paired with.</param>
protected internal virtual void OnPaired(MediaTrack track) { }
/// <summary>
/// Internal callback invoked when the receiver is unpaired from a media track.
/// </summary>
/// <remarks>
/// This will be called on the Unity update thread.
/// </remarks>
/// <param name="track">The media track this receiver was paired with.</param>
protected internal virtual void OnUnpaired(MediaTrack track) { }
protected void OnDestroy()
{
MediaLine?.OnReceiverDestroyed();
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 14c95be69b6ca7c4fa178cfa7c465745
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,65 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System.Collections.Generic;
using UnityEngine;
namespace Microsoft.MixedReality.WebRTC.Unity
{
/// <summary>
/// Base class for media track source components producing some media frames locally.
/// </summary>
/// <seealso cref="AudioTrackSource"/>
/// <seealso cref="VideoTrackSource"/>
public abstract class MediaTrackSource : MonoBehaviour
{
/// <summary>
/// Media kind of the track source.
/// </summary>
public abstract MediaKind MediaKind { get; }
/// <summary>
/// Indicates if the source is currently producing frames.
/// </summary>
public abstract bool IsLive { get; }
/// <summary>
/// List of media lines using this source.
/// </summary>
/// <remarks>
/// Note that a connected <see cref="MediaLine"/> will be added to this only if the owning
/// <see cref="PeerConnection"/> is awake. A <see cref="MediaLine"/> will be automatically
/// removed if the owning <see cref="PeerConnection"/> is destroyed.
/// </remarks>
public IReadOnlyList<MediaLine> MediaLines => _mediaLines;
private readonly List<MediaLine> _mediaLines = new List<MediaLine>();
internal void OnAddedToMediaLine(MediaLine mediaLine)
{
Debug.Assert(!_mediaLines.Contains(mediaLine));
_mediaLines.Add(mediaLine);
}
internal void OnRemovedFromMediaLine(MediaLine mediaLine)
{
bool removed = _mediaLines.Remove(mediaLine);
Debug.Assert(removed);
}
protected void AttachToMediaLines()
{
foreach (var ml in _mediaLines)
{
ml.AttachSource();
}
}
protected void DetachFromMediaLines()
{
foreach (var ml in _mediaLines)
{
ml.DetachSource();
}
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 909a2a3190166db4ea5ea51d02afa795
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,183 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using Microsoft.MixedReality.WebRTC.Unity.Editor;
using UnityEngine;
#if !UNITY_EDITOR && UNITY_ANDROID
using UnityEngine.Android;
#endif
#if UNITY_WSA && !UNITY_EDITOR
using System.Threading.Tasks;
using global::Windows.UI.Core;
using global::Windows.Foundation;
using global::Windows.Media.Core;
using global::Windows.Media.Capture;
using global::Windows.ApplicationModel.Core;
#endif
namespace Microsoft.MixedReality.WebRTC.Unity
{
/// <summary>
/// This component represents a local audio source generating audio frames from a local
/// audio capture device (microphone). The audio source can be used to create one or more
/// audio tracks sharing the same audio content.
/// </summary>
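/// <example>
/// A minimal sketch of sending microphone audio, assuming <c>peerConnection</c> is an existing
/// <see cref="PeerConnection"/> component (names are illustrative):
/// <code>
/// var micSource = gameObject.AddComponent&lt;MicrophoneSource&gt;();
/// MediaLine audioLine = peerConnection.AddMediaLine(MediaKind.Audio);
/// audioLine.Source = micSource;
/// </code>
/// </example>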
[AddComponentMenu("MixedReality-WebRTC/Microphone Source")]
public class MicrophoneSource : AudioTrackSource
{
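/// <summary>
/// Enable automated gain control (AGC) on the audio capture device. The value is read when the
/// source is created in <c>OnEnable</c>.
/// </summary>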
public bool AutoGainControl => _autoGainControl;
[SerializeField]
[Tooltip("Enable automated gain control")]
[ToggleLeft]
protected bool _autoGainControl = true;
#if !UNITY_EDITOR && UNITY_ANDROID
protected bool _androidRecordAudioRequestPending = false;
protected float _androidRecordAudioRequestRetryUntilTime = 0f;
#endif
protected async void OnEnable()
{
if (Source != null)
{
return;
}
#if !UNITY_EDITOR && UNITY_ANDROID
// Ensure Android binding is initialized before accessing the native implementation
Android.Initialize();
// Check for permission to access the microphone
if (!Permission.HasUserAuthorizedPermission(Permission.Microphone))
{
if (!_androidRecordAudioRequestPending)
{
// Monitor the OnApplicationFocus(true) event during the next 5 minutes,
// and check for permission again each time (see below why).
_androidRecordAudioRequestPending = true;
_androidRecordAudioRequestRetryUntilTime = Time.time + 300;
// Display dialog requesting user permission. This will return immediately,
// and unfortunately there's no good way to tell when this completes. As a rule
// of thumb, the application should lose focus while the dialog is shown, so checking
// again when focus resumes should be sufficient without having to poll every frame.
Permission.RequestUserPermission(Permission.Microphone);
}
return;
}
#endif
#if UNITY_WSA && !UNITY_EDITOR
// Request access to audio capture. The OS may show some popup dialog to the
// user to request permission. This will succeed only if the user approves it.
try
{
if (UnityEngine.WSA.Application.RunningOnUIThread())
{
await RequestAccessAsync();
}
else
{
UnityEngine.WSA.Application.InvokeOnUIThread(() => RequestAccessAsync(), waitUntilDone: true);
}
}
catch (Exception ex)
{
// Log an error and prevent activation
Debug.LogError($"Audio access failure: {ex.Message}.");
this.enabled = false;
return;
}
#endif
var initConfig = new LocalAudioDeviceInitConfig
{
AutoGainControl = _autoGainControl,
};
try
{
AttachSource(await DeviceAudioTrackSource.CreateAsync(initConfig));
}
catch (Exception ex)
{
Debug.LogError($"Failed to create device track source for {nameof(MicrophoneSource)} component '{name}'.");
Debug.LogException(ex, this);
return;
}
}
#if !UNITY_EDITOR && UNITY_ANDROID
protected void OnApplicationFocus(bool hasFocus)
{
if (!hasFocus)
{
return;
}
// If focus is restored after a pending request, check the permission again
if (_androidRecordAudioRequestPending)
{
_androidRecordAudioRequestPending = false;
if (Permission.HasUserAuthorizedPermission(Permission.Microphone))
{
// If now authorized, start capture as if just enabled
Debug.Log("User granted authorization to access microphone, starting MicrophoneSource now...");
OnEnable();
}
else if (Time.time <= _androidRecordAudioRequestRetryUntilTime)
{
// OnApplicationFocus(true) may be called for unrelated reason(s) so do not disable on first call,
// but instead retry during a given period after the request was made, until we're reasonably
// confident that the user dialog was actually answered (that is, that OnApplicationFocus(true) was
// called because of that dialog, and not because of another reason).
// This may lead to false positives (checking permission after the user denied it), but the user
// dialog will not popup again, so this is all in the background and essentially harmless.
_androidRecordAudioRequestPending = true;
}
else
{
// Some reasonable time passed since we made the permission request, and we still get a denied
// answer, so assume the user actually denied it and stop retrying.
_androidRecordAudioRequestRetryUntilTime = 0f;
Debug.LogError("User denied RecordAudio (microphone) permission; cannot use MicrophoneSource. Forcing enabled=false.");
enabled = false;
}
}
}
#endif
protected void OnDisable()
{
DisposeSource();
}
#if UNITY_WSA && !UNITY_EDITOR
/// <summary>
/// Internal UWP helper to ensure device access.
/// </summary>
/// <remarks>
/// This must be called from the main UWP UI thread (not the main Unity app thread).
/// </remarks>
private Task RequestAccessAsync()
{
// On UWP the app must have the "microphone" capability, and the user must allow microphone
// access. So check that access before trying to initialize the WebRTC library, as this
// may result in a popup window being displayed the first time, which needs to be accepted
// before the microphone can be accessed by WebRTC.
var mediaAccessRequester = new MediaCapture();
var mediaSettings = new MediaCaptureInitializationSettings();
mediaSettings.AudioDeviceId = "";
mediaSettings.VideoDeviceId = "";
mediaSettings.StreamingCaptureMode = StreamingCaptureMode.Audio;
mediaSettings.PhotoCaptureSource = PhotoCaptureSource.VideoPreview;
mediaSettings.SharingMode = MediaCaptureSharingMode.SharedReadOnly; // for MRC and lower res camera
return mediaAccessRequester.InitializeAsync(mediaSettings).AsTask();
}
#endif
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: dafbc4e8e99e46e41823d17cdabbd651
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,283 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using System.Threading.Tasks;
using Unity.Collections;
using Unity.Collections.LowLevel.Unsafe;
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.XR;
namespace Microsoft.MixedReality.WebRTC.Unity
{
/// <summary>
/// Custom video source capturing the Unity scene content as rendered by a given camera,
/// and sending it as a video track through the selected peer connection.
/// </summary>
public class SceneVideoSource : CustomVideoSource<Argb32VideoFrameStorage>
{
/// <summary>
/// Camera used to capture the scene content, whose rendering is used as
/// video content for the track.
/// </summary>
/// <remarks>
/// If the project uses Multi-Pass stereoscopic rendering, then this camera needs to
/// render to a single eye to produce a single video frame. Generally this means that
/// this needs to be a separate Unity camera from the one used for XR rendering, which
/// is generally rendering to both eyes.
///
/// If the project uses Single-Pass Instanced stereoscopic rendering, then Unity 2019.1+
/// is required to make this component work, due to the fact earlier versions of Unity
/// are missing some command buffer API calls to be able to efficiently access the camera
/// backbuffer in this mode. For Unity 2018.3 users who cannot upgrade, use Single-Pass
/// (non-instanced) instead.
/// </remarks>
[Header("Camera")]
[Tooltip("Camera used to capture the scene content sent by the track.")]
[CaptureCamera]
public Camera SourceCamera;
/// <summary>
/// Camera event indicating the point in time during the Unity frame rendering
/// when the camera rendering is to be captured.
///
/// This defaults to <see xref="CameraEvent.AfterEverything"/>, which is a reasonable
/// default to capture the entire scene rendering, but can be customized to achieve
/// other effects like capturing only a part of the scene.
/// </summary>
[Tooltip("Camera event when to insert the scene capture at.")]
public CameraEvent CameraEvent = CameraEvent.AfterEverything;
/// <summary>
/// Command buffer attached to the camera to capture its rendered content from the GPU
/// and transfer it to the CPU for dispatching to WebRTC.
/// </summary>
private CommandBuffer _commandBuffer;
/// <summary>
/// Read-back texture where the content of the camera backbuffer is copied before being
/// transferred from GPU to CPU. The texture size is <see cref="_readBackWidth"/> x <see cref="_readBackHeight"/>.
/// </summary>
private RenderTexture _readBackTex;
/// <summary>
/// Cached width, in pixels, of the readback texture and video frame produced.
/// </summary>
private int _readBackWidth;
/// <summary>
/// Cached height, in pixels, of the readback texture and video frame produced.
/// </summary>
private int _readBackHeight;
/// <summary>
/// Temporary storage for frames generated by GPU readback until consumed by WebRTC.
/// </summary>
private VideoFrameQueue<Argb32VideoFrameStorage> _frameQueue = new VideoFrameQueue<Argb32VideoFrameStorage>(3);
protected override void OnEnable()
{
if (!SystemInfo.supportsAsyncGPUReadback)
{
Debug.LogError("This platform does not support async GPU readback. Cannot use the SceneVideoSender component.");
enabled = false;
return;
}
// If no camera provided, attempt to fallback to main camera
if (SourceCamera == null)
{
var mainCameraGameObject = GameObject.FindGameObjectWithTag("MainCamera");
if (mainCameraGameObject != null)
{
SourceCamera = mainCameraGameObject.GetComponent<Camera>();
}
}
if (SourceCamera == null)
{
throw new NullReferenceException("Empty source camera for SceneVideoSource, and could not find MainCamera as fallback.");
}
CreateCommandBuffer();
SourceCamera.AddCommandBuffer(CameraEvent, _commandBuffer);
// Create the track source
base.OnEnable();
}
protected override void OnDisable()
{
base.OnDisable();
if (_commandBuffer != null)
{
// The camera sometimes goes away before this component.
if (SourceCamera != null)
{
SourceCamera.RemoveCommandBuffer(CameraEvent, _commandBuffer);
}
_commandBuffer.Dispose();
_commandBuffer = null;
}
}
/// <summary>
/// Create the command buffer reading the scene content from the source camera back into CPU memory
/// and delivering it via the <see cref="OnSceneFrameReady(AsyncGPUReadbackRequest)"/> callback to
/// the underlying WebRTC track.
/// </summary>
private void CreateCommandBuffer()
{
if (_commandBuffer != null)
{
throw new InvalidOperationException("Command buffer already initialized.");
}
// By default, use the camera's render target texture size
_readBackWidth = SourceCamera.scaledPixelWidth;
_readBackHeight = SourceCamera.scaledPixelHeight;
// Offset and scale into source render target.
Vector2 srcScale = Vector2.one;
Vector2 srcOffset = Vector2.zero;
RenderTextureFormat srcFormat = RenderTextureFormat.ARGB32;
// Handle stereoscopic rendering for VR/AR.
// See https://unity3d.com/how-to/XR-graphics-development-tips for details.
if (SourceCamera.stereoEnabled)
{
// Readback size is the size of the texture for a single eye.
// The readback will occur on the left eye (chosen arbitrarily).
_readBackWidth = XRSettings.eyeTextureWidth;
_readBackHeight = XRSettings.eyeTextureHeight;
srcFormat = XRSettings.eyeTextureDesc.colorFormat;
if (XRSettings.stereoRenderingMode == XRSettings.StereoRenderingMode.MultiPass)
{
// Multi-pass is similar to non-stereo, nothing to do.
// Ensure camera is not rendering to both eyes in multi-pass stereo, otherwise the command buffer
// is executed twice (once per eye) and will produce twice as many frames, which leads to stuttering
// when playing back the video stream resulting from combining those frames.
if (SourceCamera.stereoTargetEye == StereoTargetEyeMask.Both)
{
throw new InvalidOperationException("SourceCamera has stereoscopic rendering enabled to both eyes" +
" with multi-pass rendering (XRSettings.stereoRenderingMode = MultiPass). This is not supported" +
" with SceneVideoSource, as this would produce one image per eye. Either set XRSettings." +
"stereoRenderingMode to single-pass (instanced or not), or use multi-pass with a camera rendering" +
" to a single eye (Camera.stereoTargetEye != Both).");
}
}
else if (XRSettings.stereoRenderingMode == XRSettings.StereoRenderingMode.SinglePass)
{
// Single-pass (non-instanced) stereo uses "wide-buffer" packing.
// Left eye corresponds to the left half of the buffer.
srcScale.x = 0.5f;
}
else if ((XRSettings.stereoRenderingMode == XRSettings.StereoRenderingMode.SinglePassInstanced)
|| (XRSettings.stereoRenderingMode == XRSettings.StereoRenderingMode.SinglePassMultiview)) // same as instanced (OpenGL)
{
// Single-pass instanced stereo uses texture array packing.
// Left eye corresponds to the first array slice.
#if !UNITY_2019_1_OR_NEWER
// https://unity3d.com/unity/alpha/2019.1.0a13
// "Graphics: Graphics.Blit and CommandBuffer.Blit methods now support blitting to and from texture arrays."
throw new NotSupportedException("Capturing scene content in single-pass instanced stereo rendering requires" +
" blitting from the Texture2DArray render target of the camera, which is not supported before Unity 2019.1." +
" To use this feature, either upgrade your project to Unity 2019.1+ or use single-pass non-instanced stereo" +
" rendering (XRSettings.stereoRenderingMode = SinglePass).");
#endif
}
}
_readBackTex = new RenderTexture(_readBackWidth, _readBackHeight, 0, srcFormat, RenderTextureReadWrite.Linear);
_commandBuffer = new CommandBuffer();
_commandBuffer.name = "SceneVideoSource";
// Explicitly set the render target to instruct the GPU to discard previous content.
// https://docs.unity3d.com/ScriptReference/Rendering.CommandBuffer.Blit.html recommends this.
//< TODO - This doesn't work
//_commandBuffer.SetRenderTarget(_readBackTex, RenderBufferLoadAction.DontCare, RenderBufferStoreAction.Store);
// Copy camera target to readback texture
_commandBuffer.BeginSample("Blit");
#if UNITY_2019_1_OR_NEWER
int srcSliceIndex = 0; // left eye
int dstSliceIndex = 0;
_commandBuffer.Blit(BuiltinRenderTextureType.CameraTarget, /*BuiltinRenderTextureType.CurrentActive*/_readBackTex,
srcScale, srcOffset, srcSliceIndex, dstSliceIndex);
#else
_commandBuffer.Blit(BuiltinRenderTextureType.CameraTarget, /*BuiltinRenderTextureType.CurrentActive*/_readBackTex, srcScale, srcOffset);
#endif
_commandBuffer.EndSample("Blit");
// Copy readback texture to RAM asynchronously, invoking the given callback once done
_commandBuffer.BeginSample("Readback");
_commandBuffer.RequestAsyncReadback(_readBackTex, 0, TextureFormat.BGRA32, OnSceneFrameReady);
_commandBuffer.EndSample("Readback");
}
protected override void OnFrameRequested(in FrameRequest request)
{
// Try to dequeue a frame from the internal frame queue
if (_frameQueue.TryDequeue(out Argb32VideoFrameStorage storage))
{
var frame = new Argb32VideoFrame
{
width = storage.Width,
height = storage.Height,
stride = (int)storage.Width * 4
};
unsafe
{
fixed (void* ptr = storage.Buffer)
{
// Complete the request with a view over the frame buffer (no allocation)
// while the buffer is pinned into memory. The native implementation will
// make a copy into a native memory buffer if necessary before returning.
frame.data = new IntPtr(ptr);
request.CompleteRequest(frame);
}
}
// Put the allocated buffer back in the pool for reuse
_frameQueue.RecycleStorage(storage);
}
}
/// <summary>
/// Callback invoked by the command buffer when the scene frame GPU readback has completed
/// and the frame is available in CPU memory.
/// </summary>
/// <param name="request">The completed and possibly failed GPU readback request.</param>
private void OnSceneFrameReady(AsyncGPUReadbackRequest request)
{
// Read back the data from GPU, if available
if (request.hasError)
{
return;
}
NativeArray<byte> rawData = request.GetData<byte>();
Debug.Assert(rawData.Length >= _readBackWidth * _readBackHeight * 4);
unsafe
{
byte* ptr = (byte*)NativeArrayUnsafeUtility.GetUnsafePtr(rawData);
// Enqueue a frame in the internal frame queue. This will make a copy
// of the frame into a pooled buffer owned by the frame queue.
var frame = new Argb32VideoFrame
{
data = (IntPtr)ptr,
stride = _readBackWidth * 4,
width = (uint)_readBackWidth,
height = (uint)_readBackHeight
};
_frameQueue.Enqueue(frame);
}
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 0ec319cd4e697a04191b92210be8c03b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,109 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using Microsoft.MixedReality.WebRTC;
using Microsoft.MixedReality.WebRTC.Unity;
using System;
using System.Collections.Generic;
using UnityEngine;
namespace Microsoft.MixedReality.WebRTC.Unity
{
/// <summary>
/// A video source producing some colored frames generated programmatically.
/// </summary>
public class UniformColorVideoSource : CustomVideoSource<Argb32VideoFrameStorage>
{
/// <summary>
/// List of colors to cycle through.
/// </summary>
[Tooltip("List of colors to cycle through")]
public List<Color32> Colors = new List<Color32>();
/// <summary>
/// Color cycling speed, in change per second.
/// </summary>
[Tooltip("Color cycling speed, in change per second")]
public float Speed = 1f;
/// <summary>
/// Frame width, in pixels.
/// </summary>
private const int FrameWidth = 16;
/// <summary>
/// Frame height, in pixels.
/// </summary>
private const int FrameHeight = 16;
/// <summary>
/// Row stride, in bytes.
/// </summary>
private const int FrameStride = FrameWidth * 4;
/// <summary>
/// Frame buffer size, in pixels.
/// </summary>
private const int FrameSize = FrameWidth * FrameHeight;
private uint[] _data = new uint[FrameSize];
private int _index = -2;
protected void Start()
{
// Update buffer on start in case OnFrameRequested() is called before Update()
UpdateBuffer();
}
protected void Update()
{
UpdateBuffer();
}
protected void UpdateBuffer()
{
if (Colors.Count > 0)
{
int index = Mathf.FloorToInt(Time.time * Speed) % Colors.Count;
if (index != _index)
{
_index = index;
var col32 = Colors[index];
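// Pack the color as a 32-bit ARGB value (0xAARRGGBB), matching the pixel format of Argb32VideoFrame.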
uint color = col32.b | (uint)col32.g << 8 | (uint)col32.r << 16 | (uint)col32.a << 24;
for (int k = 0; k < FrameSize; ++k)
{
_data[k] = color;
}
}
}
else if (_index != -1)
{
// Fallback to bright purple
_index = -1;
uint color = 0xFFFF00FFu;
for (int k = 0; k < FrameSize; ++k)
{
_data[k] = color;
}
}
}
protected override void OnFrameRequested(in FrameRequest request)
{
var frame = new Argb32VideoFrame
{
width = FrameWidth,
height = FrameHeight,
stride = FrameStride
};
unsafe
{
fixed (void* ptr = _data)
{
frame.data = (IntPtr)ptr;
request.CompleteRequest(frame);
}
}
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 29b5ec2fdf160bb4ebd2d5159450fd2f
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,92 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using UnityEngine;
using UnityEngine.Events;
namespace Microsoft.MixedReality.WebRTC.Unity
{
/// <summary>
/// Unity event corresponding to a new video stream being started.
/// </summary>
[Serializable]
public class VideoStreamStartedEvent : UnityEvent<IVideoSource>
{ };
/// <summary>
/// Unity event corresponding to an on-going video stream being stopped.
/// </summary>
[Serializable]
public class VideoStreamStoppedEvent : UnityEvent<IVideoSource>
{ };
/// <summary>
/// Endpoint for a WebRTC remote video track.
/// </summary>
/// <remarks>
/// Setting this on a video <see cref="MediaLine"/> will enable the corresponding transceiver to receive.
/// A remote track will be exposed through <see cref="VideoTrack"/> once a connection is established.
/// The video track can optionally be displayed locally with a <see cref="VideoRenderer"/>.
/// </remarks>
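/// <example>
/// A minimal wiring sketch, assuming a <see cref="VideoRenderer"/> named <c>videoRenderer</c> and a
/// video <see cref="MediaLine"/> named <c>videoLine</c> already exist (names are illustrative):
/// <code>
/// var receiver = gameObject.AddComponent&lt;VideoReceiver&gt;();
/// receiver.VideoStreamStarted.AddListener(videoRenderer.StartRendering);
/// receiver.VideoStreamStopped.AddListener(videoRenderer.StopRendering);
/// videoLine.Receiver = receiver;
/// </code>
/// </example>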
[AddComponentMenu("MixedReality-WebRTC/Video Receiver")]
public class VideoReceiver : MediaReceiver
{
/// <summary>
/// Remote video track receiving data from the remote peer.
///
/// This is <c>null</c> until <see cref="MediaLine.Transceiver"/> is set to a non-null value
/// and a remote track is added to that transceiver.
/// </summary>
public RemoteVideoTrack VideoTrack { get; private set; }
/// <summary>
/// Event raised when the video stream started.
///
/// When this event is raised, the following are true:
/// - The <see cref="Track"/> property is a valid remote video track.
/// - The <see cref="MediaReceiver.IsLive"/> property is <c>true</c>.
/// </summary>
/// <remarks>
/// This event is raised from the main Unity thread to allow Unity object access.
/// </remarks>
public VideoStreamStartedEvent VideoStreamStarted = new VideoStreamStartedEvent();
/// <summary>
/// Event raised when the video stream stopped.
///
/// When this event is raised, the following are true:
/// - The <see cref="Track"/> property is <c>null</c>.
/// - The <see cref="MediaReceiver.IsLive"/> property is <c>false</c>.
/// </summary>
/// <remarks>
/// This event is raised from the main Unity thread to allow Unity object access.
/// </remarks>
public VideoStreamStoppedEvent VideoStreamStopped = new VideoStreamStoppedEvent();
/// <inheritdoc/>
public override MediaKind MediaKind => MediaKind.Video;
/// <inheritdoc/>
public override MediaTrack Track => VideoTrack;
/// <inheritdoc/>
protected internal override void OnPaired(MediaTrack track)
{
var remoteVideoTrack = (RemoteVideoTrack)track;
Debug.Assert(VideoTrack == null);
VideoTrack = remoteVideoTrack;
VideoStreamStarted.Invoke(VideoTrack);
}
/// <inheritdoc/>
protected internal override void OnUnpaired(MediaTrack track)
{
Debug.Assert(track is RemoteVideoTrack);
Debug.Assert(VideoTrack == track);
VideoTrack = null;
VideoStreamStopped.Invoke((RemoteVideoTrack)track);
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: b5c8b8e916d2cbf40a9dd142c021c4d6
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@ -0,0 +1,348 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using UnityEngine;
using Unity.Profiling;
using System;
using Microsoft.MixedReality.WebRTC.Unity.Editor;
namespace Microsoft.MixedReality.WebRTC.Unity
{
/// <summary>
/// Utility component used to play video frames obtained from a WebRTC video track. This can indiscriminately
/// play video frames from a video track source on the local peer as well as video frames from a remote video
/// receiver obtaining its frames from a remote WebRTC peer.
/// </summary>
/// <remarks>
/// This component writes to the attached <a href="https://docs.unity3d.com/ScriptReference/Material.html">Material</a>,
/// via the attached <a href="https://docs.unity3d.com/ScriptReference/Renderer.html">Renderer</a>.
/// </remarks>
[RequireComponent(typeof(Renderer))]
[AddComponentMenu("MixedReality-WebRTC/Video Renderer")]
public class VideoRenderer : MonoBehaviour
{
[Tooltip("Max playback framerate, in frames per second")]
[Range(0.001f, 120f)]
public float MaxFramerate = 30f;
[Header("Statistics")]
[ToggleLeft]
public bool EnableStatistics = true;
/// <summary>
/// A textmesh onto which frame load stat data will be written
/// </summary>
/// <remarks>
/// This is how fast the frames are given from the underlying implementation
/// </remarks>
[Tooltip("A textmesh onto which frame load stat data will be written")]
public TextMesh FrameLoadStatHolder;
/// <summary>
/// A textmesh onto which frame present stat data will be written
/// </summary>
/// <remarks>
/// This is how fast we render frames to the display
/// </remarks>
[Tooltip("A textmesh onto which frame present stat data will be written")]
public TextMesh FramePresentStatHolder;
/// <summary>
/// A textmesh onto which frame skip stat data will be written
/// </summary>
/// <remarks>
/// This is how often we skip presenting an underlying frame
/// </remarks>
[Tooltip("A textmesh onto which frame skip stat data will be written")]
public TextMesh FrameSkipStatHolder;
// Source that this renderer is currently subscribed to.
private IVideoSource _source;
/// <summary>
/// Internal reference to the attached texture
/// </summary>
private Texture2D _textureY = null; // also used for ARGB32
private Texture2D _textureU = null;
private Texture2D _textureV = null;
/// <summary>
/// Internal timing counter
/// </summary>
private float lastUpdateTime = 0.0f;
private Material videoMaterial;
private float _minUpdateDelay;
private VideoFrameQueue<I420AVideoFrameStorage> _i420aFrameQueue = null;
private VideoFrameQueue<Argb32VideoFrameStorage> _argb32FrameQueue = null;
private ProfilerMarker displayStatsMarker = new ProfilerMarker("DisplayStats");
private ProfilerMarker loadTextureDataMarker = new ProfilerMarker("LoadTextureData");
private ProfilerMarker uploadTextureToGpuMarker = new ProfilerMarker("UploadTextureToGPU");
private void Start()
{
CreateEmptyVideoTextures();
// Leave 3ms of margin, otherwise it misses 1 frame and drops to ~20 FPS
// when Unity is running at 60 FPS.
_minUpdateDelay = Mathf.Max(0f, 1f / Mathf.Max(0.001f, MaxFramerate) - 0.003f);
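// For example, with the default MaxFramerate of 30 FPS this yields roughly
// 1/30 s - 3 ms ~= 30.3 ms between texture updates.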
}
/// <summary>
/// Start rendering the passed source.
/// </summary>
/// <remarks>
/// Can be used to handle <see cref="VideoTrackSource.VideoStreamStarted"/> or <see cref="VideoReceiver.VideoStreamStarted"/>.
/// </remarks>
public void StartRendering(IVideoSource source)
{
bool isRemote = (source is RemoteVideoTrack);
int frameQueueSize = (isRemote ? 5 : 3);
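// Note: remote sources get a deeper queue (5 frames instead of 3), presumably to absorb
// network jitter in frame arrival.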
switch (source.FrameEncoding)
{
case VideoEncoding.I420A:
_i420aFrameQueue = new VideoFrameQueue<I420AVideoFrameStorage>(frameQueueSize);
source.I420AVideoFrameReady += I420AVideoFrameReady;
break;
case VideoEncoding.Argb32:
_argb32FrameQueue = new VideoFrameQueue<Argb32VideoFrameStorage>(frameQueueSize);
source.Argb32VideoFrameReady += Argb32VideoFrameReady;
break;
}
}
/// <summary>
/// Stop rendering the passed source. Must be called with the same source passed to <see cref="StartRendering(IVideoSource)"/>
/// </summary>
/// <remarks>
/// Can be used to handle <see cref="VideoTrackSource.VideoStreamStopped"/> or <see cref="VideoReceiver.VideoStreamStopped"/>.
/// </remarks>
public void StopRendering(IVideoSource _)
{
// Clear the video display to not confuse the user who could otherwise
// think that the video is still playing but is lagging/frozen.
CreateEmptyVideoTextures();
}
protected void OnDisable()
{
// Clear the video display to not confuse the user who could otherwise
// think that the video is still playing but is lagging/frozen.
CreateEmptyVideoTextures();
}
protected void I420AVideoFrameReady(I420AVideoFrame frame)
{
// This callback is generally from a non-UI thread, but Unity object access is only allowed
// on the main UI thread, so defer to that point.
_i420aFrameQueue.Enqueue(frame);
}
protected void Argb32VideoFrameReady(Argb32VideoFrame frame)
{
// This callback is generally from a non-UI thread, but Unity object access is only allowed
// on the main UI thread, so defer to that point.
_argb32FrameQueue.Enqueue(frame);
}
private void CreateEmptyVideoTextures()
{
// Create a default checkerboard texture which visually indicates
// that no data is available. This is useful for debugging and
// for the user to know about the state of the video.
_textureY = new Texture2D(2, 2);
_textureY.SetPixel(0, 0, Color.blue);
_textureY.SetPixel(1, 1, Color.blue);
_textureY.Apply();
_textureU = new Texture2D(2, 2);
_textureU.SetPixel(0, 0, Color.blue);
_textureU.SetPixel(1, 1, Color.blue);
_textureU.Apply();
_textureV = new Texture2D(2, 2);
_textureV.SetPixel(0, 0, Color.blue);
_textureV.SetPixel(1, 1, Color.blue);
_textureV.Apply();
// Assign that texture to the video player's Renderer component
videoMaterial = GetComponent<Renderer>().material;
if (_i420aFrameQueue != null)
{
videoMaterial.SetTexture("_YPlane", _textureY);
videoMaterial.SetTexture("_UPlane", _textureU);
videoMaterial.SetTexture("_VPlane", _textureV);
}
else if (_argb32FrameQueue != null)
{
videoMaterial.SetTexture("_MainTex", _textureY);
}
}
/// <summary>
/// Unity Engine Update() hook
/// </summary>
/// <remarks>
/// https://docs.unity3d.com/ScriptReference/MonoBehaviour.Update.html
/// </remarks>
private void Update()
{
if ((_i420aFrameQueue != null) || (_argb32FrameQueue != null))
{
#if UNITY_EDITOR
// Inside the Editor, constantly update _minUpdateDelay to
// react to user changes to MaxFramerate.
// Leave 3ms of margin, otherwise it misses 1 frame and drops to ~20 FPS
// when Unity is running at 60 FPS.
_minUpdateDelay = Mathf.Max(0f, 1f / Mathf.Max(0.001f, MaxFramerate) - 0.003f);
#endif
// FIXME - This will overflow/underflow the queue if the update rate does not match
// the rate at which frames are enqueued!
var curTime = Time.time;
if (curTime - lastUpdateTime >= _minUpdateDelay)
{
if (_i420aFrameQueue != null)
{
TryProcessI420AFrame();
}
else if (_argb32FrameQueue != null)
{
TryProcessArgb32Frame();
}
lastUpdateTime = curTime;
}
if (EnableStatistics)
{
// Share our stats values, if possible.
using (var profileScope = displayStatsMarker.Auto())
{
IVideoFrameQueue stats = (_i420aFrameQueue != null ? (IVideoFrameQueue)_i420aFrameQueue : _argb32FrameQueue);
if (FrameLoadStatHolder != null)
{
FrameLoadStatHolder.text = stats.QueuedFramesPerSecond.ToString("F2");
}
if (FramePresentStatHolder != null)
{
FramePresentStatHolder.text = stats.DequeuedFramesPerSecond.ToString("F2");
}
if (FrameSkipStatHolder != null)
{
FrameSkipStatHolder.text = stats.DroppedFramesPerSecond.ToString("F2");
}
}
}
}
}
/// <summary>
/// Internal helper that attempts to process frame data in the frame queue
/// </summary>
private void TryProcessI420AFrame()
{
if (_i420aFrameQueue.TryDequeue(out I420AVideoFrameStorage frame))
{
int lumaWidth = (int)frame.Width;
int lumaHeight = (int)frame.Height;
if (_textureY == null || (_textureY.width != lumaWidth || _textureY.height != lumaHeight))
{
_textureY = new Texture2D(lumaWidth, lumaHeight, TextureFormat.R8, mipChain: false);
videoMaterial.SetTexture("_YPlane", _textureY);
}
int chromaWidth = lumaWidth / 2;
int chromaHeight = lumaHeight / 2;
if (_textureU == null || (_textureU.width != chromaWidth || _textureU.height != chromaHeight))
{
_textureU = new Texture2D(chromaWidth, chromaHeight, TextureFormat.R8, mipChain: false);
videoMaterial.SetTexture("_UPlane", _textureU);
}
if (_textureV == null || (_textureV.width != chromaWidth || _textureV.height != chromaHeight))
{
_textureV = new Texture2D(chromaWidth, chromaHeight, TextureFormat.R8, mipChain: false);
videoMaterial.SetTexture("_VPlane", _textureV);
}
// Copy data from C# buffer into system memory managed by Unity.
// Note: This only "looks right" in Unity because we apply the
// "YUVFeedShader(Unlit)" to the texture (converting YUV planar to RGB).
// Note: Texture2D.LoadRawTextureData() expects bottom-up texture data, but the WebRTC
// video frame is top-down, so the image is uploaded vertically flipped and needs to be
// flipped back in the shader used to sample it. See #388.
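// The storage buffer is assumed to be tightly packed planar I420: the full-resolution Y plane,
// followed by the half-resolution U and V planes (any alpha plane is ignored here).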
using (var profileScope = loadTextureDataMarker.Auto())
{
unsafe
{
fixed (void* buffer = frame.Buffer)
{
var src = new IntPtr(buffer);
int lumaSize = lumaWidth * lumaHeight;
_textureY.LoadRawTextureData(src, lumaSize);
src += lumaSize;
int chromaSize = chromaWidth * chromaHeight;
_textureU.LoadRawTextureData(src, chromaSize);
src += chromaSize;
_textureV.LoadRawTextureData(src, chromaSize);
}
}
}
// Upload from system memory to GPU
using (var profileScope = uploadTextureToGpuMarker.Auto())
{
_textureY.Apply();
_textureU.Apply();
_textureV.Apply();
}
// Recycle the video frame packet for a later frame
_i420aFrameQueue.RecycleStorage(frame);
}
}
/// <summary>
/// Internal helper that attempts to process frame data in the frame queue
/// </summary>
private void TryProcessArgb32Frame()
{
if (_argb32FrameQueue.TryDequeue(out Argb32VideoFrameStorage frame))
{
int width = (int)frame.Width;
int height = (int)frame.Height;
if (_textureY == null || (_textureY.width != width || _textureY.height != height))
{
_textureY = new Texture2D(width, height, TextureFormat.BGRA32, mipChain: false);
videoMaterial.SetTexture("_MainTex", _textureY);
}
// Copy data from C# buffer into system memory managed by Unity.
// Note: Texture2D.LoadRawTextureData() expects bottom-up texture data, but the WebRTC
// video frame is top-down, so the image is uploaded vertically flipped and needs to be
// flipped back in the shader used to sample it. See #388.
using (var profileScope = loadTextureDataMarker.Auto())
{
unsafe
{
fixed (void* buffer = frame.Buffer)
{
var src = new IntPtr(buffer);
int size = width * height * 4;
_textureY.LoadRawTextureData(src, size);
}
}
}
// Upload from system memory to GPU
using (var profileScope = uploadTextureToGpuMarker.Auto())
{
_textureY.Apply();
}
// Recycle the video frame packet for a later frame
_argb32FrameQueue.RecycleStorage(frame);
}
}
}
}

View File

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: cb6b02c5bb7ab6b4ea6996c956ebdd21
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: f845cb707dda5c84594956f49655100f, type: 3}
userData:
assetBundleName:
assetBundleVariant:

Some files were not shown because too many files have changed in this diff.